[qemu/ar7.git] tcg/ia64/tcg-target.inc.c
blob bf9a97d75cd1865fcf371d3e9b750b4a73bbeeb5
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
5 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
27 * Register definitions
30 #ifdef CONFIG_DEBUG_TCG
31 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
32 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
33 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
34 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
35 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
36 "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
37 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
38 "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
39 "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
41 #endif
43 #ifndef CONFIG_SOFTMMU
44 #define TCG_GUEST_BASE_REG TCG_REG_R55
45 #endif
47 /* Branch registers */
48 enum {
49 TCG_REG_B0 = 0,
50 TCG_REG_B1,
51 TCG_REG_B2,
52 TCG_REG_B3,
53 TCG_REG_B4,
54 TCG_REG_B5,
55 TCG_REG_B6,
56 TCG_REG_B7,
59 /* Floating point registers */
60 enum {
61 TCG_REG_F0 = 0,
62 TCG_REG_F1,
63 TCG_REG_F2,
64 TCG_REG_F3,
65 TCG_REG_F4,
66 TCG_REG_F5,
67 TCG_REG_F6,
68 TCG_REG_F7,
69 TCG_REG_F8,
70 TCG_REG_F9,
71 TCG_REG_F10,
72 TCG_REG_F11,
73 TCG_REG_F12,
74 TCG_REG_F13,
75 TCG_REG_F14,
76 TCG_REG_F15,
79 /* Predicate registers */
80 enum {
81 TCG_REG_P0 = 0,
82 TCG_REG_P1,
83 TCG_REG_P2,
84 TCG_REG_P3,
85 TCG_REG_P4,
86 TCG_REG_P5,
87 TCG_REG_P6,
88 TCG_REG_P7,
89 TCG_REG_P8,
90 TCG_REG_P9,
91 TCG_REG_P10,
92 TCG_REG_P11,
93 TCG_REG_P12,
94 TCG_REG_P13,
95 TCG_REG_P14,
96 TCG_REG_P15,
99 /* Application registers */
100 enum {
101 TCG_REG_PFS = 64,
104 static const int tcg_target_reg_alloc_order[] = {
105 TCG_REG_R35,
106 TCG_REG_R36,
107 TCG_REG_R37,
108 TCG_REG_R38,
109 TCG_REG_R39,
110 TCG_REG_R40,
111 TCG_REG_R41,
112 TCG_REG_R42,
113 TCG_REG_R43,
114 TCG_REG_R44,
115 TCG_REG_R45,
116 TCG_REG_R46,
117 TCG_REG_R47,
118 TCG_REG_R48,
119 TCG_REG_R49,
120 TCG_REG_R50,
121 TCG_REG_R51,
122 TCG_REG_R52,
123 TCG_REG_R53,
124 TCG_REG_R54,
125 TCG_REG_R55,
126 TCG_REG_R14,
127 TCG_REG_R15,
128 TCG_REG_R16,
129 TCG_REG_R17,
130 TCG_REG_R18,
131 TCG_REG_R19,
132 TCG_REG_R20,
133 TCG_REG_R21,
134 TCG_REG_R22,
135 TCG_REG_R23,
136 TCG_REG_R24,
137 TCG_REG_R25,
138 TCG_REG_R26,
139 TCG_REG_R27,
140 TCG_REG_R28,
141 TCG_REG_R29,
142 TCG_REG_R30,
143 TCG_REG_R31,
144 TCG_REG_R56,
145 TCG_REG_R57,
146 TCG_REG_R58,
147 TCG_REG_R59,
148 TCG_REG_R60,
149 TCG_REG_R61,
150 TCG_REG_R62,
151 TCG_REG_R63,
152 TCG_REG_R8,
153 TCG_REG_R9,
154 TCG_REG_R10,
155 TCG_REG_R11
158 static const int tcg_target_call_iarg_regs[8] = {
159 TCG_REG_R56,
160 TCG_REG_R57,
161 TCG_REG_R58,
162 TCG_REG_R59,
163 TCG_REG_R60,
164 TCG_REG_R61,
165 TCG_REG_R62,
166 TCG_REG_R63,
169 static const int tcg_target_call_oarg_regs[] = {
170 TCG_REG_R8
174 * opcode formation
177 /* bundle templates: stops (double bar in the IA64 manual) are marked with
178 an uppercase letter. */
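/* For readers new to the encoding: each letter names the execution unit
   required by that slot (m = M/memory, i = I/integer, b = B/branch,
   f = F/floating-point), and "lx" is the L+X long-immediate form
   (movl/brl) that occupies the last two slots as a pair.  So, e.g., mLX
   is an M slot followed by a long-immediate, with a trailing stop. */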
179 enum {
180 mii = 0x00,
181 miI = 0x01,
182 mIi = 0x02,
183 mII = 0x03,
184 mlx = 0x04,
185 mLX = 0x05,
186 mmi = 0x08,
187 mmI = 0x09,
188 Mmi = 0x0a,
189 MmI = 0x0b,
190 mfi = 0x0c,
191 mfI = 0x0d,
192 mmf = 0x0e,
193 mmF = 0x0f,
194 mib = 0x10,
195 miB = 0x11,
196 mbb = 0x12,
197 mbB = 0x13,
198 bbb = 0x16,
199 bbB = 0x17,
200 mmb = 0x18,
201 mmB = 0x19,
202 mfb = 0x1c,
203 mfB = 0x1d,
206 enum {
207 OPC_ADD_A1 = 0x10000000000ull,
208 OPC_AND_A1 = 0x10060000000ull,
209 OPC_AND_A3 = 0x10160000000ull,
210 OPC_ANDCM_A1 = 0x10068000000ull,
211 OPC_ANDCM_A3 = 0x10168000000ull,
212 OPC_ADDS_A4 = 0x10800000000ull,
213 OPC_ADDL_A5 = 0x12000000000ull,
214 OPC_ALLOC_M34 = 0x02c00000000ull,
215 OPC_BR_DPTK_FEW_B1 = 0x08400000000ull,
216 OPC_BR_SPTK_MANY_B1 = 0x08000001000ull,
217 OPC_BR_CALL_SPNT_FEW_B3 = 0x0a200000000ull,
218 OPC_BR_SPTK_MANY_B4 = 0x00100001000ull,
219 OPC_BR_CALL_SPTK_MANY_B5 = 0x02100001000ull,
220 OPC_BR_RET_SPTK_MANY_B4 = 0x00108001100ull,
221 OPC_BRL_SPTK_MANY_X3 = 0x18000001000ull,
222 OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
223 OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
224 OPC_CMP_LT_A6 = 0x18000000000ull,
225 OPC_CMP_LTU_A6 = 0x1a000000000ull,
226 OPC_CMP_EQ_A6 = 0x1c000000000ull,
227 OPC_CMP4_LT_A6 = 0x18400000000ull,
228 OPC_CMP4_LTU_A6 = 0x1a400000000ull,
229 OPC_CMP4_EQ_A6 = 0x1c400000000ull,
230 OPC_DEP_I14 = 0x0ae00000000ull,
231 OPC_DEP_I15 = 0x08000000000ull,
232 OPC_DEP_Z_I12 = 0x0a600000000ull,
233 OPC_EXTR_I11 = 0x0a400002000ull,
234 OPC_EXTR_U_I11 = 0x0a400000000ull,
235 OPC_FCVT_FX_TRUNC_S1_F10 = 0x004d0000000ull,
236 OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
237 OPC_FCVT_XF_F11 = 0x000e0000000ull,
238 OPC_FMA_S1_F1 = 0x10400000000ull,
239 OPC_FNMA_S1_F1 = 0x18400000000ull,
240 OPC_FRCPA_S1_F6 = 0x00600000000ull,
241 OPC_GETF_SIG_M19 = 0x08708000000ull,
242 OPC_LD1_M1 = 0x08000000000ull,
243 OPC_LD1_M3 = 0x0a000000000ull,
244 OPC_LD2_M1 = 0x08040000000ull,
245 OPC_LD2_M3 = 0x0a040000000ull,
246 OPC_LD4_M1 = 0x08080000000ull,
247 OPC_LD4_M3 = 0x0a080000000ull,
248 OPC_LD8_M1 = 0x080c0000000ull,
249 OPC_LD8_M3 = 0x0a0c0000000ull,
250 OPC_MF_M24 = 0x00110000000ull,
251 OPC_MUX1_I3 = 0x0eca0000000ull,
252 OPC_NOP_B9 = 0x04008000000ull,
253 OPC_NOP_F16 = 0x00008000000ull,
254 OPC_NOP_I18 = 0x00008000000ull,
255 OPC_NOP_M48 = 0x00008000000ull,
256 OPC_MOV_I21 = 0x00e00100000ull,
257 OPC_MOV_RET_I21 = 0x00e00500000ull,
258 OPC_MOV_I22 = 0x00188000000ull,
259 OPC_MOV_I_I26 = 0x00150000000ull,
260 OPC_MOVL_X2 = 0x0c000000000ull,
261 OPC_OR_A1 = 0x10070000000ull,
262 OPC_OR_A3 = 0x10170000000ull,
263 OPC_SETF_EXP_M18 = 0x0c748000000ull,
264 OPC_SETF_SIG_M18 = 0x0c708000000ull,
265 OPC_SHL_I7 = 0x0f240000000ull,
266 OPC_SHR_I5 = 0x0f220000000ull,
267 OPC_SHR_U_I5 = 0x0f200000000ull,
268 OPC_SHRP_I10 = 0x0ac00000000ull,
269 OPC_SXT1_I29 = 0x000a0000000ull,
270 OPC_SXT2_I29 = 0x000a8000000ull,
271 OPC_SXT4_I29 = 0x000b0000000ull,
272 OPC_ST1_M4 = 0x08c00000000ull,
273 OPC_ST2_M4 = 0x08c40000000ull,
274 OPC_ST4_M4 = 0x08c80000000ull,
275 OPC_ST8_M4 = 0x08cc0000000ull,
276 OPC_SUB_A1 = 0x10028000000ull,
277 OPC_SUB_A3 = 0x10128000000ull,
278 OPC_UNPACK4_L_I2 = 0x0f860000000ull,
279 OPC_XMA_L_F2 = 0x1d000000000ull,
280 OPC_XOR_A1 = 0x10078000000ull,
281 OPC_XOR_A3 = 0x10178000000ull,
282 OPC_ZXT1_I29 = 0x00080000000ull,
283 OPC_ZXT2_I29 = 0x00088000000ull,
284 OPC_ZXT4_I29 = 0x00090000000ull,
286 INSN_NOP_M = OPC_NOP_M48, /* nop.m 0 */
287 INSN_NOP_I = OPC_NOP_I18, /* nop.i 0 */
290 static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
291 int r2, int r3)
293 return opc
294 | ((r3 & 0x7f) << 20)
295 | ((r2 & 0x7f) << 13)
296 | ((r1 & 0x7f) << 6)
297 | (qp & 0x3f);
300 static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
301 uint64_t imm, int r3)
303 return opc
304 | ((imm & 0x80) << 29) /* s */
305 | ((imm & 0x7f) << 13) /* imm7b */
306 | ((r3 & 0x7f) << 20)
307 | ((r1 & 0x7f) << 6)
308 | (qp & 0x3f);
311 static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
312 uint64_t imm, int r3)
314 return opc
315 | ((imm & 0x2000) << 23) /* s */
316 | ((imm & 0x1f80) << 20) /* imm6d */
317 | ((imm & 0x007f) << 13) /* imm7b */
318 | ((r3 & 0x7f) << 20)
319 | ((r1 & 0x7f) << 6)
320 | (qp & 0x3f);
323 static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
324 uint64_t imm, int r3)
326 return opc
327 | ((imm & 0x200000) << 15) /* s */
328 | ((imm & 0x1f0000) << 6) /* imm5c */
329 | ((imm & 0x00ff80) << 20) /* imm9d */
330 | ((imm & 0x00007f) << 13) /* imm7b */
331 | ((r3 & 0x03) << 20)
332 | ((r1 & 0x7f) << 6)
333 | (qp & 0x3f);
336 static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
337 int p2, int r2, int r3)
339 return opc
340 | ((p2 & 0x3f) << 27)
341 | ((r3 & 0x7f) << 20)
342 | ((r2 & 0x7f) << 13)
343 | ((p1 & 0x3f) << 6)
344 | (qp & 0x3f);
347 static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
349 return opc
350 | ((imm & 0x100000) << 16) /* s */
351 | ((imm & 0x0fffff) << 13) /* imm20b */
352 | (qp & 0x3f);
355 static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
357 return opc
358 | ((imm & 0x100000) << 16) /* s */
359 | ((imm & 0x0fffff) << 13) /* imm20b */
360 | ((b1 & 0x7) << 6)
361 | (qp & 0x3f);
364 static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
366 return opc
367 | ((b2 & 0x7) << 13)
368 | (qp & 0x3f);
371 static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
373 return opc
374 | ((b2 & 0x7) << 13)
375 | ((b1 & 0x7) << 6)
376 | (qp & 0x3f);
380 static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
382 return opc
383 | ((imm & 0x100000) << 16) /* i */
384 | ((imm & 0x0fffff) << 6) /* imm20a */
385 | (qp & 0x3f);
388 static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
389 int f3, int f4, int f2)
391 return opc
392 | ((f4 & 0x7f) << 27)
393 | ((f3 & 0x7f) << 20)
394 | ((f2 & 0x7f) << 13)
395 | ((f1 & 0x7f) << 6)
396 | (qp & 0x3f);
399 static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
400 int f3, int f4, int f2)
402 return opc
403 | ((f4 & 0x7f) << 27)
404 | ((f3 & 0x7f) << 20)
405 | ((f2 & 0x7f) << 13)
406 | ((f1 & 0x7f) << 6)
407 | (qp & 0x3f);
410 static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
411 int p2, int f2, int f3)
413 return opc
414 | ((p2 & 0x3f) << 27)
415 | ((f3 & 0x7f) << 20)
416 | ((f2 & 0x7f) << 13)
417 | ((f1 & 0x7f) << 6)
418 | (qp & 0x3f);
421 static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
423 return opc
424 | ((f2 & 0x7f) << 13)
425 | ((f1 & 0x7f) << 6)
426 | (qp & 0x3f);
429 static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
431 return opc
432 | ((f2 & 0x7f) << 13)
433 | ((f1 & 0x7f) << 6)
434 | (qp & 0x3f);
437 static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
439 return opc
440 | ((imm & 0x100000) << 16) /* i */
441 | ((imm & 0x0fffff) << 6) /* imm20a */
442 | (qp & 0x3f);
445 static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
446 int r2, int r3)
448 return opc
449 | ((r3 & 0x7f) << 20)
450 | ((r2 & 0x7f) << 13)
451 | ((r1 & 0x7f) << 6)
452 | (qp & 0x3f);
455 static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
456 int r2, int mbtype)
458 return opc
459 | ((mbtype & 0x0f) << 20)
460 | ((r2 & 0x7f) << 13)
461 | ((r1 & 0x7f) << 6)
462 | (qp & 0x3f);
465 static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
466 int r3, int r2)
468 return opc
469 | ((r3 & 0x7f) << 20)
470 | ((r2 & 0x7f) << 13)
471 | ((r1 & 0x7f) << 6)
472 | (qp & 0x3f);
475 static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
476 int r2, int r3)
478 return opc
479 | ((r3 & 0x7f) << 20)
480 | ((r2 & 0x7f) << 13)
481 | ((r1 & 0x7f) << 6)
482 | (qp & 0x3f);
485 static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
486 int r2, int r3, uint64_t count)
488 return opc
489 | ((count & 0x3f) << 27)
490 | ((r3 & 0x7f) << 20)
491 | ((r2 & 0x7f) << 13)
492 | ((r1 & 0x7f) << 6)
493 | (qp & 0x3f);
496 static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
497 int r3, uint64_t pos, uint64_t len)
499 return opc
500 | ((len & 0x3f) << 27)
501 | ((r3 & 0x7f) << 20)
502 | ((pos & 0x3f) << 14)
503 | ((r1 & 0x7f) << 6)
504 | (qp & 0x3f);
507 static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
508 int r2, uint64_t pos, uint64_t len)
510 return opc
511 | ((len & 0x3f) << 27)
512 | ((pos & 0x3f) << 20)
513 | ((r2 & 0x7f) << 13)
514 | ((r1 & 0x7f) << 6)
515 | (qp & 0x3f);
518 static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
519 int r3, uint64_t pos, uint64_t len)
521 return opc
522 | ((imm & 0x01) << 36)
523 | ((len & 0x3f) << 27)
524 | ((r3 & 0x7f) << 20)
525 | ((pos & 0x3f) << 14)
526 | ((r1 & 0x7f) << 6)
527 | (qp & 0x3f);
530 static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
531 int r3, uint64_t pos, uint64_t len)
533 return opc
534 | ((pos & 0x3f) << 31)
535 | ((len & 0x0f) << 27)
536 | ((r3 & 0x7f) << 20)
537 | ((r2 & 0x7f) << 13)
538 | ((r1 & 0x7f) << 6)
539 | (qp & 0x3f);
542 static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
544 return opc
545 | ((imm & 0x100000) << 16) /* i */
546 | ((imm & 0x0fffff) << 6) /* imm20a */
547 | (qp & 0x3f);
550 static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
551 int r2, uint64_t imm)
553 return opc
554 | ((imm & 0x1ff) << 24)
555 | ((r2 & 0x7f) << 13)
556 | ((b1 & 0x7) << 6)
557 | (qp & 0x3f);
560 static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
562 return opc
563 | ((b2 & 0x7) << 13)
564 | ((r1 & 0x7f) << 6)
565 | (qp & 0x3f);
568 static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
570 return opc
571 | ((ar3 & 0x7f) << 20)
572 | ((r2 & 0x7f) << 13)
573 | (qp & 0x3f);
576 static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
578 return opc
579 | ((r3 & 0x7f) << 20)
580 | ((r1 & 0x7f) << 6)
581 | (qp & 0x3f);
584 static inline uint64_t tcg_opc_l2(uint64_t imm)
586 return (imm & 0x7fffffffffc00000ull) >> 22;
589 static inline uint64_t tcg_opc_l3(uint64_t imm)
591 return (imm & 0x07fffffffff00000ull) >> 18;
594 #define tcg_opc_l4 tcg_opc_l3
596 static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
598 return opc
599 | ((r3 & 0x7f) << 20)
600 | ((r1 & 0x7f) << 6)
601 | (qp & 0x3f);
604 static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
605 int r3, uint64_t imm)
607 return opc
608 | ((imm & 0x100) << 28) /* s */
609 | ((imm & 0x080) << 20) /* i */
610 | ((imm & 0x07f) << 13) /* imm7b */
611 | ((r3 & 0x7f) << 20)
612 | ((r1 & 0x7f) << 6)
613 | (qp & 0x3f);
616 static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
618 return opc
619 | ((r3 & 0x7f) << 20)
620 | ((r2 & 0x7f) << 13)
621 | (qp & 0x3f);
624 static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
626 return opc
627 | ((r2 & 0x7f) << 13)
628 | ((f1 & 0x7f) << 6)
629 | (qp & 0x3f);
632 static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
634 return opc
635 | ((f2 & 0x7f) << 13)
636 | ((r1 & 0x7f) << 6)
637 | (qp & 0x3f);
640 static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
641 int sof, int sol, int sor)
643 return opc
644 | ((sor & 0x0f) << 27)
645 | ((sol & 0x7f) << 20)
646 | ((sof & 0x7f) << 13)
647 | ((r1 & 0x7f) << 6)
648 | (qp & 0x3f);
651 static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
653 return opc
654 | ((imm & 0x100000) << 16) /* i */
655 | ((imm & 0x0fffff) << 6) /* imm20a */
656 | (qp & 0x3f);
659 static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
660 int r1, uint64_t imm)
662 return opc
663 | ((imm & 0x8000000000000000ull) >> 27) /* i */
664 | (imm & 0x0000000000200000ull) /* ic */
665 | ((imm & 0x00000000001f0000ull) << 6) /* imm5c */
666 | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
667 | ((imm & 0x000000000000007full) << 13) /* imm7b */
668 | ((r1 & 0x7f) << 6)
669 | (qp & 0x3f);
672 static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
674 return opc
675 | ((imm & 0x0800000000000000ull) >> 23) /* i */
676 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
677 | (qp & 0x3f);
680 static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
682 return opc
683 | ((imm & 0x0800000000000000ull) >> 23) /* i */
684 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
685 | ((b1 & 0x7) << 6)
686 | (qp & 0x3f);
691  * Relocations - Note that we never encode branches anywhere other than slot 2.
694 static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
696 uint64_t imm = target - pc;
698 pc->hi = (pc->hi & 0xf700000fffffffffull)
699 | ((imm & 0x100000) << 39) /* s */
700 | ((imm & 0x0fffff) << 36); /* imm20b */
703 static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
705 int64_t high = pc->hi;
707 return ((high >> 39) & 0x100000) + /* s */
708 ((high >> 36) & 0x0fffff); /* imm20b */
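/* Both helpers operate on tcg_insn_unit (i.e. 16-byte bundle) pointers, so
   the s/imm20b value patched into slot 2 is a displacement counted in
   bundles, which is what the IP-relative branch encodings expect. */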
711 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
712 intptr_t value, intptr_t addend)
714 tcg_debug_assert(addend == 0);
715 tcg_debug_assert(type == R_IA64_PCREL21B);
716 reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
720 * Constraints
723 /* parse target specific constraints */
724 static const char *target_parse_constraint(TCGArgConstraint *ct,
725 const char *ct_str, TCGType type)
727 switch(*ct_str++) {
728 case 'r':
729 ct->ct |= TCG_CT_REG;
730 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
731 break;
732 case 'I':
733 ct->ct |= TCG_CT_CONST_S22;
734 break;
735 case 'S':
736 ct->ct |= TCG_CT_REG;
737 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
738 #if defined(CONFIG_SOFTMMU)
739 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
740 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
741 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
742 #endif
743 break;
744 case 'Z':
745         /* We cheat a bit here: register r0 (which always reads as zero) is
746            also register number 0, so there is no need to check for
747            const_args in each instruction. */
748 ct->ct |= TCG_CT_CONST_ZERO;
749 break;
750 default:
751 return NULL;
753 return ct_str;
756 /* test if a constant matches the constraint */
757 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
758 const TCGArgConstraint *arg_ct)
760 int ct;
761 ct = arg_ct->ct;
762 if (ct & TCG_CT_CONST)
763 return 1;
764 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
765 return 1;
766 else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
767 return 1;
768 else
769 return 0;
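/* The TCG_CT_CONST_S22 test above accepts exactly the values that survive
   sign-extension from 22 bits, i.e. those that fit the imm22 field of the
   addl (A5) instruction used by tcg_opc_movi_a. */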
773 * Code generation
776 static tcg_insn_unit *tb_ret_addr;
778 static inline void tcg_out_bundle(TCGContext *s, int template,
779 uint64_t slot0, uint64_t slot1,
780 uint64_t slot2)
782 template &= 0x1f; /* 5 bits */
783 slot0 &= 0x1ffffffffffull; /* 41 bits */
784 slot1 &= 0x1ffffffffffull; /* 41 bits */
785 slot2 &= 0x1ffffffffffull; /* 41 bits */
787 *s->code_ptr++ = (tcg_insn_unit){
788 (slot1 << 46) | (slot0 << 5) | template,
789 (slot2 << 23) | (slot1 >> 18)
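/* The resulting 128-bit bundle is laid out as (low word first):
     lo[ 4: 0]  template
     lo[45: 5]  slot 0
     lo[63:46]  slot 1, bits 17:0
     hi[22: 0]  slot 1, bits 40:18
     hi[63:23]  slot 2
   which is exactly what the two halves written above encode. */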
793 static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
795 return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
798 static inline void tcg_out_mov(TCGContext *s, TCGType type,
799 TCGReg ret, TCGReg arg)
801 tcg_out_bundle(s, mmI,
802 INSN_NOP_M,
803 INSN_NOP_M,
804 tcg_opc_mov_a(TCG_REG_P0, ret, arg));
807 static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
809 tcg_debug_assert(src == sextract64(src, 0, 22));
810 return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
813 static inline void tcg_out_movi(TCGContext *s, TCGType type,
814 TCGReg reg, tcg_target_long arg)
816 tcg_out_bundle(s, mLX,
817 INSN_NOP_M,
818 tcg_opc_l2 (arg),
819 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
822 static void tcg_out_br(TCGContext *s, TCGLabel *l)
824 uint64_t imm;
826     /* Take care not to modify the branch target: read the existing
827        value and use it again.  This ensures that caches and memory
828        are kept coherent during retranslation. */
829 if (l->has_value) {
830 imm = l->u.value_ptr - s->code_ptr;
831 } else {
832 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
833 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
836 tcg_out_bundle(s, mmB,
837 INSN_NOP_M,
838 INSN_NOP_M,
839 tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
842 static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
844 uintptr_t func = desc->lo, gp = desc->hi, disp;
846 /* Look through the function descriptor. */
847 tcg_out_bundle(s, mlx,
848 INSN_NOP_M,
849 tcg_opc_l2 (gp),
850 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
851 disp = (tcg_insn_unit *)func - s->code_ptr;
852 tcg_out_bundle(s, mLX,
853 INSN_NOP_M,
854 tcg_opc_l4 (disp),
855 tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
856 TCG_REG_B0, disp));
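/* For reference: an IA-64 function pointer addresses a two-word descriptor,
   roughly struct { uintptr_t entry; uintptr_t gp; }, so desc->lo and
   desc->hi above are the entry point and the callee's gp.  The gp is
   materialised into r1 before the brl.call, as the calling convention
   requires. */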
859 static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
861 uint64_t imm, opc1;
863     /* arg == 0 is at least a common case; any 22-bit value avoids the extra movl bundle. */
864 if (arg == sextract64(arg, 0, 22)) {
865 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
866 } else {
867 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
868 opc1 = INSN_NOP_M;
871 imm = tb_ret_addr - s->code_ptr;
873 tcg_out_bundle(s, mLX,
874 opc1,
875 tcg_opc_l3 (imm),
876 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
879 static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
881 if (s->tb_jmp_insn_offset) {
882 /* direct jump method */
883 tcg_abort();
884 } else {
885 /* indirect jump method */
886 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
887 (tcg_target_long)(s->tb_jmp_target_addr + arg));
888 tcg_out_bundle(s, MmI,
889 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
890 TCG_REG_R2, TCG_REG_R2),
891 INSN_NOP_M,
892 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
893 TCG_REG_R2, 0));
894 tcg_out_bundle(s, mmB,
895 INSN_NOP_M,
896 INSN_NOP_M,
897 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
898 TCG_REG_B6));
900 s->tb_jmp_reset_offset[arg] = tcg_current_code_size(s);
903 static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
905 tcg_out_bundle(s, mmI,
906 INSN_NOP_M,
907 INSN_NOP_M,
908 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
909 tcg_out_bundle(s, mmB,
910 INSN_NOP_M,
911 INSN_NOP_M,
912 tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
915 static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
916 TCGArg arg1, tcg_target_long arg2)
918 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
919 tcg_out_bundle(s, MmI,
920 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
921 TCG_REG_R2, arg2, arg1),
922 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
923 INSN_NOP_I);
924 } else {
925 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
926 tcg_out_bundle(s, MmI,
927 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
928 TCG_REG_R2, TCG_REG_R2, arg1),
929 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
930 INSN_NOP_I);
934 static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
935 TCGArg arg1, tcg_target_long arg2)
937 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
938 tcg_out_bundle(s, MmI,
939 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
940 TCG_REG_R2, arg2, arg1),
941 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
942 INSN_NOP_I);
943 } else {
944 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
945 tcg_out_bundle(s, MmI,
946 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
947 TCG_REG_R2, TCG_REG_R2, arg1),
948 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
949 INSN_NOP_I);
953 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
954 TCGReg arg1, intptr_t arg2)
956 if (type == TCG_TYPE_I32) {
957 tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
958 } else {
959 tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
963 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
964 TCGReg arg1, intptr_t arg2)
966 if (type == TCG_TYPE_I32) {
967 tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
968 } else {
969 tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
973 static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
974 TCGReg base, intptr_t ofs)
976 if (val == 0) {
977 tcg_out_st(s, type, TCG_REG_R0, base, ofs);
978 return true;
980 return false;
983 static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
984 TCGReg ret, TCGArg arg1, int const_arg1,
985 TCGArg arg2, int const_arg2)
987 uint64_t opc1 = 0, opc2 = 0, opc3 = 0;
989 if (const_arg2 && arg2 != 0) {
990 opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
991 arg2 = TCG_REG_R3;
993 if (const_arg1 && arg1 != 0) {
994 if (opc_a3 && arg1 == (int8_t)arg1) {
995 opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
996 } else {
997 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
998 arg1 = TCG_REG_R2;
1001 if (opc3 == 0) {
1002 opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
1005 tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
1006 opc1 ? opc1 : INSN_NOP_M,
1007 opc2 ? opc2 : INSN_NOP_I,
1008 opc3);
1011 static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
1012 TCGArg arg2, int const_arg2)
1014 if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
1015 tcg_out_bundle(s, mmI,
1016 INSN_NOP_M,
1017 INSN_NOP_M,
1018 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
1019 } else {
1020 tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
1024 static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
1025 int const_arg1, TCGArg arg2, int const_arg2)
1027 if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
1028 tcg_out_bundle(s, mmI,
1029 INSN_NOP_M,
1030 INSN_NOP_M,
1031 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
1032 } else {
1033 tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
1034 arg1, const_arg1, arg2, const_arg2);
1038 static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
1039 TCGArg arg1, int const_arg1,
1040 TCGArg arg2, int const_arg2)
1042 tcg_out_bundle(s, mII,
1043 INSN_NOP_M,
1044 tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
1045 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1048 static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
1049 TCGArg arg1, int const_arg1,
1050 TCGArg arg2, int const_arg2)
1052 tcg_out_bundle(s, mII,
1053 INSN_NOP_M,
1054 tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
1055 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1058 static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
1059 TCGArg arg1, int const_arg1,
1060 TCGArg arg2, int const_arg2)
1062 tcg_out_bundle(s, mII,
1063 INSN_NOP_M,
1064 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
1065 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1068 static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
1069 TCGArg arg1, int const_arg1,
1070 TCGArg arg2, int const_arg2)
1072 tcg_out_bundle(s, mII,
1073 INSN_NOP_M,
1074 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
1075 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
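/* IA-64 has no separate NOT instruction; the helpers above synthesise the
   complement with "andcm -1, x" (A3 immediate form), which computes
   -1 & ~x == ~x.  eqv/nand/nor are thus xor/and/or followed by that
   complement, and orc complements arg2 before the or. */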
1078 static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
1079 TCGArg arg1, TCGArg arg2)
1081 tcg_out_bundle(s, mmI,
1082 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
1083 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
1084 INSN_NOP_I);
1085 tcg_out_bundle(s, mmF,
1086 INSN_NOP_M,
1087 INSN_NOP_M,
1088 tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
1089 TCG_REG_F7, TCG_REG_F0));
1090 tcg_out_bundle(s, miI,
1091 tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
1092 INSN_NOP_I,
1093 INSN_NOP_I);
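/* There is no integer multiply outside the FP unit on IA-64, so the
   sequence above transfers both operands to FP registers with setf.sig,
   multiplies their significands with xma.l (f6 * f7 + f0, where f0 reads
   as zero), and copies the low 64 bits of the product back with
   getf.sig. */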
1096 static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1097 TCGArg arg2, int const_arg2)
1099 if (const_arg2) {
1100 tcg_out_bundle(s, miI,
1101 INSN_NOP_M,
1102 INSN_NOP_I,
1103 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1104 ret, arg1, arg2, 31 - arg2));
1105 } else {
1106 tcg_out_bundle(s, mII,
1107 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
1108 TCG_REG_R3, 0x1f, arg2),
1109 tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
1110 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
1111 TCG_REG_R2, TCG_REG_R3));
1115 static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1116 TCGArg arg2, int const_arg2)
1118 if (const_arg2) {
1119 tcg_out_bundle(s, miI,
1120 INSN_NOP_M,
1121 INSN_NOP_I,
1122 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1123 ret, arg1, arg2, 63 - arg2));
1124 } else {
1125 tcg_out_bundle(s, miI,
1126 INSN_NOP_M,
1127 INSN_NOP_I,
1128 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
1132 static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1133 TCGArg arg2, int const_arg2)
1135 if (const_arg2) {
1136 tcg_out_bundle(s, miI,
1137 INSN_NOP_M,
1138 INSN_NOP_I,
1139 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1140 arg1, 63 - arg2, 31 - arg2));
1141 } else {
1142 tcg_out_bundle(s, mII,
1143 INSN_NOP_M,
1144 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
1145 0x1f, arg2),
1146 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1147 arg1, TCG_REG_R2));
1151 static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1152 TCGArg arg2, int const_arg2)
1154 if (const_arg2) {
1155 tcg_out_bundle(s, miI,
1156 INSN_NOP_M,
1157 INSN_NOP_I,
1158 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1159 arg1, 63 - arg2, 63 - arg2));
1160 } else {
1161 tcg_out_bundle(s, miI,
1162 INSN_NOP_M,
1163 INSN_NOP_I,
1164 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1165 arg1, arg2));
1169 static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1170 TCGArg arg2, int const_arg2)
1172 if (const_arg2) {
1173 tcg_out_bundle(s, miI,
1174 INSN_NOP_M,
1175 INSN_NOP_I,
1176 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1177 arg1, arg2, 31 - arg2));
1178 } else {
1179 tcg_out_bundle(s, mII,
1180 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1181 0x1f, arg2),
1182 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
1183 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1184 TCG_REG_R2, TCG_REG_R3));
1188 static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1189 TCGArg arg2, int const_arg2)
1191 if (const_arg2) {
1192 tcg_out_bundle(s, miI,
1193 INSN_NOP_M,
1194 INSN_NOP_I,
1195 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1196 arg1, arg2, 63 - arg2));
1197 } else {
1198 tcg_out_bundle(s, miI,
1199 INSN_NOP_M,
1200 INSN_NOP_I,
1201 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1202 arg1, arg2));
1206 static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1207 TCGArg arg2, int const_arg2)
1209 if (const_arg2) {
1210 tcg_out_bundle(s, mII,
1211 INSN_NOP_M,
1212 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1213 TCG_REG_R2, arg1, arg1),
1214 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1215 TCG_REG_R2, 32 - arg2, 31));
1216 } else {
1217 tcg_out_bundle(s, miI,
1218 INSN_NOP_M,
1219 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1220 TCG_REG_R2, arg1, arg1),
1221 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1222 0x1f, arg2));
1223 tcg_out_bundle(s, mII,
1224 INSN_NOP_M,
1225 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
1226 0x20, TCG_REG_R3),
1227 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1228 TCG_REG_R2, TCG_REG_R3));
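/* The 32-bit rotates rely on unpack4.l with both operands equal, which
   replicates the low 32 bits of arg1 into both halves of a 64-bit value;
   a 64-bit extract (constant count) or unsigned shift (variable count)
   of that value then yields the rotated result directly. */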
1232 static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1233 TCGArg arg2, int const_arg2)
1235 if (const_arg2) {
1236 tcg_out_bundle(s, miI,
1237 INSN_NOP_M,
1238 INSN_NOP_I,
1239 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1240 arg1, 0x40 - arg2));
1241 } else {
1242 tcg_out_bundle(s, mII,
1243 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1244 0x40, arg2),
1245 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
1246 arg1, arg2),
1247 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
1248 arg1, TCG_REG_R2));
1249 tcg_out_bundle(s, miI,
1250 INSN_NOP_M,
1251 INSN_NOP_I,
1252 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1253 TCG_REG_R2, TCG_REG_R3));
1257 static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1258 TCGArg arg2, int const_arg2)
1260 if (const_arg2) {
1261 tcg_out_bundle(s, mII,
1262 INSN_NOP_M,
1263 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1264 TCG_REG_R2, arg1, arg1),
1265 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1266 TCG_REG_R2, arg2, 31));
1267 } else {
1268 tcg_out_bundle(s, mII,
1269 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1270 0x1f, arg2),
1271 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1272 TCG_REG_R2, arg1, arg1),
1273 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1274 TCG_REG_R2, TCG_REG_R3));
1278 static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1279 TCGArg arg2, int const_arg2)
1281 if (const_arg2) {
1282 tcg_out_bundle(s, miI,
1283 INSN_NOP_M,
1284 INSN_NOP_I,
1285 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1286 arg1, arg2));
1287 } else {
1288 tcg_out_bundle(s, mII,
1289 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1290 0x40, arg2),
1291 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
1292 arg1, arg2),
1293 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
1294 arg1, TCG_REG_R2));
1295 tcg_out_bundle(s, miI,
1296 INSN_NOP_M,
1297 INSN_NOP_I,
1298 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1299 TCG_REG_R2, TCG_REG_R3));
1303 static const uint64_t opc_ext_i29[8] = {
1304 OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
1305 OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
1308 static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
1310 if ((opc & MO_SIZE) == MO_64) {
1311 return tcg_opc_mov_a(qp, d, s);
1312 } else {
1313 return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
1317 static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
1318 TCGArg ret, TCGArg arg)
1320 tcg_out_bundle(s, miI,
1321 INSN_NOP_M,
1322 INSN_NOP_I,
1323 tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
1326 static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
1328 return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
1331 static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
1333 tcg_out_bundle(s, mII,
1334 INSN_NOP_M,
1335 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
1336 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1339 static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
1341 tcg_out_bundle(s, mII,
1342 INSN_NOP_M,
1343 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
1344 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1347 static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
1349 tcg_out_bundle(s, miI,
1350 INSN_NOP_M,
1351 INSN_NOP_I,
1352 tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
1355 static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
1356 TCGArg a2, int const_a2, int pos, int len)
1358 uint64_t i1 = 0, i2 = 0;
1359 int cpos = 63 - pos, lm1 = len - 1;
1361 if (const_a2) {
1362 /* Truncate the value of a constant a2 to the width of the field. */
1363 int mask = (1u << len) - 1;
1364 a2 &= mask;
1366 if (a2 == 0 || a2 == mask) {
1367 /* 1-bit signed constant inserted into register. */
1368 i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
1369 } else {
1370 /* Otherwise, load any constant into a temporary. Do this into
1371 the first I slot to help out with cross-unit delays. */
1372 i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
1373 a2 = TCG_REG_R2;
1376 if (i2 == 0) {
1377 i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
1379 tcg_out_bundle(s, (i1 ? mII : miI),
1380 INSN_NOP_M,
1381 i1 ? i1 : INSN_NOP_I,
1382 i2);
1385 static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
1386 TCGArg arg2, int cmp4)
1388 uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;
1390 if (cmp4) {
1391 opc_eq_a6 = OPC_CMP4_EQ_A6;
1392 opc_lt_a6 = OPC_CMP4_LT_A6;
1393 opc_ltu_a6 = OPC_CMP4_LTU_A6;
1394 } else {
1395 opc_eq_a6 = OPC_CMP_EQ_A6;
1396 opc_lt_a6 = OPC_CMP_LT_A6;
1397 opc_ltu_a6 = OPC_CMP_LTU_A6;
1400 switch (cond) {
1401 case TCG_COND_EQ:
1402 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1403 case TCG_COND_NE:
1404 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1405 case TCG_COND_LT:
1406 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1407 case TCG_COND_LTU:
1408 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1409 case TCG_COND_GE:
1410 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1411 case TCG_COND_GEU:
1412 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1413 case TCG_COND_LE:
1414 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1415 case TCG_COND_LEU:
1416 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1417 case TCG_COND_GT:
1418 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1419 case TCG_COND_GTU:
1420 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1421 default:
1422 tcg_abort();
1423 break;
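/* Whatever the condition, p6 is set when the TCG condition holds and p7
   when it does not: brcond branches on p6, while setcond and movcond
   below simply predicate their two moves on p6/p7. */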
1427 static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
1428 TCGReg arg2, TCGLabel *l, int cmp4)
1430 uint64_t imm;
1432     /* Take care not to modify the branch target: read the existing
1433        value and use it again.  This ensures that caches and memory
1434        are kept coherent during retranslation. */
1435 if (l->has_value) {
1436 imm = l->u.value_ptr - s->code_ptr;
1437 } else {
1438 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
1439 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
1442 tcg_out_bundle(s, miB,
1443 INSN_NOP_M,
1444 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1445 tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
1448 static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
1449 TCGArg arg1, TCGArg arg2, int cmp4)
1451 tcg_out_bundle(s, MmI,
1452 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1453 tcg_opc_movi_a(TCG_REG_P6, ret, 1),
1454 tcg_opc_movi_a(TCG_REG_P7, ret, 0));
1457 static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
1458 TCGArg c1, TCGArg c2,
1459 TCGArg v1, int const_v1,
1460 TCGArg v2, int const_v2, int cmp4)
1462 uint64_t opc1, opc2;
1464 if (const_v1) {
1465 opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
1466 } else if (ret == v1) {
1467 opc1 = INSN_NOP_M;
1468 } else {
1469 opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
1471 if (const_v2) {
1472 opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
1473 } else if (ret == v2) {
1474 opc2 = INSN_NOP_I;
1475 } else {
1476 opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
1479 tcg_out_bundle(s, MmI,
1480 tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
1481 opc1,
1482 opc2);
1485 #if defined(CONFIG_SOFTMMU)
1486 /* We're expecting to use a signed 22-bit immediate add. */
1487 QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
1488 > 0x1fffff)
1490 /* Load and compare a TLB entry, and return the result in (p6, p7).
1491    R2 is loaded with the addend of the TLB entry.
1492    R57 is loaded with the address, zero extended on 32-bit targets.
1493    R1, R3 are clobbered, leaving R56 free for...
1494    BSWAP_1, BSWAP_2 and I-slot insns for swapping data for the store. */
1495 static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
1496 TCGMemOp opc, int off_rw, int off_add,
1497 uint64_t bswap1, uint64_t bswap2)
1499 unsigned s_bits = opc & MO_SIZE;
1500 unsigned a_bits = get_alignment_bits(opc);
1502 /* We don't support unaligned accesses, but overalignment is easy. */
1503 if (a_bits < s_bits) {
1504 a_bits = s_bits;
1508 .mii
1509 mov r2 = off_rw
1510 extr.u r3 = addr_reg, ... # extract tlb page
1511 zxt4 r57 = addr_reg # or mov for 64-bit guest
1513 .mii
1514 addl r2 = r2, areg0
1515 shl r3 = r3, cteb # via dep.z
1516 dep r1 = 0, r57, ... # zero page ofs, keep align
1518 .mmi
1519 add r2 = r2, r3
1521 ld4 r3 = [r2], off_add-off_rw # or ld8 for 64-bit guest
1524 .mmi
1526 cmp.eq p6, p7 = r3, r58
1530 tcg_out_bundle(s, miI,
1531 tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
1532 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
1533 addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
1534 tcg_opc_ext_i(TCG_REG_P0,
1535 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
1536 TCG_REG_R57, addr_reg));
1537 tcg_out_bundle(s, miI,
1538 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1539 TCG_REG_R2, TCG_AREG0),
1540 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
1541 TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
1542 63 - CPU_TLB_ENTRY_BITS),
1543 tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
1544 TCG_REG_R57, 63 - a_bits,
1545 TARGET_PAGE_BITS - a_bits - 1));
1546 tcg_out_bundle(s, MmI,
1547 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
1548 TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
1549 tcg_opc_m3 (TCG_REG_P0,
1550 (TARGET_LONG_BITS == 32
1551 ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
1552 TCG_REG_R2, off_add - off_rw),
1553 bswap1);
1554 tcg_out_bundle(s, mmI,
1555 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
1556 tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
1557 TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
1558 bswap2);
1561 typedef struct TCGLabelQemuLdst {
1562 bool is_ld;
1563 TCGMemOp size;
1564 tcg_insn_unit *label_ptr; /* label pointers to be updated */
1565 struct TCGLabelQemuLdst *next;
1566 } TCGLabelQemuLdst;
1568 typedef struct TCGBackendData {
1569 TCGLabelQemuLdst *labels;
1570 } TCGBackendData;
1572 static inline void tcg_out_tb_init(TCGContext *s)
1574 s->be->labels = NULL;
1577 static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
1578 tcg_insn_unit *label_ptr)
1580 TCGBackendData *be = s->be;
1581 TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));
1583 l->is_ld = is_ld;
1584 l->size = opc & MO_SIZE;
1585 l->label_ptr = label_ptr;
1586 l->next = be->labels;
1587 be->labels = l;
1590 static bool tcg_out_tb_finalize(TCGContext *s)
1592 static const void * const helpers[8] = {
1593 helper_ret_stb_mmu,
1594 helper_le_stw_mmu,
1595 helper_le_stl_mmu,
1596 helper_le_stq_mmu,
1597 helper_ret_ldub_mmu,
1598 helper_le_lduw_mmu,
1599 helper_le_ldul_mmu,
1600 helper_le_ldq_mmu,
1602 tcg_insn_unit *thunks[8] = { };
1603 TCGLabelQemuLdst *l;
1605 for (l = s->be->labels; l != NULL; l = l->next) {
1606 long x = l->is_ld * 4 + l->size;
1607 tcg_insn_unit *dest = thunks[x];
1609         /* The out-of-line thunks are all the same: load the return address
1610            from B0, load the GP, and branch to the code.  Note that we are
1611            always post-call, and thus the register window has rolled, so we
1612            use incoming parameter register numbers, not outgoing ones. */
1613 if (dest == NULL) {
1614 uintptr_t *desc = (uintptr_t *)helpers[x];
1615 uintptr_t func = desc[0], gp = desc[1], disp;
1617 thunks[x] = dest = s->code_ptr;
1619 tcg_out_bundle(s, mlx,
1620 INSN_NOP_M,
1621 tcg_opc_l2 (gp),
1622 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
1623 TCG_REG_R1, gp));
1624 tcg_out_bundle(s, mii,
1625 INSN_NOP_M,
1626 INSN_NOP_I,
1627 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
1628 l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
1629 TCG_REG_B0));
1630 disp = (tcg_insn_unit *)func - s->code_ptr;
1631 tcg_out_bundle(s, mLX,
1632 INSN_NOP_M,
1633 tcg_opc_l3 (disp),
1634 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
1637 reloc_pcrel21b_slot2(l->label_ptr, dest);
1639 /* Test for (pending) buffer overflow. The assumption is that any
1640 one operation beginning below the high water mark cannot overrun
1641 the buffer completely. Thus we can test for overflow after
1642 generating code without having to check during generation. */
1643 if (unlikely((void *)s->code_ptr > s->code_gen_highwater)) {
1644 return false;
1647 return true;
1650 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1652 static const uint64_t opc_ld_m1[4] = {
1653 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1655 int addr_reg, data_reg, mem_index;
1656 TCGMemOpIdx oi;
1657 TCGMemOp opc, s_bits;
1658 uint64_t fin1, fin2;
1659 tcg_insn_unit *label_ptr;
1661 data_reg = args[0];
1662 addr_reg = args[1];
1663 oi = args[2];
1664 opc = get_memop(oi);
1665 mem_index = get_mmuidx(oi);
1666 s_bits = opc & MO_SIZE;
1668 /* Read the TLB entry */
1669 tcg_out_qemu_tlb(s, addr_reg, opc,
1670 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
1671 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1672 INSN_NOP_I, INSN_NOP_I);
1674 /* P6 is the fast path, and P7 the slow path */
1676 fin2 = 0;
1677 if (opc & MO_BSWAP) {
1678 fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
1679 if (s_bits < MO_64) {
1680 int shift = 64 - (8 << s_bits);
1681 fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
1682 fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
1683 data_reg, data_reg, shift, 63 - shift);
1685 } else {
1686 fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
1689 tcg_out_bundle(s, mmI,
1690 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1691 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1692 TCG_REG_R2, TCG_REG_R57),
1693 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
1694 label_ptr = s->code_ptr;
1695 tcg_out_bundle(s, miB,
1696 tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
1697 TCG_REG_R8, TCG_REG_R2),
1698 INSN_NOP_I,
1699 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1700 get_reloc_pcrel21b_slot2(label_ptr)));
1702 add_qemu_ldst_label(s, 1, opc, label_ptr);
1704 /* Note that we always use LE helper functions, so the bswap insns
1705 here for the fast path also apply to the slow path. */
1706 tcg_out_bundle(s, (fin2 ? mII : miI),
1707 INSN_NOP_M,
1708 fin1,
1709 fin2 ? fin2 : INSN_NOP_I);
1712 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1714 static const uint64_t opc_st_m4[4] = {
1715 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1717 TCGReg addr_reg, data_reg;
1718 int mem_index;
1719 uint64_t pre1, pre2;
1720 TCGMemOpIdx oi;
1721 TCGMemOp opc, s_bits;
1722 tcg_insn_unit *label_ptr;
1724 data_reg = args[0];
1725 addr_reg = args[1];
1726 oi = args[2];
1727 opc = get_memop(oi);
1728 mem_index = get_mmuidx(oi);
1729 s_bits = opc & MO_SIZE;
1731 /* Note that we always use LE helper functions, so the bswap insns
1732 that are here for the fast path also apply to the slow path,
1733 and move the data into the argument register. */
1734 pre2 = INSN_NOP_I;
1735 if (opc & MO_BSWAP) {
1736 pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
1737 if (s_bits < MO_64) {
1738 int shift = 64 - (8 << s_bits);
1739 pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
1740 TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
1742 } else {
1743 /* Just move the data into place for the slow path. */
1744 pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
1747 tcg_out_qemu_tlb(s, addr_reg, opc,
1748 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
1749 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1750 pre1, pre2);
1752 /* P6 is the fast path, and P7 the slow path */
1753 tcg_out_bundle(s, mmI,
1754 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1755 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1756 TCG_REG_R2, TCG_REG_R57),
1757 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
1758 label_ptr = s->code_ptr;
1759 tcg_out_bundle(s, miB,
1760 tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
1761 TCG_REG_R58, TCG_REG_R2),
1762 INSN_NOP_I,
1763 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1764 get_reloc_pcrel21b_slot2(label_ptr)));
1766 add_qemu_ldst_label(s, 0, opc, label_ptr);
1769 #else /* !CONFIG_SOFTMMU */
1770 # include "tcg-be-null.h"
1772 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1774 static uint64_t const opc_ld_m1[4] = {
1775 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1777 int addr_reg, data_reg;
1778 TCGMemOp opc, s_bits, bswap;
1780 data_reg = args[0];
1781 addr_reg = args[1];
1782 opc = args[2];
1783 s_bits = opc & MO_SIZE;
1784 bswap = opc & MO_BSWAP;
1786 #if TARGET_LONG_BITS == 32
1787 if (guest_base != 0) {
1788 tcg_out_bundle(s, mII,
1789 INSN_NOP_M,
1790 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1791 TCG_REG_R3, addr_reg),
1792 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1793 TCG_GUEST_BASE_REG, TCG_REG_R3));
1794 } else {
1795 tcg_out_bundle(s, miI,
1796 INSN_NOP_M,
1797 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1798 TCG_REG_R2, addr_reg),
1799 INSN_NOP_I);
1802 if (!bswap) {
1803 if (!(opc & MO_SIGN)) {
1804 tcg_out_bundle(s, miI,
1805 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1806 data_reg, TCG_REG_R2),
1807 INSN_NOP_I,
1808 INSN_NOP_I);
1809 } else {
1810 tcg_out_bundle(s, mII,
1811 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1812 data_reg, TCG_REG_R2),
1813 INSN_NOP_I,
1814 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1816 } else if (s_bits == MO_64) {
1817 tcg_out_bundle(s, mII,
1818 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1819 data_reg, TCG_REG_R2),
1820 INSN_NOP_I,
1821 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1822 } else {
1823 if (s_bits == MO_16) {
1824 tcg_out_bundle(s, mII,
1825 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1826 data_reg, TCG_REG_R2),
1827 INSN_NOP_I,
1828 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1829 data_reg, data_reg, 15, 15));
1830 } else {
1831 tcg_out_bundle(s, mII,
1832 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1833 data_reg, TCG_REG_R2),
1834 INSN_NOP_I,
1835 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1836 data_reg, data_reg, 31, 31));
1838 if (!(opc & MO_SIGN)) {
1839 tcg_out_bundle(s, miI,
1840 INSN_NOP_M,
1841 INSN_NOP_I,
1842 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1843 } else {
1844 tcg_out_bundle(s, mII,
1845 INSN_NOP_M,
1846 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
1847 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1850 #else
1851 if (guest_base != 0) {
1852 tcg_out_bundle(s, MmI,
1853 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1854 TCG_GUEST_BASE_REG, addr_reg),
1855 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1856 data_reg, TCG_REG_R2),
1857 INSN_NOP_I);
1858 } else {
1859 tcg_out_bundle(s, mmI,
1860 INSN_NOP_M,
1861 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1862 data_reg, addr_reg),
1863 INSN_NOP_I);
1866 if (bswap && s_bits == MO_16) {
1867 tcg_out_bundle(s, mII,
1868 INSN_NOP_M,
1869 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1870 data_reg, data_reg, 15, 15),
1871 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1872 } else if (bswap && s_bits == MO_32) {
1873 tcg_out_bundle(s, mII,
1874 INSN_NOP_M,
1875 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1876 data_reg, data_reg, 31, 31),
1877 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1878 } else if (bswap && s_bits == MO_64) {
1879 tcg_out_bundle(s, miI,
1880 INSN_NOP_M,
1881 INSN_NOP_I,
1882 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1884 if (opc & MO_SIGN) {
1885 tcg_out_bundle(s, miI,
1886 INSN_NOP_M,
1887 INSN_NOP_I,
1888 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1890 #endif
1893 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1895 static uint64_t const opc_st_m4[4] = {
1896 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1898 int addr_reg, data_reg;
1899 #if TARGET_LONG_BITS == 64
1900 uint64_t add_guest_base;
1901 #endif
1902 TCGMemOp opc, s_bits, bswap;
1904 data_reg = args[0];
1905 addr_reg = args[1];
1906 opc = args[2];
1907 s_bits = opc & MO_SIZE;
1908 bswap = opc & MO_BSWAP;
1910 #if TARGET_LONG_BITS == 32
1911 if (guest_base != 0) {
1912 tcg_out_bundle(s, mII,
1913 INSN_NOP_M,
1914 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1915 TCG_REG_R3, addr_reg),
1916 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1917 TCG_GUEST_BASE_REG, TCG_REG_R3));
1918 } else {
1919 tcg_out_bundle(s, miI,
1920 INSN_NOP_M,
1921 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1922 TCG_REG_R2, addr_reg),
1923 INSN_NOP_I);
1926 if (bswap) {
1927 if (s_bits == MO_16) {
1928 tcg_out_bundle(s, mII,
1929 INSN_NOP_M,
1930 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1931 TCG_REG_R3, data_reg, 15, 15),
1932 tcg_opc_bswap64_i(TCG_REG_P0,
1933 TCG_REG_R3, TCG_REG_R3));
1934 data_reg = TCG_REG_R3;
1935 } else if (s_bits == MO_32) {
1936 tcg_out_bundle(s, mII,
1937 INSN_NOP_M,
1938 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1939 TCG_REG_R3, data_reg, 31, 31),
1940 tcg_opc_bswap64_i(TCG_REG_P0,
1941 TCG_REG_R3, TCG_REG_R3));
1942 data_reg = TCG_REG_R3;
1943 } else if (s_bits == MO_64) {
1944 tcg_out_bundle(s, miI,
1945 INSN_NOP_M,
1946 INSN_NOP_I,
1947 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1948 data_reg = TCG_REG_R3;
1951 tcg_out_bundle(s, mmI,
1952 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1953 data_reg, TCG_REG_R2),
1954 INSN_NOP_M,
1955 INSN_NOP_I);
1956 #else
1957 if (guest_base != 0) {
1958 add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1959 TCG_GUEST_BASE_REG, addr_reg);
1960 addr_reg = TCG_REG_R2;
1961 } else {
1962 add_guest_base = INSN_NOP_M;
1965 if (!bswap) {
1966 tcg_out_bundle(s, (guest_base ? MmI : mmI),
1967 add_guest_base,
1968 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1969 data_reg, addr_reg),
1970 INSN_NOP_I);
1971 } else {
1972 if (s_bits == MO_16) {
1973 tcg_out_bundle(s, mII,
1974 add_guest_base,
1975 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1976 TCG_REG_R3, data_reg, 15, 15),
1977 tcg_opc_bswap64_i(TCG_REG_P0,
1978 TCG_REG_R3, TCG_REG_R3));
1979 data_reg = TCG_REG_R3;
1980 } else if (s_bits == MO_32) {
1981 tcg_out_bundle(s, mII,
1982 add_guest_base,
1983 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1984 TCG_REG_R3, data_reg, 31, 31),
1985 tcg_opc_bswap64_i(TCG_REG_P0,
1986 TCG_REG_R3, TCG_REG_R3));
1987 data_reg = TCG_REG_R3;
1988 } else if (s_bits == MO_64) {
1989 tcg_out_bundle(s, miI,
1990 add_guest_base,
1991 INSN_NOP_I,
1992 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1993 data_reg = TCG_REG_R3;
1995 tcg_out_bundle(s, miI,
1996 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1997 data_reg, addr_reg),
1998 INSN_NOP_I,
1999 INSN_NOP_I);
2001 #endif
2004 #endif
2006 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
2007 const TCGArg *args, const int *const_args)
2009 switch(opc) {
2010 case INDEX_op_exit_tb:
2011 tcg_out_exit_tb(s, args[0]);
2012 break;
2013 case INDEX_op_br:
2014 tcg_out_br(s, arg_label(args[0]));
2015 break;
2016 case INDEX_op_goto_tb:
2017 tcg_out_goto_tb(s, args[0]);
2018 break;
2020 case INDEX_op_ld8u_i32:
2021 case INDEX_op_ld8u_i64:
2022 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2023 break;
2024 case INDEX_op_ld8s_i32:
2025 case INDEX_op_ld8s_i64:
2026 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2027 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2028 break;
2029 case INDEX_op_ld16u_i32:
2030 case INDEX_op_ld16u_i64:
2031 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2032 break;
2033 case INDEX_op_ld16s_i32:
2034 case INDEX_op_ld16s_i64:
2035 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2036 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2037 break;
2038 case INDEX_op_ld_i32:
2039 case INDEX_op_ld32u_i64:
2040 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2041 break;
2042 case INDEX_op_ld32s_i64:
2043 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2044 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2045 break;
2046 case INDEX_op_ld_i64:
2047 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2048 break;
2049 case INDEX_op_st8_i32:
2050 case INDEX_op_st8_i64:
2051 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2052 break;
2053 case INDEX_op_st16_i32:
2054 case INDEX_op_st16_i64:
2055 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2056 break;
2057 case INDEX_op_st_i32:
2058 case INDEX_op_st32_i64:
2059 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2060 break;
2061 case INDEX_op_st_i64:
2062 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2063 break;
2065 case INDEX_op_add_i32:
2066 case INDEX_op_add_i64:
2067 tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
2068 break;
2069 case INDEX_op_sub_i32:
2070 case INDEX_op_sub_i64:
2071 tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
2072 break;
2074 case INDEX_op_and_i32:
2075 case INDEX_op_and_i64:
2076 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2077 tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
2078 args[2], const_args[2], args[1], const_args[1]);
2079 break;
2080 case INDEX_op_andc_i32:
2081 case INDEX_op_andc_i64:
2082 tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
2083 args[1], const_args[1], args[2], const_args[2]);
2084 break;
2085 case INDEX_op_eqv_i32:
2086 case INDEX_op_eqv_i64:
2087 tcg_out_eqv(s, args[0], args[1], const_args[1],
2088 args[2], const_args[2]);
2089 break;
2090 case INDEX_op_nand_i32:
2091 case INDEX_op_nand_i64:
2092 tcg_out_nand(s, args[0], args[1], const_args[1],
2093 args[2], const_args[2]);
2094 break;
2095 case INDEX_op_nor_i32:
2096 case INDEX_op_nor_i64:
2097 tcg_out_nor(s, args[0], args[1], const_args[1],
2098 args[2], const_args[2]);
2099 break;
2100 case INDEX_op_or_i32:
2101 case INDEX_op_or_i64:
2102 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2103 tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
2104 args[2], const_args[2], args[1], const_args[1]);
2105 break;
2106 case INDEX_op_orc_i32:
2107 case INDEX_op_orc_i64:
2108 tcg_out_orc(s, args[0], args[1], const_args[1],
2109 args[2], const_args[2]);
2110 break;
2111 case INDEX_op_xor_i32:
2112 case INDEX_op_xor_i64:
2113 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2114 tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
2115 args[2], const_args[2], args[1], const_args[1]);
2116 break;
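/* Why the swap above: the ia64 A1 ALU form is register-register
   ("and r1 = r2, r3"), while the A3 form takes its 8-bit immediate in
   the first source slot ("and r1 = imm8, r3").  TCG canonicalizes a
   constant operand into arg2, and and/or/xor are commutative, so
   passing (args[2], args[1]) to tcg_out_alu puts a constant where the
   A3 encoding wants it; e.g. "xor_i32 t0, t1, 0x55" can be emitted as
   the A3 instruction "xor t0 = 0x55, t1". */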
2118 case INDEX_op_mul_i32:
2119 case INDEX_op_mul_i64:
2120 tcg_out_mul(s, args[0], args[1], args[2]);
2121 break;
2123 case INDEX_op_sar_i32:
2124 tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
2125 break;
2126 case INDEX_op_sar_i64:
2127 tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
2128 break;
2129 case INDEX_op_shl_i32:
2130 tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
2131 break;
2132 case INDEX_op_shl_i64:
2133 tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
2134 break;
2135 case INDEX_op_shr_i32:
2136 tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
2137 break;
2138 case INDEX_op_shr_i64:
2139 tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
2140 break;
2141 case INDEX_op_rotl_i32:
2142 tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
2143 break;
2144 case INDEX_op_rotl_i64:
2145 tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
2146 break;
2147 case INDEX_op_rotr_i32:
2148 tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
2149 break;
2150 case INDEX_op_rotr_i64:
2151 tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
2152 break;
2154 case INDEX_op_ext8s_i32:
2155 case INDEX_op_ext8s_i64:
2156 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
2157 break;
2158 case INDEX_op_ext8u_i32:
2159 case INDEX_op_ext8u_i64:
2160 tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
2161 break;
2162 case INDEX_op_ext16s_i32:
2163 case INDEX_op_ext16s_i64:
2164 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
2165 break;
2166 case INDEX_op_ext16u_i32:
2167 case INDEX_op_ext16u_i64:
2168 tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
2169 break;
2170 case INDEX_op_ext_i32_i64:
2171 case INDEX_op_ext32s_i64:
2172 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
2173 break;
2174 case INDEX_op_extu_i32_i64:
2175 case INDEX_op_ext32u_i64:
2176 tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
2177 break;
2179 case INDEX_op_bswap16_i32:
2180 case INDEX_op_bswap16_i64:
2181 tcg_out_bswap16(s, args[0], args[1]);
2182 break;
2183 case INDEX_op_bswap32_i32:
2184 case INDEX_op_bswap32_i64:
2185 tcg_out_bswap32(s, args[0], args[1]);
2186 break;
2187 case INDEX_op_bswap64_i64:
2188 tcg_out_bswap64(s, args[0], args[1]);
2189 break;
2191 case INDEX_op_deposit_i32:
2192 case INDEX_op_deposit_i64:
2193 tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
2194 args[3], args[4]);
2195 break;
2197 case INDEX_op_brcond_i32:
2198 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
2199 break;
2200 case INDEX_op_brcond_i64:
2201 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
2202 break;
2203 case INDEX_op_setcond_i32:
2204 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
2205 break;
2206 case INDEX_op_setcond_i64:
2207 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
2208 break;
2209 case INDEX_op_movcond_i32:
2210 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2211 args[3], const_args[3], args[4], const_args[4], 1);
2212 break;
2213 case INDEX_op_movcond_i64:
2214 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2215 args[3], const_args[3], args[4], const_args[4], 0);
2216 break;
2218 case INDEX_op_qemu_ld_i32:
2219 tcg_out_qemu_ld(s, args);
2220 break;
2221 case INDEX_op_qemu_ld_i64:
2222 tcg_out_qemu_ld(s, args);
2223 break;
2224 case INDEX_op_qemu_st_i32:
2225 tcg_out_qemu_st(s, args);
2226 break;
2227 case INDEX_op_qemu_st_i64:
2228 tcg_out_qemu_st(s, args);
2229 break;
2231 case INDEX_op_mb:
2232 tcg_out_bundle(s, mmI, OPC_MF_M24, INSN_NOP_M, INSN_NOP_I);
2233 break;
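/* The mb case above emits IA-64 "mf", a full memory fence that orders
   all earlier loads and stores against all later ones, so the TCGBar
   type bits in args[0] can be ignored and every memory-barrier op
   collapses into the same mmI bundle. */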
2234 case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
2235 case INDEX_op_mov_i64:
2236 case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
2237 case INDEX_op_movi_i64:
2238 case INDEX_op_call: /* Always emitted via tcg_out_call. */
2239 default:
2240 tcg_abort();
2241 }
2242 }
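/* Operand constraints used in the table below, as parsed elsewhere in
   this backend (rough summary): "r" is any general register, "Z" also
   accepts the constant zero (which simply encodes as r0), "I" also
   accepts a small signed immediate, "i" accepts any immediate, and "S"
   is roughly a register constraint restricted so the value is not
   clobbered while a qemu_st is being set up. */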
2244 static const TCGTargetOpDef ia64_op_defs[] = {
2245 { INDEX_op_br, { } },
2246 { INDEX_op_exit_tb, { } },
2247 { INDEX_op_goto_tb, { } },
2249 { INDEX_op_ld8u_i32, { "r", "r" } },
2250 { INDEX_op_ld8s_i32, { "r", "r" } },
2251 { INDEX_op_ld16u_i32, { "r", "r" } },
2252 { INDEX_op_ld16s_i32, { "r", "r" } },
2253 { INDEX_op_ld_i32, { "r", "r" } },
2254 { INDEX_op_st8_i32, { "rZ", "r" } },
2255 { INDEX_op_st16_i32, { "rZ", "r" } },
2256 { INDEX_op_st_i32, { "rZ", "r" } },
2258 { INDEX_op_add_i32, { "r", "rZ", "rI" } },
2259 { INDEX_op_sub_i32, { "r", "rI", "rI" } },
2261 { INDEX_op_and_i32, { "r", "rI", "rI" } },
2262 { INDEX_op_andc_i32, { "r", "rI", "rI" } },
2263 { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
2264 { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
2265 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
2266 { INDEX_op_or_i32, { "r", "rI", "rI" } },
2267 { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
2268 { INDEX_op_xor_i32, { "r", "rI", "rI" } },
2270 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
2272 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
2273 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
2274 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
2275 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
2276 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
2278 { INDEX_op_ext8s_i32, { "r", "rZ"} },
2279 { INDEX_op_ext8u_i32, { "r", "rZ"} },
2280 { INDEX_op_ext16s_i32, { "r", "rZ"} },
2281 { INDEX_op_ext16u_i32, { "r", "rZ"} },
2283 { INDEX_op_bswap16_i32, { "r", "rZ" } },
2284 { INDEX_op_bswap32_i32, { "r", "rZ" } },
2286 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
2287 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
2288 { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },
2290 { INDEX_op_ld8u_i64, { "r", "r" } },
2291 { INDEX_op_ld8s_i64, { "r", "r" } },
2292 { INDEX_op_ld16u_i64, { "r", "r" } },
2293 { INDEX_op_ld16s_i64, { "r", "r" } },
2294 { INDEX_op_ld32u_i64, { "r", "r" } },
2295 { INDEX_op_ld32s_i64, { "r", "r" } },
2296 { INDEX_op_ld_i64, { "r", "r" } },
2297 { INDEX_op_st8_i64, { "rZ", "r" } },
2298 { INDEX_op_st16_i64, { "rZ", "r" } },
2299 { INDEX_op_st32_i64, { "rZ", "r" } },
2300 { INDEX_op_st_i64, { "rZ", "r" } },
2302 { INDEX_op_add_i64, { "r", "rZ", "rI" } },
2303 { INDEX_op_sub_i64, { "r", "rI", "rI" } },
2305 { INDEX_op_and_i64, { "r", "rI", "rI" } },
2306 { INDEX_op_andc_i64, { "r", "rI", "rI" } },
2307 { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
2308 { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
2309 { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
2310 { INDEX_op_or_i64, { "r", "rI", "rI" } },
2311 { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
2312 { INDEX_op_xor_i64, { "r", "rI", "rI" } },
2314 { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },
2316 { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
2317 { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
2318 { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
2319 { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
2320 { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },
2322 { INDEX_op_ext8s_i64, { "r", "rZ"} },
2323 { INDEX_op_ext8u_i64, { "r", "rZ"} },
2324 { INDEX_op_ext16s_i64, { "r", "rZ"} },
2325 { INDEX_op_ext16u_i64, { "r", "rZ"} },
2326 { INDEX_op_ext32s_i64, { "r", "rZ"} },
2327 { INDEX_op_ext32u_i64, { "r", "rZ"} },
2328 { INDEX_op_ext_i32_i64, { "r", "rZ" } },
2329 { INDEX_op_extu_i32_i64, { "r", "rZ" } },
2331 { INDEX_op_bswap16_i64, { "r", "rZ" } },
2332 { INDEX_op_bswap32_i64, { "r", "rZ" } },
2333 { INDEX_op_bswap64_i64, { "r", "rZ" } },
2335 { INDEX_op_brcond_i64, { "rZ", "rZ" } },
2336 { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
2337 { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },
2339 { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
2340 { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },
2342 { INDEX_op_qemu_ld_i32, { "r", "r" } },
2343 { INDEX_op_qemu_ld_i64, { "r", "r" } },
2344 { INDEX_op_qemu_st_i32, { "SZ", "r" } },
2345 { INDEX_op_qemu_st_i64, { "SZ", "r" } },
2347 { INDEX_op_mb, { } },
2348 { -1 },
2349 };
2351 static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
2352 {
2353 int i, n = ARRAY_SIZE(ia64_op_defs);
2355 for (i = 0; i < n; ++i) {
2356 if (ia64_op_defs[i].op == op) {
2357 return &ia64_op_defs[i];
2358 }
2359 }
2360 return NULL;
2361 }
2363 /* Generate global QEMU prologue and epilogue code */
2364 static void tcg_target_qemu_prologue(TCGContext *s)
2365 {
2366 int frame_size;
2368 /* reserve some stack space */
2369 frame_size = TCG_STATIC_CALL_ARGS_SIZE +
2370 CPU_TEMP_BUF_NLONGS * sizeof(long);
2371 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
2372 ~(TCG_TARGET_STACK_ALIGN - 1);
2373 tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
2374 CPU_TEMP_BUF_NLONGS * sizeof(long));
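/* The round-up above is the usual power-of-two trick:
   (x + A - 1) & ~(A - 1) rounds x up to the next multiple of A, with
   A = TCG_TARGET_STACK_ALIGN.  The frame covers the fixed call
   argument area plus the TCG temporary buffer just registered with
   tcg_set_frame(). */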
2376 /* First emit an ad hoc function descriptor. */
2377 *s->code_ptr = (tcg_insn_unit){
2378 (uint64_t)(s->code_ptr + 1), /* entry point */
2379 0 /* skip gp */
2380 };
2381 s->code_ptr++;
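/* On IA-64 a function pointer designates a two-word descriptor of
   { entry address, gp } rather than code, so the prologue stores such
   a descriptor immediately before the real entry point; callers that
   dereference the descriptor land on the first bundle emitted below.
   The gp slot is left as 0 because the generated code never uses gp. */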
2383 /* prologue */
2384 tcg_out_bundle(s, miI,
2385 tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
2386 TCG_REG_R34, 32, 24, 0),
2387 INSN_NOP_I,
2388 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2389 TCG_REG_B6, TCG_REG_R33, 0));
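/* The alloc above presumably sets up a register stack frame of 32
   stacked registers (r32-r63), 24 of them local and 8 output, saving
   the caller's ar.pfs in r34 so the epilogue can restore it.  The
   second incoming argument (the pointer to the translated code)
   arrives in r33 and is copied to b6 for the indirect branch; r33 is
   then reused just below to hold the saved return register b0. */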
2391 /* ??? If guest_base < 0x200000, we could load the register via
2392 an ADDL in the M slot of the next bundle. */
2393 if (guest_base != 0) {
2394 tcg_out_bundle(s, mlx,
2395 INSN_NOP_M,
2396 tcg_opc_l2(guest_base),
2397 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
2398 TCG_GUEST_BASE_REG, guest_base));
2399 tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
2400 }
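/* On the ??? note above: guest_base values below 0x200000 fit addl's
   22-bit signed immediate, so the movl bundle could be replaced by a
   single A-unit add.  A rough, untested sketch, assuming the
   tcg_opc_a5()/OPC_ADDL_A5 helpers defined earlier in this file:
       tcg_out_bundle(s, miI,
                      tcg_opc_a5(TCG_REG_P0, OPC_ADDL_A5,
                                 TCG_GUEST_BASE_REG, guest_base,
                                 TCG_REG_R0),
                      INSN_NOP_I, INSN_NOP_I);
   or, as the comment suggests, the add could be folded into the M slot
   of the following miB bundle. */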
2402 tcg_out_bundle(s, miB,
2403 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2404 TCG_REG_R12, -frame_size, TCG_REG_R12),
2405 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
2406 TCG_REG_R33, TCG_REG_B0),
2407 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
2409 /* epilogue */
2410 tb_ret_addr = s->code_ptr;
2411 tcg_out_bundle(s, miI,
2412 INSN_NOP_M,
2413 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2414 TCG_REG_B0, TCG_REG_R33, 0),
2415 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2416 TCG_REG_R12, frame_size, TCG_REG_R12));
2417 tcg_out_bundle(s, miB,
2418 INSN_NOP_M,
2419 tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
2420 TCG_REG_PFS, TCG_REG_R34),
2421 tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
2422 TCG_REG_B0));
2423 }
2425 static void tcg_target_init(TCGContext *s)
2426 {
2427 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
2428 0xffffffffffffffffull);
2429 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
2430 0xffffffffffffffffull);
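/* Call-clobbered set below, following the IA-64 software conventions:
   r8-r11 (return values and scratch), r14-r31 (temporaries) and the
   stacked output registers r56-r63 are not preserved across calls;
   the remaining stacked locals are preserved by the register stack. */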
2432 tcg_regset_clear(tcg_target_call_clobber_regs);
2433 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
2434 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
2435 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
2436 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
2437 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
2438 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
2439 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
2440 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
2441 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
2442 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
2443 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
2444 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
2445 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
2446 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
2447 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
2448 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
2449 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
2450 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
2451 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
2452 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
2453 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
2454 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
2455 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
2456 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
2457 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
2458 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
2459 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
2460 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
2461 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
2462 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);
2464 tcg_regset_clear(s->reserved_regs);
2465 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0); /* zero register */
2466 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1); /* global pointer */
2467 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2); /* internal use */
2468 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3); /* internal use */
2469 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12); /* stack pointer */
2470 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */
2471 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33); /* return address */
2472 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34); /* PFS */
2474 /* The following four registers are unused and call-saved, but *not*
2475 saved by the prologue, so we cannot use them without first modifying
2476 the prologue. There also seems to be no good reason to prefer them
2477 over the windowed registers. */
2478 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
2479 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
2480 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
2481 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);
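/* The windowed locals r32-r55, by contrast, are spilled and refilled
   automatically by the register stack engine across calls, so there is
   little point in dipping into the static call-saved r4-r7 that the
   prologue does not preserve. */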