/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/*
 * Register definitions
 */

#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
    "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
    "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
    "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
    "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
    "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
    "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
    "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
};
#endif

#ifndef CONFIG_SOFTMMU
#define TCG_GUEST_BASE_REG TCG_REG_R55
#endif

/* Branch registers */
enum {
    TCG_REG_B0 = 0,
    TCG_REG_B1,
    TCG_REG_B2,
    TCG_REG_B3,
    TCG_REG_B4,
    TCG_REG_B5,
    TCG_REG_B6,
    TCG_REG_B7,
};

/* Floating point registers */
enum {
    TCG_REG_F0 = 0,
    TCG_REG_F1,
    TCG_REG_F2,
    TCG_REG_F3,
    TCG_REG_F4,
    TCG_REG_F5,
    TCG_REG_F6,
    TCG_REG_F7,
    TCG_REG_F8,
    TCG_REG_F9,
    TCG_REG_F10,
    TCG_REG_F11,
    TCG_REG_F12,
    TCG_REG_F13,
    TCG_REG_F14,
    TCG_REG_F15,
};

/* Predicate registers */
enum {
    TCG_REG_P0 = 0,
    TCG_REG_P1,
    TCG_REG_P2,
    TCG_REG_P3,
    TCG_REG_P4,
    TCG_REG_P5,
    TCG_REG_P6,
    TCG_REG_P7,
    TCG_REG_P8,
    TCG_REG_P9,
    TCG_REG_P10,
    TCG_REG_P11,
    TCG_REG_P12,
    TCG_REG_P13,
    TCG_REG_P14,
    TCG_REG_P15,
};

/* Application registers */
enum {
    TCG_REG_PFS = 64,
};

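/* The allocation order below prefers the local stacked registers r35-r55,
   which the register stack engine preserves across calls, over the static
   scratch registers r14-r31, and leaves the outgoing argument registers
   r56-r63 and the return registers r8-r11 for last. */
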
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R35,
    TCG_REG_R36,
    TCG_REG_R37,
    TCG_REG_R38,
    TCG_REG_R39,
    TCG_REG_R40,
    TCG_REG_R41,
    TCG_REG_R42,
    TCG_REG_R43,
    TCG_REG_R44,
    TCG_REG_R45,
    TCG_REG_R46,
    TCG_REG_R47,
    TCG_REG_R48,
    TCG_REG_R49,
    TCG_REG_R50,
    TCG_REG_R51,
    TCG_REG_R52,
    TCG_REG_R53,
    TCG_REG_R54,
    TCG_REG_R55,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
    TCG_REG_R56,
    TCG_REG_R57,
    TCG_REG_R58,
    TCG_REG_R59,
    TCG_REG_R60,
    TCG_REG_R61,
    TCG_REG_R62,
    TCG_REG_R63,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11
};

static const int tcg_target_call_iarg_regs[8] = {
    TCG_REG_R56,
    TCG_REG_R57,
    TCG_REG_R58,
    TCG_REG_R59,
    TCG_REG_R60,
    TCG_REG_R61,
    TCG_REG_R62,
    TCG_REG_R63,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R8
};

/*
 * opcode formation
 */

/* bundle templates: stops (double bar in the IA64 manual) are marked with
   an uppercase letter. */
enum {
    mii = 0x00,
    miI = 0x01,
    mIi = 0x02,
    mII = 0x03,
    mlx = 0x04,
    mLX = 0x05,
    mmi = 0x08,
    mmI = 0x09,
    Mmi = 0x0a,
    MmI = 0x0b,
    mfi = 0x0c,
    mfI = 0x0d,
    mmf = 0x0e,
    mmF = 0x0f,
    mib = 0x10,
    miB = 0x11,
    mbb = 0x12,
    mbB = 0x13,
    bbb = 0x16,
    bbB = 0x17,
    mmb = 0x18,
    mmB = 0x19,
    mfb = 0x1c,
    mfB = 0x1d,
};

enum {
    OPC_ADD_A1 = 0x10000000000ull,
    OPC_AND_A1 = 0x10060000000ull,
    OPC_AND_A3 = 0x10160000000ull,
    OPC_ANDCM_A1 = 0x10068000000ull,
    OPC_ANDCM_A3 = 0x10168000000ull,
    OPC_ADDS_A4 = 0x10800000000ull,
    OPC_ADDL_A5 = 0x12000000000ull,
    OPC_ALLOC_M34 = 0x02c00000000ull,
    OPC_BR_DPTK_FEW_B1 = 0x08400000000ull,
    OPC_BR_SPTK_MANY_B1 = 0x08000001000ull,
    OPC_BR_CALL_SPNT_FEW_B3 = 0x0a200000000ull,
    OPC_BR_SPTK_MANY_B4 = 0x00100001000ull,
    OPC_BR_CALL_SPTK_MANY_B5 = 0x02100001000ull,
    OPC_BR_RET_SPTK_MANY_B4 = 0x00108001100ull,
    OPC_BRL_SPTK_MANY_X3 = 0x18000001000ull,
    OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
    OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
    OPC_CMP_LT_A6 = 0x18000000000ull,
    OPC_CMP_LTU_A6 = 0x1a000000000ull,
    OPC_CMP_EQ_A6 = 0x1c000000000ull,
    OPC_CMP4_LT_A6 = 0x18400000000ull,
    OPC_CMP4_LTU_A6 = 0x1a400000000ull,
    OPC_CMP4_EQ_A6 = 0x1c400000000ull,
    OPC_DEP_I14 = 0x0ae00000000ull,
    OPC_DEP_I15 = 0x08000000000ull,
    OPC_DEP_Z_I12 = 0x0a600000000ull,
    OPC_EXTR_I11 = 0x0a400002000ull,
    OPC_EXTR_U_I11 = 0x0a400000000ull,
    OPC_FCVT_FX_TRUNC_S1_F10 = 0x004d0000000ull,
    OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
    OPC_FCVT_XF_F11 = 0x000e0000000ull,
    OPC_FMA_S1_F1 = 0x10400000000ull,
    OPC_FNMA_S1_F1 = 0x18400000000ull,
    OPC_FRCPA_S1_F6 = 0x00600000000ull,
    OPC_GETF_SIG_M19 = 0x08708000000ull,
    OPC_LD1_M1 = 0x08000000000ull,
    OPC_LD1_M3 = 0x0a000000000ull,
    OPC_LD2_M1 = 0x08040000000ull,
    OPC_LD2_M3 = 0x0a040000000ull,
    OPC_LD4_M1 = 0x08080000000ull,
    OPC_LD4_M3 = 0x0a080000000ull,
    OPC_LD8_M1 = 0x080c0000000ull,
    OPC_LD8_M3 = 0x0a0c0000000ull,
    OPC_MUX1_I3 = 0x0eca0000000ull,
    OPC_NOP_B9 = 0x04008000000ull,
    OPC_NOP_F16 = 0x00008000000ull,
    OPC_NOP_I18 = 0x00008000000ull,
    OPC_NOP_M48 = 0x00008000000ull,
    OPC_MOV_I21 = 0x00e00100000ull,
    OPC_MOV_RET_I21 = 0x00e00500000ull,
    OPC_MOV_I22 = 0x00188000000ull,
    OPC_MOV_I_I26 = 0x00150000000ull,
    OPC_MOVL_X2 = 0x0c000000000ull,
    OPC_OR_A1 = 0x10070000000ull,
    OPC_OR_A3 = 0x10170000000ull,
    OPC_SETF_EXP_M18 = 0x0c748000000ull,
    OPC_SETF_SIG_M18 = 0x0c708000000ull,
    OPC_SHL_I7 = 0x0f240000000ull,
    OPC_SHR_I5 = 0x0f220000000ull,
    OPC_SHR_U_I5 = 0x0f200000000ull,
    OPC_SHRP_I10 = 0x0ac00000000ull,
    OPC_SXT1_I29 = 0x000a0000000ull,
    OPC_SXT2_I29 = 0x000a8000000ull,
    OPC_SXT4_I29 = 0x000b0000000ull,
    OPC_ST1_M4 = 0x08c00000000ull,
    OPC_ST2_M4 = 0x08c40000000ull,
    OPC_ST4_M4 = 0x08c80000000ull,
    OPC_ST8_M4 = 0x08cc0000000ull,
    OPC_SUB_A1 = 0x10028000000ull,
    OPC_SUB_A3 = 0x10128000000ull,
    OPC_UNPACK4_L_I2 = 0x0f860000000ull,
    OPC_XMA_L_F2 = 0x1d000000000ull,
    OPC_XOR_A1 = 0x10078000000ull,
    OPC_XOR_A3 = 0x10178000000ull,
    OPC_ZXT1_I29 = 0x00080000000ull,
    OPC_ZXT2_I29 = 0x00088000000ull,
    OPC_ZXT4_I29 = 0x00090000000ull,

    INSN_NOP_M = OPC_NOP_M48, /* nop.m 0 */
    INSN_NOP_I = OPC_NOP_I18, /* nop.i 0 */
};

static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x80) << 29) /* s */
           | ((imm & 0x7f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x2000) << 23) /* s */
           | ((imm & 0x1f80) << 20) /* imm6d */
           | ((imm & 0x007f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x200000) << 15) /* s */
           | ((imm & 0x1f0000) << 6)  /* imm5c */
           | ((imm & 0x00ff80) << 20) /* imm9d */
           | ((imm & 0x00007f) << 13) /* imm7b */
           | ((r3 & 0x03) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
                                  int p2, int r2, int r3)
{
    return opc
           | ((p2 & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((p1 & 0x3f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* s */
           | ((imm & 0x0fffff) << 13) /* imm20b */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* s */
           | ((imm & 0x0fffff) << 13) /* imm20b */
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    return opc
           | ((f4 & 0x7f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    return opc
           | ((f4 & 0x7f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
                                  int p2, int f2, int f3)
{
    return opc
           | ((p2 & 0x3f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
                                  int r2, int mbtype)
{
    return opc
           | ((mbtype & 0x0f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
                                  int r3, int r2)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
                                   int r2, int r3, uint64_t count)
{
    return opc
           | ((count & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((len & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((pos & 0x3f) << 14)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
                                   int r2, uint64_t pos, uint64_t len)
{
    return opc
           | ((len & 0x3f) << 27)
           | ((pos & 0x3f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((imm & 0x01) << 36)
           | ((len & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((pos & 0x3f) << 14)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((pos & 0x3f) << 31)
           | ((len & 0x0f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
                                   int r2, uint64_t imm)
{
    return opc
           | ((imm & 0x1ff) << 24)
           | ((r2 & 0x7f) << 13)
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
{
    return opc
           | ((ar3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

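/* movl and brl take a long immediate that is split across the two slots of
   an L+X bundle (mlx/mLX): tcg_opc_l2/l3 below return the bits that live in
   the L slot, while tcg_opc_x2/x3/x4 scatter the remaining bits into the
   fields of the X slot. */
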
static inline uint64_t tcg_opc_l2(uint64_t imm)
{
    return (imm & 0x7fffffffffc00000ull) >> 22;
}

static inline uint64_t tcg_opc_l3(uint64_t imm)
{
    return (imm & 0x07fffffffff00000ull) >> 18;
}

#define tcg_opc_l4 tcg_opc_l3

static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
                                  int r3, uint64_t imm)
{
    return opc
           | ((imm & 0x100) << 28) /* s */
           | ((imm & 0x080) << 20) /* i */
           | ((imm & 0x07f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
{
    return opc
           | ((r2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
                                   int sof, int sol, int sor)
{
    return opc
           | ((sor & 0x0f) << 27)
           | ((sol & 0x7f) << 20)
           | ((sof & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
                                  int r1, uint64_t imm)
{
    return opc
           | ((imm & 0x8000000000000000ull) >> 27) /* i */
           |  (imm & 0x0000000000200000ull)        /* ic */
           | ((imm & 0x00000000001f0000ull) << 6)  /* imm5c */
           | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
           | ((imm & 0x000000000000007full) << 13) /* imm7b */
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x0800000000000000ull) >> 23) /* i */
           | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
{
    return opc
           | ((imm & 0x0800000000000000ull) >> 23) /* i */
           | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

/*
 * Relocations - Note that we never encode branches elsewhere than slot 2.
 */

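/* The 21-bit branch displacement (sign bit plus imm20b) of a slot 2
   instruction lives entirely in the high 64-bit word of the bundle:
   imm20b in bits 36..55 and the sign bit in bit 59.  The two helpers
   below write and read just those bits, leaving the rest of the bundle
   untouched. */
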
static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    uint64_t imm = target - pc;

    pc->hi = (pc->hi & 0xf700000fffffffffull)
             | ((imm & 0x100000) << 39)  /* s */
             | ((imm & 0x0fffff) << 36); /* imm20b */
}

static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
{
    int64_t high = pc->hi;

    return ((high >> 39) & 0x100000) + /* s */
           ((high >> 36) & 0x0fffff);  /* imm20b */
}

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == R_IA64_PCREL21B);
    reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
}

/*
 * Constraints
 */

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S22;
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
#endif
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           r0 is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
        return 1;
    else
        return 0;
}

/*
 * Code generation
 */

static tcg_insn_unit *tb_ret_addr;

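/* An IA-64 bundle is 128 bits: a 5-bit template in bits 0..4 and three
   41-bit instruction slots in bits 5..45, 46..86 and 87..127.  Slot 1
   straddles the two 64-bit halves of tcg_insn_unit, which is why it is
   split below between lo (shifted left by 46) and hi (shifted right
   by 18). */
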
static inline void tcg_out_bundle(TCGContext *s, int template,
                                  uint64_t slot0, uint64_t slot1,
                                  uint64_t slot2)
{
    template &= 0x1f;          /* 5 bits */
    slot0 &= 0x1ffffffffffull; /* 41 bits */
    slot1 &= 0x1ffffffffffull; /* 41 bits */
    slot2 &= 0x1ffffffffffull; /* 41 bits */

    *s->code_ptr++ = (tcg_insn_unit){
        (slot1 << 46) | (slot0 << 5) | template,
        (slot2 << 23) | (slot1 >> 18)
    };
}

static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
{
    return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_mov_a(TCG_REG_P0, ret, arg));
}

static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
{
    tcg_debug_assert(src == sextract64(src, 0, 22));
    return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l2 (arg),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
}

static void tcg_out_br(TCGContext *s, TCGLabel *l)
{
    uint64_t imm;

    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    if (l->has_value) {
        imm = l->u.value_ptr - s->code_ptr;
    } else {
        imm = get_reloc_pcrel21b_slot2(s->code_ptr);
        tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
    }

    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
}

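/* An IA-64 function pointer is a descriptor of two words: the entry point
   and the gp value of the callee.  The call sequence below loads gp into r1
   and then performs a pc-relative brl.call to the entry point. */
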
static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
{
    uintptr_t func = desc->lo, gp = desc->hi, disp;

    /* Look through the function descriptor.  */
    tcg_out_bundle(s, mlx,
                   INSN_NOP_M,
                   tcg_opc_l2 (gp),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
    disp = (tcg_insn_unit *)func - s->code_ptr;
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l4 (disp),
                   tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
                               TCG_REG_B0, disp));
}

static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
{
    uint64_t imm, opc1;

    /* At least arg == 0 is a common operation. */
    if (arg == sextract64(arg, 0, 22)) {
        opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
        opc1 = INSN_NOP_M;
    }

    imm = tb_ret_addr - s->code_ptr;

    tcg_out_bundle(s, mLX,
                   opc1,
                   tcg_opc_l3 (imm),
                   tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
}

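/* Only the indirect goto_tb method is supported: the target address is
   reloaded from tb_jmp_target_addr[arg] on every execution and branched to
   through b6, so the branch itself never needs to be patched. */
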
static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
{
    if (s->tb_jmp_insn_offset) {
        /* direct jump method */
        tcg_abort();
    } else {
        /* indirect jump method */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
                     (tcg_target_long)(s->tb_jmp_target_addr + arg));
        tcg_out_bundle(s, MmI,
                       tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
                                   TCG_REG_R2, TCG_REG_R2),
                       INSN_NOP_M,
                       tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
                                   TCG_REG_R2, 0));
        tcg_out_bundle(s, mmB,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
                                   TCG_REG_B6));
    }
    s->tb_jmp_reset_offset[arg] = tcg_current_code_size(s);
}

static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
}

static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}

static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
    } else {
        tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
    }
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
    } else {
        tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    if (val == 0) {
        tcg_out_st(s, type, TCG_REG_R0, base, ofs);
        return true;
    }
    return false;
}

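/* Emit a two-operand ALU operation as a single bundle.  Non-zero constant
   operands that do not fit the 8-bit immediate of the A3 form are first
   materialised into r2/r3 in the leading slots of the same bundle. */
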
static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
                               TCGReg ret, TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    uint64_t opc1 = 0, opc2 = 0, opc3 = 0;

    if (const_arg2 && arg2 != 0) {
        opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
        arg2 = TCG_REG_R3;
    }
    if (const_arg1 && arg1 != 0) {
        if (opc_a3 && arg1 == (int8_t)arg1) {
            opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
        } else {
            opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
            arg1 = TCG_REG_R2;
        }
    }
    if (opc3 == 0) {
        opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
    }

    tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
                   opc1 ? opc1 : INSN_NOP_M,
                   opc2 ? opc2 : INSN_NOP_I,
                   opc3);
}

static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
                               TCGArg arg2, int const_arg2)
{
    if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
    }
}

static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
                               int const_arg1, TCGArg arg2, int const_arg2)
{
    if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
                    arg1, const_arg1, arg2, const_arg2);
    }
}

static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
                                TCGArg arg1, int const_arg1,
                                TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
                   tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
}

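/* There is no integer multiply in the IA-64 integer units: the operands are
   moved to the FP register file with setf.sig, multiplied with xma.l, and
   the result is read back with getf.sig. */
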
static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
                               TCGArg arg1, TCGArg arg2)
{
    tcg_out_bundle(s, mmI,
                   tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
                   tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
                   INSN_NOP_I);
    tcg_out_bundle(s, mmF,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
                               TCG_REG_F7, TCG_REG_F0));
    tcg_out_bundle(s, miI,
                   tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
                   INSN_NOP_I,
                   INSN_NOP_I);
}

static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
                                   ret, arg1, arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
                                   TCG_REG_R3, 0x1f, arg2),
                       tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
                                   ret, arg1, arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
    }
}

static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
                                   arg1, 63 - arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
                                   0x1f, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
                                   arg1, TCG_REG_R2));
    }
}

static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
                                   arg1, 63 - arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
                                   arg1, arg2));
    }
}

static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   arg1, arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2),
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   arg1, arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   arg1, arg2));
    }
}

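/* 32-bit rotates: unpack4.l with both source operands equal replicates the
   low 32 bits into both halves of a scratch register, so extracting a
   32-bit field at the appropriate position yields the rotated value. */
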
static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   TCG_REG_R2, 32 - arg2, 31));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2));
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
                                   0x20, TCG_REG_R3),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
                                   arg1, 0x40 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
                                   0x40, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
                                   arg1, arg2),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
                                   arg1, TCG_REG_R2));
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   TCG_REG_R2, arg2, 31));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2),
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
                                   arg1, arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
                                   0x40, arg2),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
                                   arg1, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
                                   arg1, TCG_REG_R2));
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

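/* Extension opcodes indexed by MO_SSIZE: the low two bits select the
   operand size, MO_SIGN selects sxt over zxt.  MO_64 has no extension
   instruction and is handled as a plain move in tcg_opc_ext_i. */
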
static const uint64_t opc_ext_i29[8] = {
    OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
    OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
};

static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
{
    if ((opc & MO_SIZE) == MO_64) {
        return tcg_opc_mov_a(qp, d, s);
    } else {
        return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
    }
}

static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
                               TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   INSN_NOP_I,
                   tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
}

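/* Byte swapping is done with mux1 @rev (mbtype 0xb), which reverses all
   eight bytes of a register.  The 16- and 32-bit variants first move the
   value into the most significant bytes with dep.z so that the reversed
   result lands in the low bits. */
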
static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
{
    return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
}

static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
}

static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
}

static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   INSN_NOP_I,
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
}

static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
                                   TCGArg a2, int const_a2, int pos, int len)
{
    uint64_t i1 = 0, i2 = 0;
    int cpos = 63 - pos, lm1 = len - 1;

    if (const_a2) {
        /* Truncate the value of a constant a2 to the width of the field.  */
        int mask = (1u << len) - 1;
        a2 &= mask;

        if (a2 == 0 || a2 == mask) {
            /* 1-bit signed constant inserted into register.  */
            i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
        } else {
            /* Otherwise, load any constant into a temporary.  Do this into
               the first I slot to help out with cross-unit delays.  */
            i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
            a2 = TCG_REG_R2;
        }
    }
    if (i2 == 0) {
        i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
    }
    tcg_out_bundle(s, (i1 ? mII : miI),
                   INSN_NOP_M,
                   i1 ? i1 : INSN_NOP_I,
                   i2);
}

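/* Generate an A6 compare that sets P6 when COND holds and P7 when it does
   not.  Only eq, lt and ltu exist in hardware; the remaining conditions are
   obtained by swapping either the predicate targets or the operands. */
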
static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
                                     TCGArg arg2, int cmp4)
{
    uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;

    if (cmp4) {
        opc_eq_a6 = OPC_CMP4_EQ_A6;
        opc_lt_a6 = OPC_CMP4_LT_A6;
        opc_ltu_a6 = OPC_CMP4_LTU_A6;
    } else {
        opc_eq_a6 = OPC_CMP_EQ_A6;
        opc_lt_a6 = OPC_CMP_LT_A6;
        opc_ltu_a6 = OPC_CMP_LTU_A6;
    }

    switch (cond) {
    case TCG_COND_EQ:
        return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_NE:
        return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_LT:
        return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_LTU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_GE:
        return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_GEU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_LE:
        return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
    case TCG_COND_LEU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
    case TCG_COND_GT:
        return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
    case TCG_COND_GTU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
    default:
        tcg_abort();
        break;
    }
}

static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
                                  TCGReg arg2, TCGLabel *l, int cmp4)
{
    uint64_t imm;

    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    if (l->has_value) {
        imm = l->u.value_ptr - s->code_ptr;
    } else {
        imm = get_reloc_pcrel21b_slot2(s->code_ptr);
        tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
    }

    tcg_out_bundle(s, miB,
                   INSN_NOP_M,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
                   tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
}

static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
                                   TCGArg arg1, TCGArg arg2, int cmp4)
{
    tcg_out_bundle(s, MmI,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
                   tcg_opc_movi_a(TCG_REG_P6, ret, 1),
                   tcg_opc_movi_a(TCG_REG_P7, ret, 0));
}

static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
                                   TCGArg c1, TCGArg c2,
                                   TCGArg v1, int const_v1,
                                   TCGArg v2, int const_v2, int cmp4)
{
    uint64_t opc1, opc2;

    if (const_v1) {
        opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
    } else if (ret == v1) {
        opc1 = INSN_NOP_M;
    } else {
        opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
    }
    if (const_v2) {
        opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
    } else if (ret == v2) {
        opc2 = INSN_NOP_I;
    } else {
        opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
    }

    tcg_out_bundle(s, MmI,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
                   opc1,
                   opc2);
}

#if defined(CONFIG_SOFTMMU)
/* We're expecting to use a signed 22-bit immediate add.  */
QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
                  > 0x1fffff)

/* Load and compare a TLB entry, and return the result in (p6, p7).
   R2 is loaded with the addend TLB entry.
   R57 is loaded with the address, zero extended on 32-bit targets.
   R1, R3 are clobbered, leaving R56 free for...
   BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store.  */

static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
                                    TCGMemOp s_bits, int off_rw, int off_add,
                                    uint64_t bswap1, uint64_t bswap2)
{
    /*
       .mii
        mov     r2 = off_rw
        extr.u  r3 = addr_reg, ...          # extract tlb page
        zxt4    r57 = addr_reg              # or mov for 64-bit guest
        ;;
       .mii
        addl    r2 = r2, areg0
        shl     r3 = r3, cteb               # via dep.z
        dep     r1 = 0, r57, ...            # zero page ofs, keep align
        ;;
       .mmi
        add     r2 = r2, r3
        ;;
        ld4     r3 = [r2], off_add-off_rw   # or ld8 for 64-bit guest
        nop
        ;;
       .mmi
        nop
        cmp.eq  p6, p7 = r3, r58
        nop
        ;;
     */
    tcg_out_bundle(s, miI,
                   tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
                   tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
                               addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
                   tcg_opc_ext_i(TCG_REG_P0,
                                 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
                                 TCG_REG_R57, addr_reg));
    tcg_out_bundle(s, miI,
                   tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_AREG0),
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
                               TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
                               63 - CPU_TLB_ENTRY_BITS),
                   tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
                               TCG_REG_R57, 63 - s_bits,
                               TARGET_PAGE_BITS - s_bits - 1));
    tcg_out_bundle(s, MmI,
                   tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                               TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
                   tcg_opc_m3 (TCG_REG_P0,
                               (TARGET_LONG_BITS == 32
                                ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
                               TCG_REG_R2, off_add - off_rw),
                   bswap1);
    tcg_out_bundle(s, mmI,
                   tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
                   tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
                               TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
                   bswap2);
}

typedef struct TCGLabelQemuLdst {
    bool is_ld;
    TCGMemOp size;
    tcg_insn_unit *label_ptr;     /* label pointers to be updated */
    struct TCGLabelQemuLdst *next;
} TCGLabelQemuLdst;

typedef struct TCGBackendData {
    TCGLabelQemuLdst *labels;
} TCGBackendData;

static inline void tcg_out_tb_init(TCGContext *s)
{
    s->be->labels = NULL;
}

static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
                                tcg_insn_unit *label_ptr)
{
    TCGBackendData *be = s->be;
    TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));

    l->is_ld = is_ld;
    l->size = opc & MO_SIZE;
    l->label_ptr = label_ptr;
    l->next = be->labels;
    be->labels = l;
}

static bool tcg_out_tb_finalize(TCGContext *s)
{
    static const void * const helpers[8] = {
        helper_ret_stb_mmu,
        helper_le_stw_mmu,
        helper_le_stl_mmu,
        helper_le_stq_mmu,
        helper_ret_ldub_mmu,
        helper_le_lduw_mmu,
        helper_le_ldul_mmu,
        helper_le_ldq_mmu,
    };
    tcg_insn_unit *thunks[8] = { };
    TCGLabelQemuLdst *l;

    for (l = s->be->labels; l != NULL; l = l->next) {
        long x = l->is_ld * 4 + l->size;
        tcg_insn_unit *dest = thunks[x];

        /* The out-of-line thunks are all the same; load the return address
           from B0, load the GP, and branch to the code.  Note that we are
           always post-call, so the register window has rolled, so we're
           using incoming parameter register numbers, not outgoing.  */
        if (dest == NULL) {
            uintptr_t *desc = (uintptr_t *)helpers[x];
            uintptr_t func = desc[0], gp = desc[1], disp;

            thunks[x] = dest = s->code_ptr;

            tcg_out_bundle(s, mlx,
                           INSN_NOP_M,
                           tcg_opc_l2 (gp),
                           tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
                                       TCG_REG_R1, gp));
            tcg_out_bundle(s, mii,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
                                       l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
                                       TCG_REG_B0));
            disp = (tcg_insn_unit *)func - s->code_ptr;
            tcg_out_bundle(s, mLX,
                           INSN_NOP_M,
                           tcg_opc_l3 (disp),
                           tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
        }

        reloc_pcrel21b_slot2(l->label_ptr, dest);

        /* Test for (pending) buffer overflow.  The assumption is that any
           one operation beginning below the high water mark cannot overrun
           the buffer completely.  Thus we can test for overflow after
           generating code without having to check during generation.  */
        if (unlikely((void *)s->code_ptr > s->code_gen_highwater)) {
            return false;
        }
    }
    return true;
}

static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
{
    static const uint64_t opc_ld_m1[4] = {
        OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
    };
    int addr_reg, data_reg, mem_index;
    TCGMemOpIdx oi;
    TCGMemOp opc, s_bits;
    uint64_t fin1, fin2;
    tcg_insn_unit *label_ptr;

    data_reg = args[0];
    addr_reg = args[1];
    oi = args[2];
    opc = get_memop(oi);
    mem_index = get_mmuidx(oi);
    s_bits = opc & MO_SIZE;

    /* Read the TLB entry */
    tcg_out_qemu_tlb(s, addr_reg, s_bits,
                     offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
                     offsetof(CPUArchState, tlb_table[mem_index][0].addend),
                     INSN_NOP_I, INSN_NOP_I);

    /* P6 is the fast path, and P7 the slow path */

    fin2 = 0;
    if (opc & MO_BSWAP) {
        fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
        if (s_bits < MO_64) {
            int shift = 64 - (8 << s_bits);
            fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
            fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
                               data_reg, data_reg, shift, 63 - shift);
        }
    } else {
        fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
    }

    tcg_out_bundle(s, mmI,
                   tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
                   tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_REG_R57),
                   tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
    label_ptr = s->code_ptr;
    tcg_out_bundle(s, miB,
                   tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
                               TCG_REG_R8, TCG_REG_R2),
                   INSN_NOP_I,
                   tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
                               get_reloc_pcrel21b_slot2(label_ptr)));

    add_qemu_ldst_label(s, 1, opc, label_ptr);

    /* Note that we always use LE helper functions, so the bswap insns
       here for the fast path also apply to the slow path.  */
    tcg_out_bundle(s, (fin2 ? mII : miI),
                   INSN_NOP_M,
                   fin1,
                   fin2 ? fin2 : INSN_NOP_I);
}

static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
{
    static const uint64_t opc_st_m4[4] = {
        OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
    };
    TCGReg addr_reg, data_reg;
    int mem_index;
    uint64_t pre1, pre2;
    TCGMemOpIdx oi;
    TCGMemOp opc, s_bits;
    tcg_insn_unit *label_ptr;

    data_reg = args[0];
    addr_reg = args[1];
    oi = args[2];
    opc = get_memop(oi);
    mem_index = get_mmuidx(oi);
    s_bits = opc & MO_SIZE;

    /* Note that we always use LE helper functions, so the bswap insns
       that are here for the fast path also apply to the slow path,
       and move the data into the argument register.  */
    pre2 = INSN_NOP_I;
    if (opc & MO_BSWAP) {
        pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
        if (s_bits < MO_64) {
            int shift = 64 - (8 << s_bits);
            pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
                               TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
        }
    } else {
        /* Just move the data into place for the slow path.  */
        pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
    }

    tcg_out_qemu_tlb(s, addr_reg, s_bits,
                     offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
                     offsetof(CPUArchState, tlb_table[mem_index][0].addend),
                     pre1, pre2);

    /* P6 is the fast path, and P7 the slow path */
    tcg_out_bundle(s, mmI,
                   tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
                   tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_REG_R57),
                   tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
    label_ptr = s->code_ptr;
    tcg_out_bundle(s, miB,
                   tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
                               TCG_REG_R58, TCG_REG_R2),
                   INSN_NOP_I,
                   tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
                               get_reloc_pcrel21b_slot2(label_ptr)));

    add_qemu_ldst_label(s, 0, opc, label_ptr);
}

#else /* !CONFIG_SOFTMMU */
# include "tcg-be-null.h"

static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
{
    static uint64_t const opc_ld_m1[4] = {
        OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
    };
    int addr_reg, data_reg;
    TCGMemOp opc, s_bits, bswap;

    data_reg = args[0];
    addr_reg = args[1];
    opc = args[2];
    s_bits = opc & MO_SIZE;
    bswap = opc & MO_BSWAP;

#if TARGET_LONG_BITS == 32
    if (guest_base != 0) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R3, addr_reg),
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, TCG_REG_R3));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R2, addr_reg),
                       INSN_NOP_I);
    }

    if (!bswap) {
        if (!(opc & MO_SIGN)) {
            tcg_out_bundle(s, miI,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           INSN_NOP_I);
        } else {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
        }
    } else if (s_bits == MO_64) {
        tcg_out_bundle(s, mII,
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, TCG_REG_R2),
                       INSN_NOP_I,
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       data_reg, data_reg, 15, 15));
        } else {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       data_reg, data_reg, 31, 31));
        }
        if (!(opc & MO_SIGN)) {
            tcg_out_bundle(s, miI,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
        } else {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
                           tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
        }
    }
#else
    if (guest_base != 0) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, addr_reg),
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I);
    }

    if (bswap && s_bits == MO_16) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                   data_reg, data_reg, 15, 15),
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else if (bswap && s_bits == MO_32) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                   data_reg, data_reg, 31, 31),
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else if (bswap && s_bits == MO_64) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    }
    if (opc & MO_SIGN) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
    }
#endif
}

static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
{
    static uint64_t const opc_st_m4[4] = {
        OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
    };
    int addr_reg, data_reg;
#if TARGET_LONG_BITS == 64
    uint64_t add_guest_base;
#endif
    TCGMemOp opc, s_bits, bswap;

    data_reg = args[0];
    addr_reg = args[1];
    opc = args[2];
    s_bits = opc & MO_SIZE;
    bswap = opc & MO_BSWAP;

#if TARGET_LONG_BITS == 32
    if (guest_base != 0) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R3, addr_reg),
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, TCG_REG_R3));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R2, addr_reg),
                       INSN_NOP_I);
    }

    if (bswap) {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 15, 15),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_32) {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 31, 31),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_64) {
            tcg_out_bundle(s, miI,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
            data_reg = TCG_REG_R3;
        }
    }
    tcg_out_bundle(s, mmI,
                   tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                               data_reg, TCG_REG_R2),
                   INSN_NOP_M,
                   INSN_NOP_I);
#else
    if (guest_base != 0) {
        add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                     TCG_GUEST_BASE_REG, addr_reg);
        addr_reg = TCG_REG_R2;
    } else {
        add_guest_base = INSN_NOP_M;
    }

    if (!bswap) {
        tcg_out_bundle(s, (guest_base ? MmI : mmI),
                       add_guest_base,
                       tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I);
    } else {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           add_guest_base,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 15, 15),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_32) {
            tcg_out_bundle(s, mII,
                           add_guest_base,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 31, 31),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_64) {
            tcg_out_bundle(s, miI,
                           add_guest_base,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
            data_reg = TCG_REG_R3;
        }
        tcg_out_bundle(s, miI,
                       tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I,
                       INSN_NOP_I);
    }
#endif
}

#endif

2001 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
2002 const TCGArg *args, const int *const_args)
2004 switch(opc) {
2005 case INDEX_op_exit_tb:
2006 tcg_out_exit_tb(s, args[0]);
2007 break;
2008 case INDEX_op_br:
2009 tcg_out_br(s, arg_label(args[0]));
2010 break;
2011 case INDEX_op_goto_tb:
2012 tcg_out_goto_tb(s, args[0]);
2013 break;
2015 case INDEX_op_ld8u_i32:
2016 case INDEX_op_ld8u_i64:
2017 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2018 break;
2019 case INDEX_op_ld8s_i32:
2020 case INDEX_op_ld8s_i64:
2021 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2022 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2023 break;
2024 case INDEX_op_ld16u_i32:
2025 case INDEX_op_ld16u_i64:
2026 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2027 break;
2028 case INDEX_op_ld16s_i32:
2029 case INDEX_op_ld16s_i64:
2030 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2031 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2032 break;
2033 case INDEX_op_ld_i32:
2034 case INDEX_op_ld32u_i64:
2035 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2036 break;
2037 case INDEX_op_ld32s_i64:
2038 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2039 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2040 break;
2041 case INDEX_op_ld_i64:
2042 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2043 break;
2044 case INDEX_op_st8_i32:
2045 case INDEX_op_st8_i64:
2046 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2047 break;
2048 case INDEX_op_st16_i32:
2049 case INDEX_op_st16_i64:
2050 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2051 break;
2052 case INDEX_op_st_i32:
2053 case INDEX_op_st32_i64:
2054 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2055 break;
2056 case INDEX_op_st_i64:
2057 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2058 break;
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
        tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
        tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
        break;

    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap. */
        tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
        tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
                    args[1], const_args[1], args[2], const_args[2]);
        break;
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
        tcg_out_eqv(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
        tcg_out_nand(s, args[0], args[1], const_args[1],
                     args[2], const_args[2]);
        break;
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
        tcg_out_nor(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap. */
        tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
        tcg_out_orc(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap. */
        tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
        tcg_out_mul(s, args[0], args[1], args[2]);
        break;

    case INDEX_op_sar_i32:
        tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sar_i64:
        tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i32:
        tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i64:
        tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i32:
        tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i64:
        tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i32:
        tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i64:
        tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i32:
        tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i64:
        tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
        tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
        tcg_out_bswap32(s, args[0], args[1]);
        break;
    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
                        args[3], args[4]);
        break;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
        break;
    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 1);
        break;
    case INDEX_op_movcond_i64:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 0);
        break;
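
    /* Guest memory accesses.  tcg_out_qemu_ld/st emit either a softmmu
       TLB lookup with a helper call on the slow path or, for user-only
       emulation, an access offset by TCG_GUEST_BASE_REG when guest_base
       is non-zero.  */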
    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args);
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
}
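
/* Operand constraints for the opcodes above.  "r" is any general
   register; "Z" additionally accepts the constant zero (r0); "I" a
   small signed immediate; "i" any constant; "S" is like "r" but
   restricted for the qemu_st data operand.  The letters are decoded
   by target_parse_constraint earlier in this file.  */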
static const TCGTargetOpDef ia64_op_defs[] = {
    { INDEX_op_br, { } },
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i32, { "r", "rI", "rI" } },

    { INDEX_op_and_i32, { "r", "rI", "rI" } },
    { INDEX_op_andc_i32, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i32, { "r", "rI", "rI" } },
    { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i32, { "r", "rI", "rI" } },

    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext8u_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },
    { INDEX_op_ext16u_i32, { "r", "rZ" } },

    { INDEX_op_bswap16_i32, { "r", "rZ" } },
    { INDEX_op_bswap32_i32, { "r", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "rZ", "r" } },
    { INDEX_op_st16_i64, { "rZ", "r" } },
    { INDEX_op_st32_i64, { "rZ", "r" } },
    { INDEX_op_st_i64, { "rZ", "r" } },

    { INDEX_op_add_i64, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i64, { "r", "rI", "rI" } },

    { INDEX_op_and_i64, { "r", "rI", "rI" } },
    { INDEX_op_andc_i64, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i64, { "r", "rI", "rI" } },
    { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i64, { "r", "rI", "rI" } },

    { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i64, { "r", "rZ" } },
    { INDEX_op_ext8u_i64, { "r", "rZ" } },
    { INDEX_op_ext16s_i64, { "r", "rZ" } },
    { INDEX_op_ext16u_i64, { "r", "rZ" } },
    { INDEX_op_ext32s_i64, { "r", "rZ" } },
    { INDEX_op_ext32u_i64, { "r", "rZ" } },
    { INDEX_op_ext_i32_i64, { "r", "rZ" } },
    { INDEX_op_extu_i32_i64, { "r", "rZ" } },

    { INDEX_op_bswap16_i64, { "r", "rZ" } },
    { INDEX_op_bswap32_i64, { "r", "rZ" } },
    { INDEX_op_bswap64_i64, { "r", "rZ" } },

    { INDEX_op_brcond_i64, { "rZ", "rZ" } },
    { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
    { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },

    { INDEX_op_qemu_ld_i32, { "r", "r" } },
    { INDEX_op_qemu_ld_i64, { "r", "r" } },
    { INDEX_op_qemu_st_i32, { "SZ", "r" } },
    { INDEX_op_qemu_st_i64, { "SZ", "r" } },

    { -1 },
};
/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int frame_size;

    /* reserve some stack space */
    frame_size = TCG_STATIC_CALL_ARGS_SIZE +
                 CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* First emit adhoc function descriptor */
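    /* An IA-64 function pointer designates such a descriptor: one
       16-byte tcg_insn_unit holding the entry address and the gp value
       (unused here, hence zero).  */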
    *s->code_ptr = (tcg_insn_unit){
        (uint64_t)(s->code_ptr + 1), /* entry point */
        0                            /* skip gp */
    };
    s->code_ptr++;
    /* prologue */
    tcg_out_bundle(s, miI,
                   tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
                               TCG_REG_R34, 32, 24, 0),
                   INSN_NOP_I,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B6, TCG_REG_R33, 0));

    /* ??? If guest_base < 0x200000, we could load the register via
       an ADDL in the M slot of the next bundle.  */
    if (guest_base != 0) {
        tcg_out_bundle(s, mlx,
                       INSN_NOP_M,
                       tcg_opc_l2(guest_base),
                       tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
                                   TCG_GUEST_BASE_REG, guest_base));
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }

    tcg_out_bundle(s, miB,
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, -frame_size, TCG_REG_R12),
                   tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
                               TCG_REG_R33, TCG_REG_B0),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
    /* epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B0, TCG_REG_R33, 0),
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, frame_size, TCG_REG_R12));
    tcg_out_bundle(s, miB,
                   INSN_NOP_M,
                   tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
                               TCG_REG_PFS, TCG_REG_R34),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
                               TCG_REG_B0));
}
static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
                   0xffffffffffffffffull);
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
                   0xffffffffffffffffull);

    tcg_regset_clear(tcg_target_call_clobber_regs);
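
    /* Per the IA-64 software conventions, r8-r11 (return values),
       r14-r31 (scratch) and the stacked output registers r56-r63 are
       not preserved across calls.  */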
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);  /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);  /* global pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);  /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);  /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12); /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33); /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34); /* PFS */

    /* The following 4 are not in use, are call-saved, but *not* saved
       by the prologue.  Therefore we cannot use them without modifying
       the prologue.  There doesn't seem to be any good reason to use
       these as opposed to the windowed registers.  */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);

    tcg_add_target_add_op_defs(ia64_op_defs);
}