linux-user: remove --enable-guest-base/--disable-guest-base
[qemu/ar7.git] / tcg / ia64 / tcg-target.c
blob 64b5cb6d973ea5479863b450478fe300d4a9b4d2
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
5 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
27 * Register definitions
30 #ifndef NDEBUG
31 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
32 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
33 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
34 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
35 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
36 "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
37 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
38 "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
39 "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
41 #endif
43 #ifndef CONFIG_SOFTMMU
44 #define TCG_GUEST_BASE_REG TCG_REG_R55
45 #endif
46 #ifndef GUEST_BASE
47 #define GUEST_BASE 0
48 #endif
50 /* Branch registers */
51 enum {
52 TCG_REG_B0 = 0,
53 TCG_REG_B1,
54 TCG_REG_B2,
55 TCG_REG_B3,
56 TCG_REG_B4,
57 TCG_REG_B5,
58 TCG_REG_B6,
59 TCG_REG_B7,
62 /* Floating point registers */
63 enum {
64 TCG_REG_F0 = 0,
65 TCG_REG_F1,
66 TCG_REG_F2,
67 TCG_REG_F3,
68 TCG_REG_F4,
69 TCG_REG_F5,
70 TCG_REG_F6,
71 TCG_REG_F7,
72 TCG_REG_F8,
73 TCG_REG_F9,
74 TCG_REG_F10,
75 TCG_REG_F11,
76 TCG_REG_F12,
77 TCG_REG_F13,
78 TCG_REG_F14,
79 TCG_REG_F15,
82 /* Predicate registers */
83 enum {
84 TCG_REG_P0 = 0,
85 TCG_REG_P1,
86 TCG_REG_P2,
87 TCG_REG_P3,
88 TCG_REG_P4,
89 TCG_REG_P5,
90 TCG_REG_P6,
91 TCG_REG_P7,
92 TCG_REG_P8,
93 TCG_REG_P9,
94 TCG_REG_P10,
95 TCG_REG_P11,
96 TCG_REG_P12,
97 TCG_REG_P13,
98 TCG_REG_P14,
99 TCG_REG_P15,
102 /* Application registers */
103 enum {
104 TCG_REG_PFS = 64,
107 static const int tcg_target_reg_alloc_order[] = {
108 TCG_REG_R35,
109 TCG_REG_R36,
110 TCG_REG_R37,
111 TCG_REG_R38,
112 TCG_REG_R39,
113 TCG_REG_R40,
114 TCG_REG_R41,
115 TCG_REG_R42,
116 TCG_REG_R43,
117 TCG_REG_R44,
118 TCG_REG_R45,
119 TCG_REG_R46,
120 TCG_REG_R47,
121 TCG_REG_R48,
122 TCG_REG_R49,
123 TCG_REG_R50,
124 TCG_REG_R51,
125 TCG_REG_R52,
126 TCG_REG_R53,
127 TCG_REG_R54,
128 TCG_REG_R55,
129 TCG_REG_R14,
130 TCG_REG_R15,
131 TCG_REG_R16,
132 TCG_REG_R17,
133 TCG_REG_R18,
134 TCG_REG_R19,
135 TCG_REG_R20,
136 TCG_REG_R21,
137 TCG_REG_R22,
138 TCG_REG_R23,
139 TCG_REG_R24,
140 TCG_REG_R25,
141 TCG_REG_R26,
142 TCG_REG_R27,
143 TCG_REG_R28,
144 TCG_REG_R29,
145 TCG_REG_R30,
146 TCG_REG_R31,
147 TCG_REG_R56,
148 TCG_REG_R57,
149 TCG_REG_R58,
150 TCG_REG_R59,
151 TCG_REG_R60,
152 TCG_REG_R61,
153 TCG_REG_R62,
154 TCG_REG_R63,
155 TCG_REG_R8,
156 TCG_REG_R9,
157 TCG_REG_R10,
158 TCG_REG_R11
161 static const int tcg_target_call_iarg_regs[8] = {
162 TCG_REG_R56,
163 TCG_REG_R57,
164 TCG_REG_R58,
165 TCG_REG_R59,
166 TCG_REG_R60,
167 TCG_REG_R61,
168 TCG_REG_R62,
169 TCG_REG_R63,
172 static const int tcg_target_call_oarg_regs[] = {
173 TCG_REG_R8
177 * opcode formation
 180 /* Bundle templates: stops (the double bar in the IA-64 manual) are marked
 181 with an uppercase letter. */
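/* Reading these values against the bundle writer further down: the lower-case
   letters give the unit type of each slot (m = M-unit, i = I-unit, b = B-unit,
   f = F-unit, l/x = the long-immediate L+X pair), and an upper-case letter
   marks a stop after that slot.  For example, mII (0x03) is an M-I-I bundle
   with a stop after each I slot, while mLX (0x05) is the movl/brl template
   with a single stop at the end of the bundle.  */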
182 enum {
183 mii = 0x00,
184 miI = 0x01,
185 mIi = 0x02,
186 mII = 0x03,
187 mlx = 0x04,
188 mLX = 0x05,
189 mmi = 0x08,
190 mmI = 0x09,
191 Mmi = 0x0a,
192 MmI = 0x0b,
193 mfi = 0x0c,
194 mfI = 0x0d,
195 mmf = 0x0e,
196 mmF = 0x0f,
197 mib = 0x10,
198 miB = 0x11,
199 mbb = 0x12,
200 mbB = 0x13,
201 bbb = 0x16,
202 bbB = 0x17,
203 mmb = 0x18,
204 mmB = 0x19,
205 mfb = 0x1c,
206 mfB = 0x1d,
209 enum {
210 OPC_ADD_A1 = 0x10000000000ull,
211 OPC_AND_A1 = 0x10060000000ull,
212 OPC_AND_A3 = 0x10160000000ull,
213 OPC_ANDCM_A1 = 0x10068000000ull,
214 OPC_ANDCM_A3 = 0x10168000000ull,
215 OPC_ADDS_A4 = 0x10800000000ull,
216 OPC_ADDL_A5 = 0x12000000000ull,
217 OPC_ALLOC_M34 = 0x02c00000000ull,
218 OPC_BR_DPTK_FEW_B1 = 0x08400000000ull,
219 OPC_BR_SPTK_MANY_B1 = 0x08000001000ull,
220 OPC_BR_CALL_SPNT_FEW_B3 = 0x0a200000000ull,
221 OPC_BR_SPTK_MANY_B4 = 0x00100001000ull,
222 OPC_BR_CALL_SPTK_MANY_B5 = 0x02100001000ull,
223 OPC_BR_RET_SPTK_MANY_B4 = 0x00108001100ull,
224 OPC_BRL_SPTK_MANY_X3 = 0x18000001000ull,
225 OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
226 OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
227 OPC_CMP_LT_A6 = 0x18000000000ull,
228 OPC_CMP_LTU_A6 = 0x1a000000000ull,
229 OPC_CMP_EQ_A6 = 0x1c000000000ull,
230 OPC_CMP4_LT_A6 = 0x18400000000ull,
231 OPC_CMP4_LTU_A6 = 0x1a400000000ull,
232 OPC_CMP4_EQ_A6 = 0x1c400000000ull,
233 OPC_DEP_I14 = 0x0ae00000000ull,
234 OPC_DEP_I15 = 0x08000000000ull,
235 OPC_DEP_Z_I12 = 0x0a600000000ull,
236 OPC_EXTR_I11 = 0x0a400002000ull,
237 OPC_EXTR_U_I11 = 0x0a400000000ull,
238 OPC_FCVT_FX_TRUNC_S1_F10 = 0x004d0000000ull,
239 OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
240 OPC_FCVT_XF_F11 = 0x000e0000000ull,
241 OPC_FMA_S1_F1 = 0x10400000000ull,
242 OPC_FNMA_S1_F1 = 0x18400000000ull,
243 OPC_FRCPA_S1_F6 = 0x00600000000ull,
244 OPC_GETF_SIG_M19 = 0x08708000000ull,
245 OPC_LD1_M1 = 0x08000000000ull,
246 OPC_LD1_M3 = 0x0a000000000ull,
247 OPC_LD2_M1 = 0x08040000000ull,
248 OPC_LD2_M3 = 0x0a040000000ull,
249 OPC_LD4_M1 = 0x08080000000ull,
250 OPC_LD4_M3 = 0x0a080000000ull,
251 OPC_LD8_M1 = 0x080c0000000ull,
252 OPC_LD8_M3 = 0x0a0c0000000ull,
253 OPC_MUX1_I3 = 0x0eca0000000ull,
254 OPC_NOP_B9 = 0x04008000000ull,
255 OPC_NOP_F16 = 0x00008000000ull,
256 OPC_NOP_I18 = 0x00008000000ull,
257 OPC_NOP_M48 = 0x00008000000ull,
258 OPC_MOV_I21 = 0x00e00100000ull,
259 OPC_MOV_RET_I21 = 0x00e00500000ull,
260 OPC_MOV_I22 = 0x00188000000ull,
261 OPC_MOV_I_I26 = 0x00150000000ull,
262 OPC_MOVL_X2 = 0x0c000000000ull,
263 OPC_OR_A1 = 0x10070000000ull,
264 OPC_OR_A3 = 0x10170000000ull,
265 OPC_SETF_EXP_M18 = 0x0c748000000ull,
266 OPC_SETF_SIG_M18 = 0x0c708000000ull,
267 OPC_SHL_I7 = 0x0f240000000ull,
268 OPC_SHR_I5 = 0x0f220000000ull,
269 OPC_SHR_U_I5 = 0x0f200000000ull,
270 OPC_SHRP_I10 = 0x0ac00000000ull,
271 OPC_SXT1_I29 = 0x000a0000000ull,
272 OPC_SXT2_I29 = 0x000a8000000ull,
273 OPC_SXT4_I29 = 0x000b0000000ull,
274 OPC_ST1_M4 = 0x08c00000000ull,
275 OPC_ST2_M4 = 0x08c40000000ull,
276 OPC_ST4_M4 = 0x08c80000000ull,
277 OPC_ST8_M4 = 0x08cc0000000ull,
278 OPC_SUB_A1 = 0x10028000000ull,
279 OPC_SUB_A3 = 0x10128000000ull,
280 OPC_UNPACK4_L_I2 = 0x0f860000000ull,
281 OPC_XMA_L_F2 = 0x1d000000000ull,
282 OPC_XOR_A1 = 0x10078000000ull,
283 OPC_XOR_A3 = 0x10178000000ull,
284 OPC_ZXT1_I29 = 0x00080000000ull,
285 OPC_ZXT2_I29 = 0x00088000000ull,
286 OPC_ZXT4_I29 = 0x00090000000ull,
288 INSN_NOP_M = OPC_NOP_M48, /* nop.m 0 */
289 INSN_NOP_I = OPC_NOP_I18, /* nop.i 0 */
292 static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
293 int r2, int r3)
295 return opc
296 | ((r3 & 0x7f) << 20)
297 | ((r2 & 0x7f) << 13)
298 | ((r1 & 0x7f) << 6)
299 | (qp & 0x3f);
302 static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
303 uint64_t imm, int r3)
305 return opc
306 | ((imm & 0x80) << 29) /* s */
307 | ((imm & 0x7f) << 13) /* imm7b */
308 | ((r3 & 0x7f) << 20)
309 | ((r1 & 0x7f) << 6)
310 | (qp & 0x3f);
313 static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
314 uint64_t imm, int r3)
316 return opc
317 | ((imm & 0x2000) << 23) /* s */
318 | ((imm & 0x1f80) << 20) /* imm6d */
319 | ((imm & 0x007f) << 13) /* imm7b */
320 | ((r3 & 0x7f) << 20)
321 | ((r1 & 0x7f) << 6)
322 | (qp & 0x3f);
325 static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
326 uint64_t imm, int r3)
328 return opc
329 | ((imm & 0x200000) << 15) /* s */
330 | ((imm & 0x1f0000) << 6) /* imm5c */
331 | ((imm & 0x00ff80) << 20) /* imm9d */
332 | ((imm & 0x00007f) << 13) /* imm7b */
333 | ((r3 & 0x03) << 20)
334 | ((r1 & 0x7f) << 6)
335 | (qp & 0x3f);
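/* Note on the A5 (addl) form above: the immediate is 22 bits (s + imm5c +
   imm9d + imm7b) and the r3 field is only two bits wide, so only r0-r3 can be
   named as the source register -- hence the "r3 & 0x03" mask.  tcg_opc_movi_a
   below relies on this, adding the constant to r0 (which always reads as
   zero) to synthesize a 22-bit move-immediate.  */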
338 static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
339 int p2, int r2, int r3)
341 return opc
342 | ((p2 & 0x3f) << 27)
343 | ((r3 & 0x7f) << 20)
344 | ((r2 & 0x7f) << 13)
345 | ((p1 & 0x3f) << 6)
346 | (qp & 0x3f);
349 static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
351 return opc
352 | ((imm & 0x100000) << 16) /* s */
353 | ((imm & 0x0fffff) << 13) /* imm20b */
354 | (qp & 0x3f);
357 static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
359 return opc
360 | ((imm & 0x100000) << 16) /* s */
361 | ((imm & 0x0fffff) << 13) /* imm20b */
362 | ((b1 & 0x7) << 6)
363 | (qp & 0x3f);
366 static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
368 return opc
369 | ((b2 & 0x7) << 13)
370 | (qp & 0x3f);
373 static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
375 return opc
376 | ((b2 & 0x7) << 13)
377 | ((b1 & 0x7) << 6)
378 | (qp & 0x3f);
382 static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
384 return opc
385 | ((imm & 0x100000) << 16) /* i */
386 | ((imm & 0x0fffff) << 6) /* imm20a */
387 | (qp & 0x3f);
390 static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
391 int f3, int f4, int f2)
393 return opc
394 | ((f4 & 0x7f) << 27)
395 | ((f3 & 0x7f) << 20)
396 | ((f2 & 0x7f) << 13)
397 | ((f1 & 0x7f) << 6)
398 | (qp & 0x3f);
401 static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
402 int f3, int f4, int f2)
404 return opc
405 | ((f4 & 0x7f) << 27)
406 | ((f3 & 0x7f) << 20)
407 | ((f2 & 0x7f) << 13)
408 | ((f1 & 0x7f) << 6)
409 | (qp & 0x3f);
412 static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
413 int p2, int f2, int f3)
415 return opc
416 | ((p2 & 0x3f) << 27)
417 | ((f3 & 0x7f) << 20)
418 | ((f2 & 0x7f) << 13)
419 | ((f1 & 0x7f) << 6)
420 | (qp & 0x3f);
423 static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
425 return opc
426 | ((f2 & 0x7f) << 13)
427 | ((f1 & 0x7f) << 6)
428 | (qp & 0x3f);
431 static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
433 return opc
434 | ((f2 & 0x7f) << 13)
435 | ((f1 & 0x7f) << 6)
436 | (qp & 0x3f);
439 static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
441 return opc
442 | ((imm & 0x100000) << 16) /* i */
443 | ((imm & 0x0fffff) << 6) /* imm20a */
444 | (qp & 0x3f);
447 static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
448 int r2, int r3)
450 return opc
451 | ((r3 & 0x7f) << 20)
452 | ((r2 & 0x7f) << 13)
453 | ((r1 & 0x7f) << 6)
454 | (qp & 0x3f);
457 static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
458 int r2, int mbtype)
460 return opc
461 | ((mbtype & 0x0f) << 20)
462 | ((r2 & 0x7f) << 13)
463 | ((r1 & 0x7f) << 6)
464 | (qp & 0x3f);
467 static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
468 int r3, int r2)
470 return opc
471 | ((r3 & 0x7f) << 20)
472 | ((r2 & 0x7f) << 13)
473 | ((r1 & 0x7f) << 6)
474 | (qp & 0x3f);
477 static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
478 int r2, int r3)
480 return opc
481 | ((r3 & 0x7f) << 20)
482 | ((r2 & 0x7f) << 13)
483 | ((r1 & 0x7f) << 6)
484 | (qp & 0x3f);
487 static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
488 int r2, int r3, uint64_t count)
490 return opc
491 | ((count & 0x3f) << 27)
492 | ((r3 & 0x7f) << 20)
493 | ((r2 & 0x7f) << 13)
494 | ((r1 & 0x7f) << 6)
495 | (qp & 0x3f);
498 static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
499 int r3, uint64_t pos, uint64_t len)
501 return opc
502 | ((len & 0x3f) << 27)
503 | ((r3 & 0x7f) << 20)
504 | ((pos & 0x3f) << 14)
505 | ((r1 & 0x7f) << 6)
506 | (qp & 0x3f);
509 static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
510 int r2, uint64_t pos, uint64_t len)
512 return opc
513 | ((len & 0x3f) << 27)
514 | ((pos & 0x3f) << 20)
515 | ((r2 & 0x7f) << 13)
516 | ((r1 & 0x7f) << 6)
517 | (qp & 0x3f);
520 static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
521 int r3, uint64_t pos, uint64_t len)
523 return opc
524 | ((imm & 0x01) << 36)
525 | ((len & 0x3f) << 27)
526 | ((r3 & 0x7f) << 20)
527 | ((pos & 0x3f) << 14)
528 | ((r1 & 0x7f) << 6)
529 | (qp & 0x3f);
532 static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
533 int r3, uint64_t pos, uint64_t len)
535 return opc
536 | ((pos & 0x3f) << 31)
537 | ((len & 0x0f) << 27)
538 | ((r3 & 0x7f) << 20)
539 | ((r2 & 0x7f) << 13)
540 | ((r1 & 0x7f) << 6)
541 | (qp & 0x3f);
544 static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
546 return opc
547 | ((imm & 0x100000) << 16) /* i */
548 | ((imm & 0x0fffff) << 6) /* imm20a */
549 | (qp & 0x3f);
552 static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
553 int r2, uint64_t imm)
555 return opc
556 | ((imm & 0x1ff) << 24)
557 | ((r2 & 0x7f) << 13)
558 | ((b1 & 0x7) << 6)
559 | (qp & 0x3f);
562 static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
564 return opc
565 | ((b2 & 0x7) << 13)
566 | ((r1 & 0x7f) << 6)
567 | (qp & 0x3f);
570 static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
572 return opc
573 | ((ar3 & 0x7f) << 20)
574 | ((r2 & 0x7f) << 13)
575 | (qp & 0x3f);
578 static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
580 return opc
581 | ((r3 & 0x7f) << 20)
582 | ((r1 & 0x7f) << 6)
583 | (qp & 0x3f);
586 static inline uint64_t tcg_opc_l2(uint64_t imm)
588 return (imm & 0x7fffffffffc00000ull) >> 22;
591 static inline uint64_t tcg_opc_l3(uint64_t imm)
593 return (imm & 0x07fffffffff00000ull) >> 18;
596 #define tcg_opc_l4 tcg_opc_l3
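/* The L-slot helpers above pair with the X-slot encoders further down:
   tcg_opc_l2 + tcg_opc_x2 split a full 64-bit immediate between the L and X
   slots for movl, while tcg_opc_l3/tcg_opc_l4 + tcg_opc_x3/tcg_opc_x4 do the
   same for the long displacement of brl and brl.call.  The shifts here simply
   place the upper bits of the value at the L slot's field position; the
   remaining bits are packed by the matching X-slot routine.  */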
598 static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
600 return opc
601 | ((r3 & 0x7f) << 20)
602 | ((r1 & 0x7f) << 6)
603 | (qp & 0x3f);
606 static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
607 int r3, uint64_t imm)
609 return opc
610 | ((imm & 0x100) << 28) /* s */
611 | ((imm & 0x080) << 20) /* i */
612 | ((imm & 0x07f) << 13) /* imm7b */
613 | ((r3 & 0x7f) << 20)
614 | ((r1 & 0x7f) << 6)
615 | (qp & 0x3f);
618 static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
620 return opc
621 | ((r3 & 0x7f) << 20)
622 | ((r2 & 0x7f) << 13)
623 | (qp & 0x3f);
626 static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
628 return opc
629 | ((r2 & 0x7f) << 13)
630 | ((f1 & 0x7f) << 6)
631 | (qp & 0x3f);
634 static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
636 return opc
637 | ((f2 & 0x7f) << 13)
638 | ((r1 & 0x7f) << 6)
639 | (qp & 0x3f);
642 static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
643 int sof, int sol, int sor)
645 return opc
646 | ((sor & 0x0f) << 27)
647 | ((sol & 0x7f) << 20)
648 | ((sof & 0x7f) << 13)
649 | ((r1 & 0x7f) << 6)
650 | (qp & 0x3f);
653 static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
655 return opc
656 | ((imm & 0x100000) << 16) /* i */
657 | ((imm & 0x0fffff) << 6) /* imm20a */
658 | (qp & 0x3f);
661 static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
662 int r1, uint64_t imm)
664 return opc
665 | ((imm & 0x8000000000000000ull) >> 27) /* i */
666 | (imm & 0x0000000000200000ull) /* ic */
667 | ((imm & 0x00000000001f0000ull) << 6) /* imm5c */
668 | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
669 | ((imm & 0x000000000000007full) << 13) /* imm7b */
670 | ((r1 & 0x7f) << 6)
671 | (qp & 0x3f);
674 static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
676 return opc
677 | ((imm & 0x0800000000000000ull) >> 23) /* i */
678 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
679 | (qp & 0x3f);
682 static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
684 return opc
685 | ((imm & 0x0800000000000000ull) >> 23) /* i */
686 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
687 | ((b1 & 0x7) << 6)
688 | (qp & 0x3f);
 693 * Relocations - Note that we only ever encode branches in slot 2.
696 static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
698 uint64_t imm = target - pc;
700 pc->hi = (pc->hi & 0xf700000fffffffffull)
701 | ((imm & 0x100000) << 39) /* s */
702 | ((imm & 0x0fffff) << 36); /* imm20b */
705 static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
707 int64_t high = pc->hi;
709 return ((high >> 39) & 0x100000) + /* s */
710 ((high >> 36) & 0x0fffff); /* imm20b */
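/* A sketch of the encoding being patched, as implied by the masks above:
   branch displacements are measured in 16-byte bundles, so "target - pc"
   (pointer arithmetic on tcg_insn_unit, i.e. whole bundles) is already the
   21-bit value we need.  Slot 2 lives entirely in the high 64-bit word of the
   bundle, with imm20b at bits 36..55 and the sign bit at bit 59, which is why
   a single read-modify-write of pc->hi suffices.  */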
713 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
714 intptr_t value, intptr_t addend)
716 assert(addend == 0);
717 assert(type == R_IA64_PCREL21B);
718 reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
722 * Constraints
725 /* parse target specific constraints */
726 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
728 const char *ct_str;
730 ct_str = *pct_str;
731 switch(ct_str[0]) {
732 case 'r':
733 ct->ct |= TCG_CT_REG;
734 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
735 break;
736 case 'I':
737 ct->ct |= TCG_CT_CONST_S22;
738 break;
739 case 'S':
740 ct->ct |= TCG_CT_REG;
741 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
742 #if defined(CONFIG_SOFTMMU)
743 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
744 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
745 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
746 #endif
747 break;
748 case 'Z':
 749 /* We cheat a bit here, relying on the fact that register r0 (which
 750 always reads as zero) is also register number 0, so there is no need
 751 to check const_args in each instruction. */
752 ct->ct |= TCG_CT_CONST_ZERO;
753 break;
754 default:
755 return -1;
757 ct_str++;
758 *pct_str = ct_str;
759 return 0;
762 /* test if a constant matches the constraint */
763 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
764 const TCGArgConstraint *arg_ct)
766 int ct;
767 ct = arg_ct->ct;
768 if (ct & TCG_CT_CONST)
769 return 1;
770 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
771 return 1;
772 else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
773 return 1;
774 else
775 return 0;
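/* The S22 test above is a sign-extension check: shifting left by 10 and
   arithmetically back right by 10 replicates bit 21 into the upper bits, so
   the comparison holds exactly when val already fits in a signed 22-bit
   immediate -- the range accepted by the addl/A5 form.  */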
779 * Code generation
782 static tcg_insn_unit *tb_ret_addr;
784 static inline void tcg_out_bundle(TCGContext *s, int template,
785 uint64_t slot0, uint64_t slot1,
786 uint64_t slot2)
788 template &= 0x1f; /* 5 bits */
789 slot0 &= 0x1ffffffffffull; /* 41 bits */
790 slot1 &= 0x1ffffffffffull; /* 41 bits */
791 slot2 &= 0x1ffffffffffull; /* 41 bits */
793 *s->code_ptr++ = (tcg_insn_unit){
794 (slot1 << 46) | (slot0 << 5) | template,
795 (slot2 << 23) | (slot1 >> 18)
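/* Layout produced above, for reference: an IA-64 bundle is 128 bits -- a
   5-bit template in bits 0..4, slot 0 in bits 5..45, slot 1 in bits 46..86
   and slot 2 in bits 87..127.  Slot 1 therefore straddles the two 64-bit
   words, which is why its low 18 bits go into the first word (<< 46) and its
   upper 23 bits into the second (>> 18).  */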
799 static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
801 return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
804 static inline void tcg_out_mov(TCGContext *s, TCGType type,
805 TCGReg ret, TCGReg arg)
807 tcg_out_bundle(s, mmI,
808 INSN_NOP_M,
809 INSN_NOP_M,
810 tcg_opc_mov_a(TCG_REG_P0, ret, arg));
813 static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
815 assert(src == sextract64(src, 0, 22));
816 return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
819 static inline void tcg_out_movi(TCGContext *s, TCGType type,
820 TCGReg reg, tcg_target_long arg)
822 tcg_out_bundle(s, mLX,
823 INSN_NOP_M,
824 tcg_opc_l2 (arg),
825 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
828 static void tcg_out_br(TCGContext *s, TCGLabel *l)
830 uint64_t imm;
 832 /* We take care not to modify the branch target here: the existing value
 833 is read back and reused. This ensures that caches and memory are kept
 834 coherent during retranslation. */
835 if (l->has_value) {
836 imm = l->u.value_ptr - s->code_ptr;
837 } else {
838 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
839 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
842 tcg_out_bundle(s, mmB,
843 INSN_NOP_M,
844 INSN_NOP_M,
845 tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
848 static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
850 uintptr_t func = desc->lo, gp = desc->hi, disp;
852 /* Look through the function descriptor. */
853 tcg_out_bundle(s, mlx,
854 INSN_NOP_M,
855 tcg_opc_l2 (gp),
856 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
857 disp = (tcg_insn_unit *)func - s->code_ptr;
858 tcg_out_bundle(s, mLX,
859 INSN_NOP_M,
860 tcg_opc_l4 (disp),
861 tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
862 TCG_REG_B0, disp));
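/* A note on the descriptor handling above: on IA-64 ELF a function pointer
   addresses a two-word descriptor { entry point, gp }.  The call sequence
   therefore materializes the callee's gp into r1 with movl, then issues
   brl.call, whose displacement -- like all IA-64 branch displacements -- is
   counted in 16-byte bundles, hence the plain tcg_insn_unit pointer
   subtraction for "disp".  */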
865 static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
867 uint64_t imm, opc1;
869 /* At least arg == 0 is a common operation. */
870 if (arg == sextract64(arg, 0, 22)) {
871 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
872 } else {
873 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
874 opc1 = INSN_NOP_M;
877 imm = tb_ret_addr - s->code_ptr;
879 tcg_out_bundle(s, mLX,
880 opc1,
881 tcg_opc_l3 (imm),
882 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
885 static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
887 if (s->tb_jmp_offset) {
888 /* direct jump method */
889 tcg_abort();
890 } else {
891 /* indirect jump method */
892 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
893 (tcg_target_long)(s->tb_next + arg));
894 tcg_out_bundle(s, MmI,
895 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
896 TCG_REG_R2, TCG_REG_R2),
897 INSN_NOP_M,
898 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
899 TCG_REG_R2, 0));
900 tcg_out_bundle(s, mmB,
901 INSN_NOP_M,
902 INSN_NOP_M,
903 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
904 TCG_REG_B6));
906 s->tb_next_offset[arg] = tcg_current_code_size(s);
909 static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
911 tcg_out_bundle(s, mmI,
912 INSN_NOP_M,
913 INSN_NOP_M,
914 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
915 tcg_out_bundle(s, mmB,
916 INSN_NOP_M,
917 INSN_NOP_M,
918 tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
921 static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
922 TCGArg arg1, tcg_target_long arg2)
924 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
925 tcg_out_bundle(s, MmI,
926 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
927 TCG_REG_R2, arg2, arg1),
928 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
929 INSN_NOP_I);
930 } else {
931 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
932 tcg_out_bundle(s, MmI,
933 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
934 TCG_REG_R2, TCG_REG_R2, arg1),
935 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
936 INSN_NOP_I);
940 static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
941 TCGArg arg1, tcg_target_long arg2)
943 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
944 tcg_out_bundle(s, MmI,
945 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
946 TCG_REG_R2, arg2, arg1),
947 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
948 INSN_NOP_I);
949 } else {
950 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
951 tcg_out_bundle(s, MmI,
952 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
953 TCG_REG_R2, TCG_REG_R2, arg1),
954 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
955 INSN_NOP_I);
959 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
960 TCGReg arg1, intptr_t arg2)
962 if (type == TCG_TYPE_I32) {
963 tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
964 } else {
965 tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
969 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
970 TCGReg arg1, intptr_t arg2)
972 if (type == TCG_TYPE_I32) {
973 tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
974 } else {
975 tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
979 static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
980 TCGReg ret, TCGArg arg1, int const_arg1,
981 TCGArg arg2, int const_arg2)
983 uint64_t opc1 = 0, opc2 = 0, opc3 = 0;
985 if (const_arg2 && arg2 != 0) {
986 opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
987 arg2 = TCG_REG_R3;
989 if (const_arg1 && arg1 != 0) {
990 if (opc_a3 && arg1 == (int8_t)arg1) {
991 opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
992 } else {
993 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
994 arg1 = TCG_REG_R2;
997 if (opc3 == 0) {
998 opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
1001 tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
1002 opc1 ? opc1 : INSN_NOP_M,
1003 opc2 ? opc2 : INSN_NOP_I,
1004 opc3);
1007 static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
1008 TCGArg arg2, int const_arg2)
1010 if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
1011 tcg_out_bundle(s, mmI,
1012 INSN_NOP_M,
1013 INSN_NOP_M,
1014 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
1015 } else {
1016 tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
1020 static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
1021 int const_arg1, TCGArg arg2, int const_arg2)
1023 if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
1024 tcg_out_bundle(s, mmI,
1025 INSN_NOP_M,
1026 INSN_NOP_M,
1027 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
1028 } else {
1029 tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
1030 arg1, const_arg1, arg2, const_arg2);
1034 static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
1035 TCGArg arg1, int const_arg1,
1036 TCGArg arg2, int const_arg2)
1038 tcg_out_bundle(s, mII,
1039 INSN_NOP_M,
1040 tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
1041 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1044 static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
1045 TCGArg arg1, int const_arg1,
1046 TCGArg arg2, int const_arg2)
1048 tcg_out_bundle(s, mII,
1049 INSN_NOP_M,
1050 tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
1051 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1054 static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
1055 TCGArg arg1, int const_arg1,
1056 TCGArg arg2, int const_arg2)
1058 tcg_out_bundle(s, mII,
1059 INSN_NOP_M,
1060 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
1061 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1064 static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
1065 TCGArg arg1, int const_arg1,
1066 TCGArg arg2, int const_arg2)
1068 tcg_out_bundle(s, mII,
1069 INSN_NOP_M,
1070 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
1071 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
1074 static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
1075 TCGArg arg1, TCGArg arg2)
1077 tcg_out_bundle(s, mmI,
1078 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
1079 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
1080 INSN_NOP_I);
1081 tcg_out_bundle(s, mmF,
1082 INSN_NOP_M,
1083 INSN_NOP_M,
1084 tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
1085 TCG_REG_F7, TCG_REG_F0));
1086 tcg_out_bundle(s, miI,
1087 tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
1088 INSN_NOP_I,
1089 INSN_NOP_I);
1092 static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1093 TCGArg arg2, int const_arg2)
1095 if (const_arg2) {
1096 tcg_out_bundle(s, miI,
1097 INSN_NOP_M,
1098 INSN_NOP_I,
1099 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1100 ret, arg1, arg2, 31 - arg2));
1101 } else {
1102 tcg_out_bundle(s, mII,
1103 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
1104 TCG_REG_R3, 0x1f, arg2),
1105 tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
1106 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
1107 TCG_REG_R2, TCG_REG_R3));
1111 static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1112 TCGArg arg2, int const_arg2)
1114 if (const_arg2) {
1115 tcg_out_bundle(s, miI,
1116 INSN_NOP_M,
1117 INSN_NOP_I,
1118 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1119 ret, arg1, arg2, 63 - arg2));
1120 } else {
1121 tcg_out_bundle(s, miI,
1122 INSN_NOP_M,
1123 INSN_NOP_I,
1124 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
1128 static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1129 TCGArg arg2, int const_arg2)
1131 if (const_arg2) {
1132 tcg_out_bundle(s, miI,
1133 INSN_NOP_M,
1134 INSN_NOP_I,
1135 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1136 arg1, 63 - arg2, 31 - arg2));
1137 } else {
1138 tcg_out_bundle(s, mII,
1139 INSN_NOP_M,
1140 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
1141 0x1f, arg2),
1142 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1143 arg1, TCG_REG_R2));
1147 static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1148 TCGArg arg2, int const_arg2)
1150 if (const_arg2) {
1151 tcg_out_bundle(s, miI,
1152 INSN_NOP_M,
1153 INSN_NOP_I,
1154 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1155 arg1, 63 - arg2, 63 - arg2));
1156 } else {
1157 tcg_out_bundle(s, miI,
1158 INSN_NOP_M,
1159 INSN_NOP_I,
1160 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1161 arg1, arg2));
1165 static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1166 TCGArg arg2, int const_arg2)
1168 if (const_arg2) {
1169 tcg_out_bundle(s, miI,
1170 INSN_NOP_M,
1171 INSN_NOP_I,
1172 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1173 arg1, arg2, 31 - arg2));
1174 } else {
1175 tcg_out_bundle(s, mII,
1176 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1177 0x1f, arg2),
1178 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
1179 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1180 TCG_REG_R2, TCG_REG_R3));
1184 static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1185 TCGArg arg2, int const_arg2)
1187 if (const_arg2) {
1188 tcg_out_bundle(s, miI,
1189 INSN_NOP_M,
1190 INSN_NOP_I,
1191 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1192 arg1, arg2, 63 - arg2));
1193 } else {
1194 tcg_out_bundle(s, miI,
1195 INSN_NOP_M,
1196 INSN_NOP_I,
1197 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1198 arg1, arg2));
1202 static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1203 TCGArg arg2, int const_arg2)
1205 if (const_arg2) {
1206 tcg_out_bundle(s, mII,
1207 INSN_NOP_M,
1208 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1209 TCG_REG_R2, arg1, arg1),
1210 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1211 TCG_REG_R2, 32 - arg2, 31));
1212 } else {
1213 tcg_out_bundle(s, miI,
1214 INSN_NOP_M,
1215 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1216 TCG_REG_R2, arg1, arg1),
1217 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1218 0x1f, arg2));
1219 tcg_out_bundle(s, mII,
1220 INSN_NOP_M,
1221 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
1222 0x20, TCG_REG_R3),
1223 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1224 TCG_REG_R2, TCG_REG_R3));
1228 static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1229 TCGArg arg2, int const_arg2)
1231 if (const_arg2) {
1232 tcg_out_bundle(s, miI,
1233 INSN_NOP_M,
1234 INSN_NOP_I,
1235 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1236 arg1, 0x40 - arg2));
1237 } else {
1238 tcg_out_bundle(s, mII,
1239 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1240 0x40, arg2),
1241 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
1242 arg1, arg2),
1243 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
1244 arg1, TCG_REG_R2));
1245 tcg_out_bundle(s, miI,
1246 INSN_NOP_M,
1247 INSN_NOP_I,
1248 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1249 TCG_REG_R2, TCG_REG_R3));
1253 static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1254 TCGArg arg2, int const_arg2)
1256 if (const_arg2) {
1257 tcg_out_bundle(s, mII,
1258 INSN_NOP_M,
1259 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1260 TCG_REG_R2, arg1, arg1),
1261 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1262 TCG_REG_R2, arg2, 31));
1263 } else {
1264 tcg_out_bundle(s, mII,
1265 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1266 0x1f, arg2),
1267 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1268 TCG_REG_R2, arg1, arg1),
1269 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1270 TCG_REG_R2, TCG_REG_R3));
1274 static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1275 TCGArg arg2, int const_arg2)
1277 if (const_arg2) {
1278 tcg_out_bundle(s, miI,
1279 INSN_NOP_M,
1280 INSN_NOP_I,
1281 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1282 arg1, arg2));
1283 } else {
1284 tcg_out_bundle(s, mII,
1285 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1286 0x40, arg2),
1287 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
1288 arg1, arg2),
1289 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
1290 arg1, TCG_REG_R2));
1291 tcg_out_bundle(s, miI,
1292 INSN_NOP_M,
1293 INSN_NOP_I,
1294 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1295 TCG_REG_R2, TCG_REG_R3));
1299 static const uint64_t opc_ext_i29[8] = {
1300 OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
1301 OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
1304 static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
1306 if ((opc & MO_SIZE) == MO_64) {
1307 return tcg_opc_mov_a(qp, d, s);
1308 } else {
1309 return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
1313 static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
1314 TCGArg ret, TCGArg arg)
1316 tcg_out_bundle(s, miI,
1317 INSN_NOP_M,
1318 INSN_NOP_I,
1319 tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
1322 static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
1324 return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
1327 static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
1329 tcg_out_bundle(s, mII,
1330 INSN_NOP_M,
1331 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
1332 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1335 static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
1337 tcg_out_bundle(s, mII,
1338 INSN_NOP_M,
1339 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
1340 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1343 static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
1345 tcg_out_bundle(s, miI,
1346 INSN_NOP_M,
1347 INSN_NOP_I,
1348 tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
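/* How the three bswap helpers above work: mux1 with the @rev byte pattern
   (mbtype 0xb) reverses all eight bytes of a register.  The 16- and 32-bit
   variants therefore first use dep.z to park the value in the top 16 or 32
   bits of the register, so that after the full 64-bit reversal the bytes of
   interest land in the low part of the result.  */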
1351 static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
1352 TCGArg a2, int const_a2, int pos, int len)
1354 uint64_t i1 = 0, i2 = 0;
1355 int cpos = 63 - pos, lm1 = len - 1;
1357 if (const_a2) {
1358 /* Truncate the value of a constant a2 to the width of the field. */
1359 int mask = (1u << len) - 1;
1360 a2 &= mask;
1362 if (a2 == 0 || a2 == mask) {
1363 /* 1-bit signed constant inserted into register. */
1364 i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
1365 } else {
1366 /* Otherwise, load any constant into a temporary. Do this into
1367 the first I slot to help out with cross-unit delays. */
1368 i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
1369 a2 = TCG_REG_R2;
1372 if (i2 == 0) {
1373 i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
1375 tcg_out_bundle(s, (i1 ? mII : miI),
1376 INSN_NOP_M,
1377 i1 ? i1 : INSN_NOP_I,
1378 i2);
1381 static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
1382 TCGArg arg2, int cmp4)
1384 uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;
1386 if (cmp4) {
1387 opc_eq_a6 = OPC_CMP4_EQ_A6;
1388 opc_lt_a6 = OPC_CMP4_LT_A6;
1389 opc_ltu_a6 = OPC_CMP4_LTU_A6;
1390 } else {
1391 opc_eq_a6 = OPC_CMP_EQ_A6;
1392 opc_lt_a6 = OPC_CMP_LT_A6;
1393 opc_ltu_a6 = OPC_CMP_LTU_A6;
1396 switch (cond) {
1397 case TCG_COND_EQ:
1398 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1399 case TCG_COND_NE:
1400 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1401 case TCG_COND_LT:
1402 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1403 case TCG_COND_LTU:
1404 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1405 case TCG_COND_GE:
1406 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1407 case TCG_COND_GEU:
1408 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1409 case TCG_COND_LE:
1410 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1411 case TCG_COND_LEU:
1412 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1413 case TCG_COND_GT:
1414 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1415 case TCG_COND_GTU:
1416 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1417 default:
1418 tcg_abort();
1419 break;
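/* Only the eq, lt and ltu relations are encoded directly; the remaining
   conditions are derived by swapping the two destination predicates, the two
   operands, or both.  Either way the result is delivered in the (p6, p7)
   predicate pair, with p6 meaning "condition true".  */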
1423 static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
1424 TCGReg arg2, TCGLabel *l, int cmp4)
1426 uint64_t imm;
 1428 /* We take care not to modify the branch target here: the existing value
 1429 is read back and reused. This ensures that caches and memory are kept
 1430 coherent during retranslation. */
1431 if (l->has_value) {
1432 imm = l->u.value_ptr - s->code_ptr;
1433 } else {
1434 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
1435 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
1438 tcg_out_bundle(s, miB,
1439 INSN_NOP_M,
1440 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1441 tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
1444 static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
1445 TCGArg arg1, TCGArg arg2, int cmp4)
1447 tcg_out_bundle(s, MmI,
1448 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1449 tcg_opc_movi_a(TCG_REG_P6, ret, 1),
1450 tcg_opc_movi_a(TCG_REG_P7, ret, 0));
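/* setcond (and movcond below) lean on predication rather than branches: the
   A6 compare writes the pair (p6, p7), and the following moves are predicated
   on p6 and p7 respectively, so exactly one of them takes effect.  The MmI
   template places a stop after the compare, which -- as I read the IA-64
   rules -- is what makes the freshly written predicates visible to the later
   slots of the same bundle.  */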
1453 static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
1454 TCGArg c1, TCGArg c2,
1455 TCGArg v1, int const_v1,
1456 TCGArg v2, int const_v2, int cmp4)
1458 uint64_t opc1, opc2;
1460 if (const_v1) {
1461 opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
1462 } else if (ret == v1) {
1463 opc1 = INSN_NOP_M;
1464 } else {
1465 opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
1467 if (const_v2) {
1468 opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
1469 } else if (ret == v2) {
1470 opc2 = INSN_NOP_I;
1471 } else {
1472 opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
1475 tcg_out_bundle(s, MmI,
1476 tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
1477 opc1,
1478 opc2);
1481 #if defined(CONFIG_SOFTMMU)
 1482 /* We're expecting to use a signed 22-bit immediate add. */
1483 QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
1484 > 0x1fffff)
1486 /* Load and compare a TLB entry, and return the result in (p6, p7).
1487 R2 is loaded with the addend TLB entry.
 1488 R57 is loaded with the address, zero-extended on 32-bit targets.
1489 R1, R3 are clobbered, leaving R56 free for...
1490 BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store. */
1491 static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
1492 TCGMemOp s_bits, int off_rw, int off_add,
1493 uint64_t bswap1, uint64_t bswap2)
1496 .mii
1497 mov r2 = off_rw
1498 extr.u r3 = addr_reg, ... # extract tlb page
1499 zxt4 r57 = addr_reg # or mov for 64-bit guest
1501 .mii
1502 addl r2 = r2, areg0
1503 shl r3 = r3, cteb # via dep.z
1504 dep r1 = 0, r57, ... # zero page ofs, keep align
1506 .mmi
1507 add r2 = r2, r3
1509 ld4 r3 = [r2], off_add-off_rw # or ld8 for 64-bit guest
1512 .mmi
1514 cmp.eq p6, p7 = r3, r58
1518 tcg_out_bundle(s, miI,
1519 tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
1520 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
1521 addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
1522 tcg_opc_ext_i(TCG_REG_P0,
1523 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
1524 TCG_REG_R57, addr_reg));
1525 tcg_out_bundle(s, miI,
1526 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1527 TCG_REG_R2, TCG_AREG0),
1528 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
1529 TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
1530 63 - CPU_TLB_ENTRY_BITS),
1531 tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
1532 TCG_REG_R57, 63 - s_bits,
1533 TARGET_PAGE_BITS - s_bits - 1));
1534 tcg_out_bundle(s, MmI,
1535 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
1536 TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
1537 tcg_opc_m3 (TCG_REG_P0,
1538 (TARGET_LONG_BITS == 32
1539 ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
1540 TCG_REG_R2, off_add - off_rw),
1541 bswap1);
1542 tcg_out_bundle(s, mmI,
1543 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
1544 tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
1545 TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
1546 bswap2);
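/* A sketch of what the four bundles above compute, under the usual TCG
   softmmu scheme: R2 ends up pointing at the matching tlb_table entry
   (env + mmu-index offset + (page index << CPU_TLB_ENTRY_BITS)); the ld4/ld8
   with the "off_add - off_rw" post-increment fetches the comparator and
   leaves R2 pointing at the addend field for the final ld8.  If I read the
   dep encoding right, R1 is the address with its page-offset bits above the
   access size cleared, so a misaligned access keeps its low bits set, fails
   the cmp.eq, and is forced onto the slow path.  */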
1549 typedef struct TCGLabelQemuLdst {
1550 bool is_ld;
1551 TCGMemOp size;
1552 tcg_insn_unit *label_ptr; /* label pointers to be updated */
1553 struct TCGLabelQemuLdst *next;
1554 } TCGLabelQemuLdst;
1556 typedef struct TCGBackendData {
1557 TCGLabelQemuLdst *labels;
1558 } TCGBackendData;
1560 static inline void tcg_out_tb_init(TCGContext *s)
1562 s->be->labels = NULL;
1565 static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
1566 tcg_insn_unit *label_ptr)
1568 TCGBackendData *be = s->be;
1569 TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));
1571 l->is_ld = is_ld;
1572 l->size = opc & MO_SIZE;
1573 l->label_ptr = label_ptr;
1574 l->next = be->labels;
1575 be->labels = l;
1578 static void tcg_out_tb_finalize(TCGContext *s)
1580 static const void * const helpers[8] = {
1581 helper_ret_stb_mmu,
1582 helper_le_stw_mmu,
1583 helper_le_stl_mmu,
1584 helper_le_stq_mmu,
1585 helper_ret_ldub_mmu,
1586 helper_le_lduw_mmu,
1587 helper_le_ldul_mmu,
1588 helper_le_ldq_mmu,
1590 tcg_insn_unit *thunks[8] = { };
1591 TCGLabelQemuLdst *l;
1593 for (l = s->be->labels; l != NULL; l = l->next) {
1594 long x = l->is_ld * 4 + l->size;
1595 tcg_insn_unit *dest = thunks[x];
 1597 /* The out-of-line thunks are all the same: load the return address
 1598 from B0, load the GP, and branch to the code. Note that we are
 1599 always post-call, so the register window has rolled and we are
 1600 using incoming parameter register numbers, not outgoing ones. */
1601 if (dest == NULL) {
1602 uintptr_t *desc = (uintptr_t *)helpers[x];
1603 uintptr_t func = desc[0], gp = desc[1], disp;
1605 thunks[x] = dest = s->code_ptr;
1607 tcg_out_bundle(s, mlx,
1608 INSN_NOP_M,
1609 tcg_opc_l2 (gp),
1610 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
1611 TCG_REG_R1, gp));
1612 tcg_out_bundle(s, mii,
1613 INSN_NOP_M,
1614 INSN_NOP_I,
1615 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
1616 l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
1617 TCG_REG_B0));
1618 disp = (tcg_insn_unit *)func - s->code_ptr;
1619 tcg_out_bundle(s, mLX,
1620 INSN_NOP_M,
1621 tcg_opc_l3 (disp),
1622 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
1625 reloc_pcrel21b_slot2(l->label_ptr, dest);
1629 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1631 static const uint64_t opc_ld_m1[4] = {
1632 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1634 int addr_reg, data_reg, mem_index;
1635 TCGMemOpIdx oi;
1636 TCGMemOp opc, s_bits;
1637 uint64_t fin1, fin2;
1638 tcg_insn_unit *label_ptr;
1640 data_reg = args[0];
1641 addr_reg = args[1];
1642 oi = args[2];
1643 opc = get_memop(oi);
1644 mem_index = get_mmuidx(oi);
1645 s_bits = opc & MO_SIZE;
1647 /* Read the TLB entry */
1648 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1649 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
1650 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1651 INSN_NOP_I, INSN_NOP_I);
1653 /* P6 is the fast path, and P7 the slow path */
1655 fin2 = 0;
1656 if (opc & MO_BSWAP) {
1657 fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
1658 if (s_bits < MO_64) {
1659 int shift = 64 - (8 << s_bits);
1660 fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
1661 fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
1662 data_reg, data_reg, shift, 63 - shift);
1664 } else {
1665 fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
1668 tcg_out_bundle(s, mmI,
1669 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1670 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1671 TCG_REG_R2, TCG_REG_R57),
1672 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
1673 label_ptr = s->code_ptr;
1674 tcg_out_bundle(s, miB,
1675 tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
1676 TCG_REG_R8, TCG_REG_R2),
1677 INSN_NOP_I,
1678 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1679 get_reloc_pcrel21b_slot2(label_ptr)));
1681 add_qemu_ldst_label(s, 1, opc, label_ptr);
1683 /* Note that we always use LE helper functions, so the bswap insns
1684 here for the fast path also apply to the slow path. */
1685 tcg_out_bundle(s, (fin2 ? mII : miI),
1686 INSN_NOP_M,
1687 fin1,
1688 fin2 ? fin2 : INSN_NOP_I);
1691 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1693 static const uint64_t opc_st_m4[4] = {
1694 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1696 TCGReg addr_reg, data_reg;
1697 int mem_index;
1698 uint64_t pre1, pre2;
1699 TCGMemOpIdx oi;
1700 TCGMemOp opc, s_bits;
1701 tcg_insn_unit *label_ptr;
1703 data_reg = args[0];
1704 addr_reg = args[1];
1705 oi = args[2];
1706 opc = get_memop(oi);
1707 mem_index = get_mmuidx(oi);
1708 s_bits = opc & MO_SIZE;
1710 /* Note that we always use LE helper functions, so the bswap insns
1711 that are here for the fast path also apply to the slow path,
1712 and move the data into the argument register. */
1713 pre2 = INSN_NOP_I;
1714 if (opc & MO_BSWAP) {
1715 pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
1716 if (s_bits < MO_64) {
1717 int shift = 64 - (8 << s_bits);
1718 pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
1719 TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
1721 } else {
1722 /* Just move the data into place for the slow path. */
1723 pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
1726 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1727 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
1728 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1729 pre1, pre2);
1731 /* P6 is the fast path, and P7 the slow path */
1732 tcg_out_bundle(s, mmI,
1733 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1734 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1735 TCG_REG_R2, TCG_REG_R57),
1736 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
1737 label_ptr = s->code_ptr;
1738 tcg_out_bundle(s, miB,
1739 tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
1740 TCG_REG_R58, TCG_REG_R2),
1741 INSN_NOP_I,
1742 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1743 get_reloc_pcrel21b_slot2(label_ptr)));
1745 add_qemu_ldst_label(s, 0, opc, label_ptr);
1748 #else /* !CONFIG_SOFTMMU */
1749 # include "tcg-be-null.h"
1751 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1753 static uint64_t const opc_ld_m1[4] = {
1754 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1756 int addr_reg, data_reg;
1757 TCGMemOp opc, s_bits, bswap;
1759 data_reg = args[0];
1760 addr_reg = args[1];
1761 opc = args[2];
1762 s_bits = opc & MO_SIZE;
1763 bswap = opc & MO_BSWAP;
1765 #if TARGET_LONG_BITS == 32
1766 if (GUEST_BASE != 0) {
1767 tcg_out_bundle(s, mII,
1768 INSN_NOP_M,
1769 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1770 TCG_REG_R3, addr_reg),
1771 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1772 TCG_GUEST_BASE_REG, TCG_REG_R3));
1773 } else {
1774 tcg_out_bundle(s, miI,
1775 INSN_NOP_M,
1776 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1777 TCG_REG_R2, addr_reg),
1778 INSN_NOP_I);
1781 if (!bswap) {
1782 if (!(opc & MO_SIGN)) {
1783 tcg_out_bundle(s, miI,
1784 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1785 data_reg, TCG_REG_R2),
1786 INSN_NOP_I,
1787 INSN_NOP_I);
1788 } else {
1789 tcg_out_bundle(s, mII,
1790 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1791 data_reg, TCG_REG_R2),
1792 INSN_NOP_I,
1793 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1795 } else if (s_bits == MO_64) {
1796 tcg_out_bundle(s, mII,
1797 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1798 data_reg, TCG_REG_R2),
1799 INSN_NOP_I,
1800 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1801 } else {
1802 if (s_bits == MO_16) {
1803 tcg_out_bundle(s, mII,
1804 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1805 data_reg, TCG_REG_R2),
1806 INSN_NOP_I,
1807 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1808 data_reg, data_reg, 15, 15));
1809 } else {
1810 tcg_out_bundle(s, mII,
1811 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1812 data_reg, TCG_REG_R2),
1813 INSN_NOP_I,
1814 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1815 data_reg, data_reg, 31, 31));
1817 if (!(opc & MO_SIGN)) {
1818 tcg_out_bundle(s, miI,
1819 INSN_NOP_M,
1820 INSN_NOP_I,
1821 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1822 } else {
1823 tcg_out_bundle(s, mII,
1824 INSN_NOP_M,
1825 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
1826 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1829 #else
1830 if (GUEST_BASE != 0) {
1831 tcg_out_bundle(s, MmI,
1832 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1833 TCG_GUEST_BASE_REG, addr_reg),
1834 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1835 data_reg, TCG_REG_R2),
1836 INSN_NOP_I);
1837 } else {
1838 tcg_out_bundle(s, mmI,
1839 INSN_NOP_M,
1840 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1841 data_reg, addr_reg),
1842 INSN_NOP_I);
1845 if (bswap && s_bits == MO_16) {
1846 tcg_out_bundle(s, mII,
1847 INSN_NOP_M,
1848 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1849 data_reg, data_reg, 15, 15),
1850 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1851 } else if (bswap && s_bits == MO_32) {
1852 tcg_out_bundle(s, mII,
1853 INSN_NOP_M,
1854 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1855 data_reg, data_reg, 31, 31),
1856 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1857 } else if (bswap && s_bits == MO_64) {
1858 tcg_out_bundle(s, miI,
1859 INSN_NOP_M,
1860 INSN_NOP_I,
1861 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1863 if (opc & MO_SIGN) {
1864 tcg_out_bundle(s, miI,
1865 INSN_NOP_M,
1866 INSN_NOP_I,
1867 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1869 #endif
1872 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1874 static uint64_t const opc_st_m4[4] = {
1875 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1877 int addr_reg, data_reg;
1878 #if TARGET_LONG_BITS == 64
1879 uint64_t add_guest_base;
1880 #endif
1881 TCGMemOp opc, s_bits, bswap;
1883 data_reg = args[0];
1884 addr_reg = args[1];
1885 opc = args[2];
1886 s_bits = opc & MO_SIZE;
1887 bswap = opc & MO_BSWAP;
1889 #if TARGET_LONG_BITS == 32
1890 if (GUEST_BASE != 0) {
1891 tcg_out_bundle(s, mII,
1892 INSN_NOP_M,
1893 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1894 TCG_REG_R3, addr_reg),
1895 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1896 TCG_GUEST_BASE_REG, TCG_REG_R3));
1897 } else {
1898 tcg_out_bundle(s, miI,
1899 INSN_NOP_M,
1900 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1901 TCG_REG_R2, addr_reg),
1902 INSN_NOP_I);
1905 if (bswap) {
1906 if (s_bits == MO_16) {
1907 tcg_out_bundle(s, mII,
1908 INSN_NOP_M,
1909 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1910 TCG_REG_R3, data_reg, 15, 15),
1911 tcg_opc_bswap64_i(TCG_REG_P0,
1912 TCG_REG_R3, TCG_REG_R3));
1913 data_reg = TCG_REG_R3;
1914 } else if (s_bits == MO_32) {
1915 tcg_out_bundle(s, mII,
1916 INSN_NOP_M,
1917 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1918 TCG_REG_R3, data_reg, 31, 31),
1919 tcg_opc_bswap64_i(TCG_REG_P0,
1920 TCG_REG_R3, TCG_REG_R3));
1921 data_reg = TCG_REG_R3;
1922 } else if (s_bits == MO_64) {
1923 tcg_out_bundle(s, miI,
1924 INSN_NOP_M,
1925 INSN_NOP_I,
1926 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1927 data_reg = TCG_REG_R3;
1930 tcg_out_bundle(s, mmI,
1931 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1932 data_reg, TCG_REG_R2),
1933 INSN_NOP_M,
1934 INSN_NOP_I);
1935 #else
1936 if (GUEST_BASE != 0) {
1937 add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1938 TCG_GUEST_BASE_REG, addr_reg);
1939 addr_reg = TCG_REG_R2;
1940 } else {
1941 add_guest_base = INSN_NOP_M;
1944 if (!bswap) {
1945 tcg_out_bundle(s, (GUEST_BASE ? MmI : mmI),
1946 add_guest_base,
1947 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1948 data_reg, addr_reg),
1949 INSN_NOP_I);
1950 } else {
1951 if (s_bits == MO_16) {
1952 tcg_out_bundle(s, mII,
1953 add_guest_base,
1954 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1955 TCG_REG_R3, data_reg, 15, 15),
1956 tcg_opc_bswap64_i(TCG_REG_P0,
1957 TCG_REG_R3, TCG_REG_R3));
1958 data_reg = TCG_REG_R3;
1959 } else if (s_bits == MO_32) {
1960 tcg_out_bundle(s, mII,
1961 add_guest_base,
1962 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1963 TCG_REG_R3, data_reg, 31, 31),
1964 tcg_opc_bswap64_i(TCG_REG_P0,
1965 TCG_REG_R3, TCG_REG_R3));
1966 data_reg = TCG_REG_R3;
1967 } else if (s_bits == MO_64) {
1968 tcg_out_bundle(s, miI,
1969 add_guest_base,
1970 INSN_NOP_I,
1971 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1972 data_reg = TCG_REG_R3;
1974 tcg_out_bundle(s, miI,
1975 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1976 data_reg, addr_reg),
1977 INSN_NOP_I,
1978 INSN_NOP_I);
1980 #endif
1983 #endif
1985 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1986 const TCGArg *args, const int *const_args)
1988 switch(opc) {
1989 case INDEX_op_exit_tb:
1990 tcg_out_exit_tb(s, args[0]);
1991 break;
1992 case INDEX_op_br:
1993 tcg_out_br(s, arg_label(args[0]));
1994 break;
1995 case INDEX_op_goto_tb:
1996 tcg_out_goto_tb(s, args[0]);
1997 break;
1999 case INDEX_op_ld8u_i32:
2000 case INDEX_op_ld8u_i64:
2001 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2002 break;
2003 case INDEX_op_ld8s_i32:
2004 case INDEX_op_ld8s_i64:
2005 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2006 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2007 break;
2008 case INDEX_op_ld16u_i32:
2009 case INDEX_op_ld16u_i64:
2010 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2011 break;
2012 case INDEX_op_ld16s_i32:
2013 case INDEX_op_ld16s_i64:
2014 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2015 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2016 break;
2017 case INDEX_op_ld_i32:
2018 case INDEX_op_ld32u_i64:
2019 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2020 break;
2021 case INDEX_op_ld32s_i64:
2022 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2023 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2024 break;
2025 case INDEX_op_ld_i64:
2026 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2027 break;
2028 case INDEX_op_st8_i32:
2029 case INDEX_op_st8_i64:
2030 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2031 break;
2032 case INDEX_op_st16_i32:
2033 case INDEX_op_st16_i64:
2034 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2035 break;
2036 case INDEX_op_st_i32:
2037 case INDEX_op_st32_i64:
2038 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2039 break;
2040 case INDEX_op_st_i64:
2041 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2042 break;
2044 case INDEX_op_add_i32:
2045 case INDEX_op_add_i64:
2046 tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
2047 break;
2048 case INDEX_op_sub_i32:
2049 case INDEX_op_sub_i64:
2050 tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
2051 break;
2053 case INDEX_op_and_i32:
2054 case INDEX_op_and_i64:
2055 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2056 tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
2057 args[2], const_args[2], args[1], const_args[1]);
2058 break;
2059 case INDEX_op_andc_i32:
2060 case INDEX_op_andc_i64:
2061 tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
2062 args[1], const_args[1], args[2], const_args[2]);
2063 break;
2064 case INDEX_op_eqv_i32:
2065 case INDEX_op_eqv_i64:
2066 tcg_out_eqv(s, args[0], args[1], const_args[1],
2067 args[2], const_args[2]);
2068 break;
2069 case INDEX_op_nand_i32:
2070 case INDEX_op_nand_i64:
2071 tcg_out_nand(s, args[0], args[1], const_args[1],
2072 args[2], const_args[2]);
2073 break;
2074 case INDEX_op_nor_i32:
2075 case INDEX_op_nor_i64:
2076 tcg_out_nor(s, args[0], args[1], const_args[1],
2077 args[2], const_args[2]);
2078 break;
2079 case INDEX_op_or_i32:
2080 case INDEX_op_or_i64:
2081 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2082 tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
2083 args[2], const_args[2], args[1], const_args[1]);
2084 break;
2085 case INDEX_op_orc_i32:
2086 case INDEX_op_orc_i64:
2087 tcg_out_orc(s, args[0], args[1], const_args[1],
2088 args[2], const_args[2]);
2089 break;
2090 case INDEX_op_xor_i32:
2091 case INDEX_op_xor_i64:
2092 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2093 tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
2094 args[2], const_args[2], args[1], const_args[1]);
2095 break;
2097 case INDEX_op_mul_i32:
2098 case INDEX_op_mul_i64:
2099 tcg_out_mul(s, args[0], args[1], args[2]);
2100 break;
2102 case INDEX_op_sar_i32:
2103 tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
2104 break;
2105 case INDEX_op_sar_i64:
2106 tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
2107 break;
2108 case INDEX_op_shl_i32:
2109 tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
2110 break;
2111 case INDEX_op_shl_i64:
2112 tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
2113 break;
2114 case INDEX_op_shr_i32:
2115 tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
2116 break;
2117 case INDEX_op_shr_i64:
2118 tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
2119 break;
2120 case INDEX_op_rotl_i32:
2121 tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
2122 break;
2123 case INDEX_op_rotl_i64:
2124 tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
2125 break;
2126 case INDEX_op_rotr_i32:
2127 tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
2128 break;
2129 case INDEX_op_rotr_i64:
2130 tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
2131 break;
2133 case INDEX_op_ext8s_i32:
2134 case INDEX_op_ext8s_i64:
2135 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
2136 break;
2137 case INDEX_op_ext8u_i32:
2138 case INDEX_op_ext8u_i64:
2139 tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
2140 break;
2141 case INDEX_op_ext16s_i32:
2142 case INDEX_op_ext16s_i64:
2143 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
2144 break;
2145 case INDEX_op_ext16u_i32:
2146 case INDEX_op_ext16u_i64:
2147 tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
2148 break;
2149 case INDEX_op_ext_i32_i64:
2150 case INDEX_op_ext32s_i64:
2151 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
2152 break;
2153 case INDEX_op_extu_i32_i64:
2154 case INDEX_op_ext32u_i64:
2155 tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
2156 break;
2158 case INDEX_op_bswap16_i32:
2159 case INDEX_op_bswap16_i64:
2160 tcg_out_bswap16(s, args[0], args[1]);
2161 break;
2162 case INDEX_op_bswap32_i32:
2163 case INDEX_op_bswap32_i64:
2164 tcg_out_bswap32(s, args[0], args[1]);
2165 break;
2166 case INDEX_op_bswap64_i64:
2167 tcg_out_bswap64(s, args[0], args[1]);
2168 break;
2170 case INDEX_op_deposit_i32:
2171 case INDEX_op_deposit_i64:
2172 tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
2173 args[3], args[4]);
2174 break;
2176 case INDEX_op_brcond_i32:
2177 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
2178 break;
2179 case INDEX_op_brcond_i64:
2180 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
2181 break;
2182 case INDEX_op_setcond_i32:
2183 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
2184 break;
2185 case INDEX_op_setcond_i64:
2186 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
2187 break;
2188 case INDEX_op_movcond_i32:
2189 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2190 args[3], const_args[3], args[4], const_args[4], 1);
2191 break;
2192 case INDEX_op_movcond_i64:
2193 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2194 args[3], const_args[3], args[4], const_args[4], 0);
2195 break;
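/* Guest memory accesses.  tcg_out_qemu_ld/st emit either the softmmu
   TLB lookup (falling back to the load/store helpers) or, in user-only
   builds, a plain access offset by TCG_GUEST_BASE_REG. */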
2197 case INDEX_op_qemu_ld_i32:
2198 tcg_out_qemu_ld(s, args);
2199 break;
2200 case INDEX_op_qemu_ld_i64:
2201 tcg_out_qemu_ld(s, args);
2202 break;
2203 case INDEX_op_qemu_st_i32:
2204 tcg_out_qemu_st(s, args);
2205 break;
2206 case INDEX_op_qemu_st_i64:
2207 tcg_out_qemu_st(s, args);
2208 break;
2210 case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
2211 case INDEX_op_mov_i64:
2212 case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
2213 case INDEX_op_movi_i64:
2214 case INDEX_op_call: /* Always emitted via tcg_out_call. */
2215 default:
2216 tcg_abort();
2217 }
2218 }
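/* Constraint letters used below, roughly as this backend parses them:
   "r" any general register; "Z" also accepts the constant 0, which is
   simply encoded as r0; "I" a small immediate usable by the immediate
   ALU forms; "i" any immediate; "S" like "r", but with the softmmu it
   avoids the registers the qemu_st path needs as scratch. */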
2220 static const TCGTargetOpDef ia64_op_defs[] = {
2221 { INDEX_op_br, { } },
2222 { INDEX_op_exit_tb, { } },
2223 { INDEX_op_goto_tb, { } },
2225 { INDEX_op_ld8u_i32, { "r", "r" } },
2226 { INDEX_op_ld8s_i32, { "r", "r" } },
2227 { INDEX_op_ld16u_i32, { "r", "r" } },
2228 { INDEX_op_ld16s_i32, { "r", "r" } },
2229 { INDEX_op_ld_i32, { "r", "r" } },
2230 { INDEX_op_st8_i32, { "rZ", "r" } },
2231 { INDEX_op_st16_i32, { "rZ", "r" } },
2232 { INDEX_op_st_i32, { "rZ", "r" } },
2234 { INDEX_op_add_i32, { "r", "rZ", "rI" } },
2235 { INDEX_op_sub_i32, { "r", "rI", "rI" } },
2237 { INDEX_op_and_i32, { "r", "rI", "rI" } },
2238 { INDEX_op_andc_i32, { "r", "rI", "rI" } },
2239 { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
2240 { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
2241 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
2242 { INDEX_op_or_i32, { "r", "rI", "rI" } },
2243 { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
2244 { INDEX_op_xor_i32, { "r", "rI", "rI" } },
2246 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
2248 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
2249 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
2250 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
2251 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
2252 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
2254 { INDEX_op_ext8s_i32, { "r", "rZ"} },
2255 { INDEX_op_ext8u_i32, { "r", "rZ"} },
2256 { INDEX_op_ext16s_i32, { "r", "rZ"} },
2257 { INDEX_op_ext16u_i32, { "r", "rZ"} },
2259 { INDEX_op_bswap16_i32, { "r", "rZ" } },
2260 { INDEX_op_bswap32_i32, { "r", "rZ" } },
2262 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
2263 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
2264 { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },
2266 { INDEX_op_ld8u_i64, { "r", "r" } },
2267 { INDEX_op_ld8s_i64, { "r", "r" } },
2268 { INDEX_op_ld16u_i64, { "r", "r" } },
2269 { INDEX_op_ld16s_i64, { "r", "r" } },
2270 { INDEX_op_ld32u_i64, { "r", "r" } },
2271 { INDEX_op_ld32s_i64, { "r", "r" } },
2272 { INDEX_op_ld_i64, { "r", "r" } },
2273 { INDEX_op_st8_i64, { "rZ", "r" } },
2274 { INDEX_op_st16_i64, { "rZ", "r" } },
2275 { INDEX_op_st32_i64, { "rZ", "r" } },
2276 { INDEX_op_st_i64, { "rZ", "r" } },
2278 { INDEX_op_add_i64, { "r", "rZ", "rI" } },
2279 { INDEX_op_sub_i64, { "r", "rI", "rI" } },
2281 { INDEX_op_and_i64, { "r", "rI", "rI" } },
2282 { INDEX_op_andc_i64, { "r", "rI", "rI" } },
2283 { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
2284 { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
2285 { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
2286 { INDEX_op_or_i64, { "r", "rI", "rI" } },
2287 { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
2288 { INDEX_op_xor_i64, { "r", "rI", "rI" } },
2290 { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },
2292 { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
2293 { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
2294 { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
2295 { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
2296 { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },
2298 { INDEX_op_ext8s_i64, { "r", "rZ"} },
2299 { INDEX_op_ext8u_i64, { "r", "rZ"} },
2300 { INDEX_op_ext16s_i64, { "r", "rZ"} },
2301 { INDEX_op_ext16u_i64, { "r", "rZ"} },
2302 { INDEX_op_ext32s_i64, { "r", "rZ"} },
2303 { INDEX_op_ext32u_i64, { "r", "rZ"} },
2304 { INDEX_op_ext_i32_i64, { "r", "rZ" } },
2305 { INDEX_op_extu_i32_i64, { "r", "rZ" } },
2307 { INDEX_op_bswap16_i64, { "r", "rZ" } },
2308 { INDEX_op_bswap32_i64, { "r", "rZ" } },
2309 { INDEX_op_bswap64_i64, { "r", "rZ" } },
2311 { INDEX_op_brcond_i64, { "rZ", "rZ" } },
2312 { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
2313 { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },
2315 { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
2316 { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },
2318 { INDEX_op_qemu_ld_i32, { "r", "r" } },
2319 { INDEX_op_qemu_ld_i64, { "r", "r" } },
2320 { INDEX_op_qemu_st_i32, { "SZ", "r" } },
2321 { INDEX_op_qemu_st_i64, { "SZ", "r" } },
2323 { -1 },
2324 };
2326 /* Generate global QEMU prologue and epilogue code */
2327 static void tcg_target_qemu_prologue(TCGContext *s)
2328 {
2329 int frame_size;
2331 /* reserve some stack space */
2332 frame_size = TCG_STATIC_CALL_ARGS_SIZE +
2333 CPU_TEMP_BUF_NLONGS * sizeof(long);
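/* Round up to the required stack alignment; (x + a - 1) & ~(a - 1)
   is the usual rounding idiom for a power-of-two alignment a. */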
2334 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
2335 ~(TCG_TARGET_STACK_ALIGN - 1);
2336 tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
2337 CPU_TEMP_BUF_NLONGS * sizeof(long));
2339 /* First emit an ad hoc function descriptor */
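/* An ia64 function pointer designates a two-word descriptor holding
   the entry point and the gp value, and the prologue is entered
   through such a pointer.  Emit the descriptor inline: the entry
   point is the bundle that follows; gp is unused here, hence 0. */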
2340 *s->code_ptr = (tcg_insn_unit){
2341 (uint64_t)(s->code_ptr + 1), /* entry point */
2342 0 /* skip gp */
2343 };
2344 s->code_ptr++;
2346 /* prologue */
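/* Allocate a register stack frame, saving ar.pfs in r34, and copy the
   second incoming argument (the address of the translated code to
   execute) from r33 into the branch register b6. */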
2347 tcg_out_bundle(s, miI,
2348 tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
2349 TCG_REG_R34, 32, 24, 0),
2350 INSN_NOP_I,
2351 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2352 TCG_REG_B6, TCG_REG_R33, 0));
2354 /* ??? If GUEST_BASE < 0x200000, we could load the register via
2355 an ADDL in the M slot of the next bundle. */
2356 if (GUEST_BASE != 0) {
2357 tcg_out_bundle(s, mlx,
2358 INSN_NOP_M,
2359 tcg_opc_l2 (GUEST_BASE),
2360 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
2361 TCG_GUEST_BASE_REG, GUEST_BASE));
2362 tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
2363 }
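/* Adjust the stack pointer, save the return address (b0) in r33 for
   the epilogue, and enter the translated code through b6. */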
2365 tcg_out_bundle(s, miB,
2366 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2367 TCG_REG_R12, -frame_size, TCG_REG_R12),
2368 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
2369 TCG_REG_R33, TCG_REG_B0),
2370 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
2372 /* epilogue */
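/* exit_tb branches back here.  Restore b0 and the stack pointer, then
   restore ar.pfs from r34 and return to the caller of the prologue. */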
2373 tb_ret_addr = s->code_ptr;
2374 tcg_out_bundle(s, miI,
2375 INSN_NOP_M,
2376 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2377 TCG_REG_B0, TCG_REG_R33, 0),
2378 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2379 TCG_REG_R12, frame_size, TCG_REG_R12));
2380 tcg_out_bundle(s, miB,
2381 INSN_NOP_M,
2382 tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
2383 TCG_REG_PFS, TCG_REG_R34),
2384 tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
2385 TCG_REG_B0));
2386 }
2388 static void tcg_target_init(TCGContext *s)
2389 {
2390 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
2391 0xffffffffffffffffull);
2392 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
2393 0xffffffffffffffffull);
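/* Under the ia64 software conventions the return/scratch registers
   r8-r11, the temporaries r14-r31 and the stacked output registers
   r56-r63 (used to pass arguments to helpers) may all be clobbered
   by a call. */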
2395 tcg_regset_clear(tcg_target_call_clobber_regs);
2396 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
2397 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
2398 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
2399 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
2400 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
2401 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
2402 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
2403 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
2404 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
2405 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
2406 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
2407 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
2408 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
2409 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
2410 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
2411 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
2412 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
2413 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
2414 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
2415 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
2416 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
2417 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
2418 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
2419 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
2420 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
2421 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
2422 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
2423 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
2424 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
2425 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);
2427 tcg_regset_clear(s->reserved_regs);
2428 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0); /* zero register */
2429 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1); /* global pointer */
2430 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2); /* internal use */
2431 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3); /* internal use */
2432 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12); /* stack pointer */
2433 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */
2434 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33); /* return address */
2435 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34); /* PFS */
2437 /* The following 4 are not in use, are call-saved, but *not* saved
2438 by the prologue. Therefore we cannot use them without modifying
2439 the prologue. There doesn't seem to be any good reason to use
2440 these as opposed to the windowed registers. */
2441 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
2442 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
2443 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
2444 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);
2446 tcg_add_target_add_op_defs(ia64_op_defs);
2447 }