/* qemu.git: tcg/ia64/tcg-target.inc.c */

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/*
 * Register definitions
 */

#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",  "r1",  "r2",  "r3",  "r4",  "r5",  "r6",  "r7",
    "r8",  "r9",  "r10", "r11", "r12", "r13", "r14", "r15",
    "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
    "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
    "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
    "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
    "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
    "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
};
#endif

#ifndef CONFIG_SOFTMMU
#define TCG_GUEST_BASE_REG TCG_REG_R55
#endif

/* Branch registers */
enum {
    TCG_REG_B0 = 0,
    TCG_REG_B1,
    TCG_REG_B2,
    TCG_REG_B3,
    TCG_REG_B4,
    TCG_REG_B5,
    TCG_REG_B6,
    TCG_REG_B7,
};

/* Floating point registers */
enum {
    TCG_REG_F0 = 0,
    TCG_REG_F1,
    TCG_REG_F2,
    TCG_REG_F3,
    TCG_REG_F4,
    TCG_REG_F5,
    TCG_REG_F6,
    TCG_REG_F7,
    TCG_REG_F8,
    TCG_REG_F9,
    TCG_REG_F10,
    TCG_REG_F11,
    TCG_REG_F12,
    TCG_REG_F13,
    TCG_REG_F14,
    TCG_REG_F15,
};

/* Predicate registers */
enum {
    TCG_REG_P0 = 0,
    TCG_REG_P1,
    TCG_REG_P2,
    TCG_REG_P3,
    TCG_REG_P4,
    TCG_REG_P5,
    TCG_REG_P6,
    TCG_REG_P7,
    TCG_REG_P8,
    TCG_REG_P9,
    TCG_REG_P10,
    TCG_REG_P11,
    TCG_REG_P12,
    TCG_REG_P13,
    TCG_REG_P14,
    TCG_REG_P15,
};

/* Application registers */
enum {
    TCG_REG_PFS = 64,
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R35,
    TCG_REG_R36,
    TCG_REG_R37,
    TCG_REG_R38,
    TCG_REG_R39,
    TCG_REG_R40,
    TCG_REG_R41,
    TCG_REG_R42,
    TCG_REG_R43,
    TCG_REG_R44,
    TCG_REG_R45,
    TCG_REG_R46,
    TCG_REG_R47,
    TCG_REG_R48,
    TCG_REG_R49,
    TCG_REG_R50,
    TCG_REG_R51,
    TCG_REG_R52,
    TCG_REG_R53,
    TCG_REG_R54,
    TCG_REG_R55,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
    TCG_REG_R56,
    TCG_REG_R57,
    TCG_REG_R58,
    TCG_REG_R59,
    TCG_REG_R60,
    TCG_REG_R61,
    TCG_REG_R62,
    TCG_REG_R63,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11
};

static const int tcg_target_call_iarg_regs[8] = {
    TCG_REG_R56,
    TCG_REG_R57,
    TCG_REG_R58,
    TCG_REG_R59,
    TCG_REG_R60,
    TCG_REG_R61,
    TCG_REG_R62,
    TCG_REG_R63,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R8
};

/*
 * opcode formation
 */

/* bundle templates: stops (double bar in the IA64 manual) are marked with
   an uppercase letter. */
enum {
    mii = 0x00,
    miI = 0x01,
    mIi = 0x02,
    mII = 0x03,
    mlx = 0x04,
    mLX = 0x05,
    mmi = 0x08,
    mmI = 0x09,
    Mmi = 0x0a,
    MmI = 0x0b,
    mfi = 0x0c,
    mfI = 0x0d,
    mmf = 0x0e,
    mmF = 0x0f,
    mib = 0x10,
    miB = 0x11,
    mbb = 0x12,
    mbB = 0x13,
    bbb = 0x16,
    bbB = 0x17,
    mmb = 0x18,
    mmB = 0x19,
    mfb = 0x1c,
    mfB = 0x1d,
};

enum {
    OPC_ADD_A1                = 0x10000000000ull,
    OPC_AND_A1                = 0x10060000000ull,
    OPC_AND_A3                = 0x10160000000ull,
    OPC_ANDCM_A1              = 0x10068000000ull,
    OPC_ANDCM_A3              = 0x10168000000ull,
    OPC_ADDS_A4               = 0x10800000000ull,
    OPC_ADDL_A5               = 0x12000000000ull,
    OPC_ALLOC_M34             = 0x02c00000000ull,
    OPC_BR_DPTK_FEW_B1        = 0x08400000000ull,
    OPC_BR_SPTK_MANY_B1       = 0x08000001000ull,
    OPC_BR_CALL_SPNT_FEW_B3   = 0x0a200000000ull,
    OPC_BR_SPTK_MANY_B4       = 0x00100001000ull,
    OPC_BR_CALL_SPTK_MANY_B5  = 0x02100001000ull,
    OPC_BR_RET_SPTK_MANY_B4   = 0x00108001100ull,
    OPC_BRL_SPTK_MANY_X3      = 0x18000001000ull,
    OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
    OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
    OPC_CMP_LT_A6             = 0x18000000000ull,
    OPC_CMP_LTU_A6            = 0x1a000000000ull,
    OPC_CMP_EQ_A6             = 0x1c000000000ull,
    OPC_CMP4_LT_A6            = 0x18400000000ull,
    OPC_CMP4_LTU_A6           = 0x1a400000000ull,
    OPC_CMP4_EQ_A6            = 0x1c400000000ull,
    OPC_DEP_I14               = 0x0ae00000000ull,
    OPC_DEP_I15               = 0x08000000000ull,
    OPC_DEP_Z_I12             = 0x0a600000000ull,
    OPC_EXTR_I11              = 0x0a400002000ull,
    OPC_EXTR_U_I11            = 0x0a400000000ull,
    OPC_FCVT_FX_TRUNC_S1_F10  = 0x004d0000000ull,
    OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
    OPC_FCVT_XF_F11           = 0x000e0000000ull,
    OPC_FMA_S1_F1             = 0x10400000000ull,
    OPC_FNMA_S1_F1            = 0x18400000000ull,
    OPC_FRCPA_S1_F6           = 0x00600000000ull,
    OPC_GETF_SIG_M19          = 0x08708000000ull,
    OPC_LD1_M1                = 0x08000000000ull,
    OPC_LD1_M3                = 0x0a000000000ull,
    OPC_LD2_M1                = 0x08040000000ull,
    OPC_LD2_M3                = 0x0a040000000ull,
    OPC_LD4_M1                = 0x08080000000ull,
    OPC_LD4_M3                = 0x0a080000000ull,
    OPC_LD8_M1                = 0x080c0000000ull,
    OPC_LD8_M3                = 0x0a0c0000000ull,
    OPC_MF_M24                = 0x00110000000ull,
    OPC_MUX1_I3               = 0x0eca0000000ull,
    OPC_NOP_B9                = 0x04008000000ull,
    OPC_NOP_F16               = 0x00008000000ull,
    OPC_NOP_I18               = 0x00008000000ull,
    OPC_NOP_M48               = 0x00008000000ull,
    OPC_MOV_I21               = 0x00e00100000ull,
    OPC_MOV_RET_I21           = 0x00e00500000ull,
    OPC_MOV_I22               = 0x00188000000ull,
    OPC_MOV_I_I26             = 0x00150000000ull,
    OPC_MOVL_X2               = 0x0c000000000ull,
    OPC_OR_A1                 = 0x10070000000ull,
    OPC_OR_A3                 = 0x10170000000ull,
    OPC_SETF_EXP_M18          = 0x0c748000000ull,
    OPC_SETF_SIG_M18          = 0x0c708000000ull,
    OPC_SHL_I7                = 0x0f240000000ull,
    OPC_SHR_I5                = 0x0f220000000ull,
    OPC_SHR_U_I5              = 0x0f200000000ull,
    OPC_SHRP_I10              = 0x0ac00000000ull,
    OPC_SXT1_I29              = 0x000a0000000ull,
    OPC_SXT2_I29              = 0x000a8000000ull,
    OPC_SXT4_I29              = 0x000b0000000ull,
    OPC_ST1_M4                = 0x08c00000000ull,
    OPC_ST2_M4                = 0x08c40000000ull,
    OPC_ST4_M4                = 0x08c80000000ull,
    OPC_ST8_M4                = 0x08cc0000000ull,
    OPC_SUB_A1                = 0x10028000000ull,
    OPC_SUB_A3                = 0x10128000000ull,
    OPC_UNPACK4_L_I2          = 0x0f860000000ull,
    OPC_XMA_L_F2              = 0x1d000000000ull,
    OPC_XOR_A1                = 0x10078000000ull,
    OPC_XOR_A3                = 0x10178000000ull,
    OPC_ZXT1_I29              = 0x00080000000ull,
    OPC_ZXT2_I29              = 0x00088000000ull,
    OPC_ZXT4_I29              = 0x00090000000ull,

    INSN_NOP_M                = OPC_NOP_M48, /* nop.m 0 */
    INSN_NOP_I                = OPC_NOP_I18, /* nop.i 0 */
};

static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x80) << 29) /* s */
           | ((imm & 0x7f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x2000) << 23) /* s */
           | ((imm & 0x1f80) << 20) /* imm6d */
           | ((imm & 0x007f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    return opc
           | ((imm & 0x200000) << 15) /* s */
           | ((imm & 0x1f0000) << 6)  /* imm5c */
           | ((imm & 0x00ff80) << 20) /* imm9d */
           | ((imm & 0x00007f) << 13) /* imm7b */
           | ((r3 & 0x03) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}
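
/* A note on the A5 form encoded above: "addl" is the A-unit insn with the
   largest immediate (22 bits, split into the s, imm5c, imm9d and imm7b
   fields), and its r3 source is restricted to r0..r3, which is why r3 is
   masked with 0x03 rather than 0x7f.  tcg_opc_movi_a() further down relies
   on this, emitting "addl dst = imm22, r0" to materialise any sign-extended
   22-bit constant in a single instruction. */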

static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
                                  int p2, int r2, int r3)
{
    return opc
           | ((p2 & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((p1 & 0x3f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* s */
           | ((imm & 0x0fffff) << 13) /* imm20b */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* s */
           | ((imm & 0x0fffff) << 13) /* imm20b */
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    return opc
           | ((f4 & 0x7f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    return opc
           | ((f4 & 0x7f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
                                  int p2, int f2, int f3)
{
    return opc
           | ((p2 & 0x3f) << 27)
           | ((f3 & 0x7f) << 20)
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
                                  int r2, int mbtype)
{
    return opc
           | ((mbtype & 0x0f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
                                  int r3, int r2)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
                                   int r2, int r3, uint64_t count)
{
    return opc
           | ((count & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((len & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((pos & 0x3f) << 14)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
                                   int r2, uint64_t pos, uint64_t len)
{
    return opc
           | ((len & 0x3f) << 27)
           | ((pos & 0x3f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((imm & 0x01) << 36)
           | ((len & 0x3f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((pos & 0x3f) << 14)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
                                   int r3, uint64_t pos, uint64_t len)
{
    return opc
           | ((pos & 0x3f) << 31)
           | ((len & 0x0f) << 27)
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
                                   int r2, uint64_t imm)
{
    return opc
           | ((imm & 0x1ff) << 24)
           | ((r2 & 0x7f) << 13)
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
{
    return opc
           | ((b2 & 0x7) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
{
    return opc
           | ((ar3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_l2(uint64_t imm)
{
    return (imm & 0x7fffffffffc00000ull) >> 22;
}

static inline uint64_t tcg_opc_l3(uint64_t imm)
{
    return (imm & 0x07fffffffff00000ull) >> 18;
}

#define tcg_opc_l4 tcg_opc_l3

static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
                                  int r3, uint64_t imm)
{
    return opc
           | ((imm & 0x100) << 28) /* s */
           | ((imm & 0x080) << 20) /* i */
           | ((imm & 0x07f) << 13) /* imm7b */
           | ((r3 & 0x7f) << 20)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
{
    return opc
           | ((r3 & 0x7f) << 20)
           | ((r2 & 0x7f) << 13)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
{
    return opc
           | ((r2 & 0x7f) << 13)
           | ((f1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
{
    return opc
           | ((f2 & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
                                   int sof, int sol, int sor)
{
    return opc
           | ((sor & 0x0f) << 27)
           | ((sol & 0x7f) << 20)
           | ((sof & 0x7f) << 13)
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x100000) << 16) /* i */
           | ((imm & 0x0fffff) << 6)  /* imm20a */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
                                  int r1, uint64_t imm)
{
    return opc
           | ((imm & 0x8000000000000000ull) >> 27) /* i */
           |  (imm & 0x0000000000200000ull)        /* ic */
           | ((imm & 0x00000000001f0000ull) << 6)  /* imm5c */
           | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
           | ((imm & 0x000000000000007full) << 13) /* imm7b */
           | ((r1 & 0x7f) << 6)
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
{
    return opc
           | ((imm & 0x0800000000000000ull) >> 23) /* i */
           | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
           | (qp & 0x3f);
}

static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
{
    return opc
           | ((imm & 0x0800000000000000ull) >> 23) /* i */
           | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
           | ((b1 & 0x7) << 6)
           | (qp & 0x3f);
}

/*
 * Relocations - Note that we never encode branches elsewhere than slot 2.
 */

static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    uint64_t imm = target - pc;

    pc->hi = (pc->hi & 0xf700000fffffffffull)
             | ((imm & 0x100000) << 39)  /* s */
             | ((imm & 0x0fffff) << 36); /* imm20b */
}

static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
{
    int64_t high = pc->hi;

    return ((high >> 39) & 0x100000) + /* s */
           ((high >> 36) & 0x0fffff);  /* imm20b */
}
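
/* Slot 2 of a bundle lives entirely in the high word of a tcg_insn_unit
   (bits 23..63 of pc->hi), so for a B1/B3-format branch placed there the
   imm20b field lands in bits 36..55 and the sign bit in bit 59 of pc->hi --
   exactly the bits cleared by the 0xf700000fffffffffull mask in
   reloc_pcrel21b_slot2() above. */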

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == R_IA64_PCREL21B);
    reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
}

/*
 * Constraints
 */

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S22;
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
#endif
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           r0 is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
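
/* Quick reference for the constraint letters handled above:
     'r'  any general register
     'I'  a signed 22-bit immediate (fits the addl A5 form)
     'S'  like 'r', but without R56-R58 under softmmu, which the qemu_ld/st
          slow path uses for its helper call arguments
     'Z'  the constant zero, satisfied for free via register r0 */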

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
        return 1;
    else
        return 0;
}

/*
 * Code generation
 */

static tcg_insn_unit *tb_ret_addr;
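
/* An IA-64 bundle is 128 bits: a 5-bit template plus three 41-bit slots.
   tcg_out_bundle() below packs it into the two 64-bit halves of a
   tcg_insn_unit: the template in bits 0-4 and slot 0 in bits 5-45 of the
   low word, slot 1 split across the word boundary, and slot 2 in the top
   41 bits of the high word.  A typical caller (this is tcg_out_movi,
   further down) looks like:

       tcg_out_bundle(s, mLX,
                      INSN_NOP_M,
                      tcg_opc_l2 (arg),
                      tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
*/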

static inline void tcg_out_bundle(TCGContext *s, int template,
                                  uint64_t slot0, uint64_t slot1,
                                  uint64_t slot2)
{
    template &= 0x1f;          /* 5 bits */
    slot0 &= 0x1ffffffffffull; /* 41 bits */
    slot1 &= 0x1ffffffffffull; /* 41 bits */
    slot2 &= 0x1ffffffffffull; /* 41 bits */

    *s->code_ptr++ = (tcg_insn_unit){
        (slot1 << 46) | (slot0 << 5) | template,
        (slot2 << 23) | (slot1 >> 18)
    };
}

static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
{
    return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_mov_a(TCG_REG_P0, ret, arg));
}

static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
{
    tcg_debug_assert(src == sextract64(src, 0, 22));
    return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l2 (arg),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
}

static void tcg_out_br(TCGContext *s, TCGLabel *l)
{
    uint64_t imm;

    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    if (l->has_value) {
        imm = l->u.value_ptr - s->code_ptr;
    } else {
        imm = get_reloc_pcrel21b_slot2(s->code_ptr);
        tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
    }

    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
}

static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
{
    uintptr_t func = desc->lo, gp = desc->hi, disp;

    /* Look through the function descriptor. */
    tcg_out_bundle(s, mlx,
                   INSN_NOP_M,
                   tcg_opc_l2 (gp),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
    disp = (tcg_insn_unit *)func - s->code_ptr;
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l4 (disp),
                   tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
                               TCG_REG_B0, disp));
}
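
/* On IA-64 a C function pointer is really a function descriptor: a pair of
   64-bit words holding the entry address and the callee's gp.  The sequence
   above therefore loads desc->hi into r1 (the gp register) before doing a
   long call to desc->lo; tcg_out_tb_finalize() uses the same trick for the
   slow-path helper thunks. */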

static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
{
    uint64_t imm, opc1;

    /* At least arg == 0 is a common operation. */
    if (arg == sextract64(arg, 0, 22)) {
        opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
        opc1 = INSN_NOP_M;
    }

    imm = tb_ret_addr - s->code_ptr;

    tcg_out_bundle(s, mLX,
                   opc1,
                   tcg_opc_l3 (imm),
                   tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
}

static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
{
    if (s->tb_jmp_insn_offset) {
        /* direct jump method */
        tcg_abort();
    } else {
        /* indirect jump method */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
                     (tcg_target_long)(s->tb_jmp_target_addr + arg));
        tcg_out_bundle(s, MmI,
                       tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
                                   TCG_REG_R2, TCG_REG_R2),
                       INSN_NOP_M,
                       tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
                                   TCG_REG_R2, 0));
        tcg_out_bundle(s, mmB,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
                                   TCG_REG_B6));
    }
    s->tb_jmp_reset_offset[arg] = tcg_current_code_size(s);
}

static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
}

static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}

static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
    } else {
        tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
    }
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
    } else {
        tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    if (val == 0) {
        tcg_out_st(s, type, TCG_REG_R0, base, ofs);
        return true;
    }
    return false;
}

static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
                               TCGReg ret, TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    uint64_t opc1 = 0, opc2 = 0, opc3 = 0;

    if (const_arg2 && arg2 != 0) {
        opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
        arg2 = TCG_REG_R3;
    }
    if (const_arg1 && arg1 != 0) {
        if (opc_a3 && arg1 == (int8_t)arg1) {
            opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
        } else {
            opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
            arg1 = TCG_REG_R2;
        }
    }
    if (opc3 == 0) {
        opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
    }

    tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
                   opc1 ? opc1 : INSN_NOP_M,
                   opc2 ? opc2 : INSN_NOP_I,
                   opc3);
}

static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
                               TCGArg arg2, int const_arg2)
{
    if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
    }
}

static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
                               int const_arg1, TCGArg arg2, int const_arg2)
{
    if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
                    arg1, const_arg1, arg2, const_arg2);
    }
}

static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
                                TCGArg arg1, int const_arg1,
                                TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
}

static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
                               TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
                   tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
}

static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
                               TCGArg arg1, TCGArg arg2)
{
    tcg_out_bundle(s, mmI,
                   tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
                   tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
                   INSN_NOP_I);
    tcg_out_bundle(s, mmF,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
                               TCG_REG_F7, TCG_REG_F0));
    tcg_out_bundle(s, miI,
                   tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
                   INSN_NOP_I,
                   INSN_NOP_I);
}

static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
                                   ret, arg1, arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
                                   TCG_REG_R3, 0x1f, arg2),
                       tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
                                   ret, arg1, arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
    }
}

static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
                                   arg1, 63 - arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
                                   0x1f, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
                                   arg1, TCG_REG_R2));
    }
}

static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
                                   arg1, 63 - arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
                                   arg1, arg2));
    }
}

static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   arg1, arg2, 31 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2),
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                   TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   arg1, arg2, 63 - arg2));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   arg1, arg2));
    }
}

static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   TCG_REG_R2, 32 - arg2, 31));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2));
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
                                   0x20, TCG_REG_R3),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
                                   arg1, 0x40 - arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
                                   0x40, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
                                   arg1, arg2),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
                                   arg1, TCG_REG_R2));
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
                                   TCG_REG_R2, arg2, 31));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
                                   0x1f, arg2),
                       tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
                                   TCG_REG_R2, arg1, arg1),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
                                    TCGArg arg2, int const_arg2)
{
    if (const_arg2) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
                                   arg1, arg2));
    } else {
        tcg_out_bundle(s, mII,
                       tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
                                   0x40, arg2),
                       tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
                                   arg1, arg2),
                       tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
                                   arg1, TCG_REG_R2));
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
                                   TCG_REG_R2, TCG_REG_R3));
    }
}

static const uint64_t opc_ext_i29[8] = {
    OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
    OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
};

static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
{
    if ((opc & MO_SIZE) == MO_64) {
        return tcg_opc_mov_a(qp, d, s);
    } else {
        return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
    }
}
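
/* opc_ext_i29[] is indexed by MO_SSIZE: entries 0-2 are the zero extensions
   (zxt1/zxt2/zxt4) and entries 4-6 the sign extensions (sxt1/sxt2/sxt4).
   64-bit "extensions" degenerate into a plain register move, which is what
   tcg_opc_ext_i() returns for MO_64 above. */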

static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
                               TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   INSN_NOP_I,
                   tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
}

static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
{
    return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
}

static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
}

static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, mII,
                   INSN_NOP_M,
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
}

static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
{
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   INSN_NOP_I,
                   tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
}

static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
                                   TCGArg a2, int const_a2, int pos, int len)
{
    uint64_t i1 = 0, i2 = 0;
    int cpos = 63 - pos, lm1 = len - 1;

    if (const_a2) {
        /* Truncate the value of a constant a2 to the width of the field. */
        int mask = (1u << len) - 1;
        a2 &= mask;

        if (a2 == 0 || a2 == mask) {
            /* 1-bit signed constant inserted into register. */
            i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
        } else {
            /* Otherwise, load any constant into a temporary. Do this into
               the first I slot to help out with cross-unit delays. */
            i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
            a2 = TCG_REG_R2;
        }
    }
    if (i2 == 0) {
        i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
    }
    tcg_out_bundle(s, (i1 ? mII : miI),
                   INSN_NOP_M,
                   i1 ? i1 : INSN_NOP_I,
                   i2);
}

static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
                                     TCGArg arg2, int cmp4)
{
    uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;

    if (cmp4) {
        opc_eq_a6 = OPC_CMP4_EQ_A6;
        opc_lt_a6 = OPC_CMP4_LT_A6;
        opc_ltu_a6 = OPC_CMP4_LTU_A6;
    } else {
        opc_eq_a6 = OPC_CMP_EQ_A6;
        opc_lt_a6 = OPC_CMP_LT_A6;
        opc_ltu_a6 = OPC_CMP_LTU_A6;
    }

    switch (cond) {
    case TCG_COND_EQ:
        return tcg_opc_a6 (qp, opc_eq_a6,  TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_NE:
        return tcg_opc_a6 (qp, opc_eq_a6,  TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_LT:
        return tcg_opc_a6 (qp, opc_lt_a6,  TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_LTU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
    case TCG_COND_GE:
        return tcg_opc_a6 (qp, opc_lt_a6,  TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_GEU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
    case TCG_COND_LE:
        return tcg_opc_a6 (qp, opc_lt_a6,  TCG_REG_P7, TCG_REG_P6, arg2, arg1);
    case TCG_COND_LEU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
    case TCG_COND_GT:
        return tcg_opc_a6 (qp, opc_lt_a6,  TCG_REG_P6, TCG_REG_P7, arg2, arg1);
    case TCG_COND_GTU:
        return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
    default:
        tcg_abort();
        break;
    }
}
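
/* All comparisons funnel through tcg_opc_cmp_a() with a fixed predicate
   convention: P6 is set when the requested condition holds and P7 when it
   does not (conditions without a direct A6 encoding simply swap the
   predicate targets or the operands).  brcond/setcond/movcond below then
   predicate their instructions on P6/P7 instead of first materialising a
   boolean value in a general register. */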

static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
                                  TCGReg arg2, TCGLabel *l, int cmp4)
{
    uint64_t imm;

    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    if (l->has_value) {
        imm = l->u.value_ptr - s->code_ptr;
    } else {
        imm = get_reloc_pcrel21b_slot2(s->code_ptr);
        tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
    }

    tcg_out_bundle(s, miB,
                   INSN_NOP_M,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
                   tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
}

static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
                                   TCGArg arg1, TCGArg arg2, int cmp4)
{
    tcg_out_bundle(s, MmI,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
                   tcg_opc_movi_a(TCG_REG_P6, ret, 1),
                   tcg_opc_movi_a(TCG_REG_P7, ret, 0));
}

static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
                                   TCGArg c1, TCGArg c2,
                                   TCGArg v1, int const_v1,
                                   TCGArg v2, int const_v2, int cmp4)
{
    uint64_t opc1, opc2;

    if (const_v1) {
        opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
    } else if (ret == v1) {
        opc1 = INSN_NOP_M;
    } else {
        opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
    }
    if (const_v2) {
        opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
    } else if (ret == v2) {
        opc2 = INSN_NOP_I;
    } else {
        opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
    }

    tcg_out_bundle(s, MmI,
                   tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
                   opc1,
                   opc2);
}

#if defined(CONFIG_SOFTMMU)
/* We're expecting to use a signed 22-bit immediate add. */
QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
                  > 0x1fffff)

/* Load and compare a TLB entry, and return the result in (p6, p7).
   R2 is loaded with the addend TLB entry.
   R57 is loaded with the address, zero extended on 32-bit targets.
   R1, R3 are clobbered, leaving R56 free for...
   BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store. */
static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
                                    TCGMemOp opc, int off_rw, int off_add,
                                    uint64_t bswap1, uint64_t bswap2)
{
    unsigned s_bits = opc & MO_SIZE;
    unsigned a_bits = get_alignment_bits(opc);

    /* We don't support unaligned accesses, but overalignment is easy. */
    if (a_bits < s_bits) {
        a_bits = s_bits;
    }

    /*
       .mii
       mov    r2 = off_rw
       extr.u r3 = addr_reg, ...          # extract tlb page
       zxt4   r57 = addr_reg              # or mov for 64-bit guest
       ;;
       .mii
       addl   r2 = r2, areg0
       shl    r3 = r3, cteb               # via dep.z
       dep    r1 = 0, r57, ...            # zero page ofs, keep align
       ;;
       .mmi
       add    r2 = r2, r3
       ;;
       ld4    r3 = [r2], off_add-off_rw   # or ld8 for 64-bit guest
       nop
       ;;
       .mmi
       nop
       cmp.eq p6, p7 = r3, r58
       nop
       ;;
     */
    tcg_out_bundle(s, miI,
                   tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
                   tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
                               addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
                   tcg_opc_ext_i(TCG_REG_P0,
                                 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
                                 TCG_REG_R57, addr_reg));
    tcg_out_bundle(s, miI,
                   tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_AREG0),
                   tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
                               TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
                               63 - CPU_TLB_ENTRY_BITS),
                   tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
                               TCG_REG_R57, 63 - a_bits,
                               TARGET_PAGE_BITS - a_bits - 1));
    tcg_out_bundle(s, MmI,
                   tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                               TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
                   tcg_opc_m3 (TCG_REG_P0,
                               (TARGET_LONG_BITS == 32
                                ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
                               TCG_REG_R2, off_add - off_rw),
                   bswap1);
    tcg_out_bundle(s, mmI,
                   tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
                   tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
                               TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
                   bswap2);
}

typedef struct TCGLabelQemuLdst {
    bool is_ld;
    TCGMemOp size;
    tcg_insn_unit *label_ptr;     /* label pointers to be updated */
    struct TCGLabelQemuLdst *next;
} TCGLabelQemuLdst;

typedef struct TCGBackendData {
    TCGLabelQemuLdst *labels;
} TCGBackendData;

static inline void tcg_out_tb_init(TCGContext *s)
{
    s->be->labels = NULL;
}

static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
                                tcg_insn_unit *label_ptr)
{
    TCGBackendData *be = s->be;
    TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));

    l->is_ld = is_ld;
    l->size = opc & MO_SIZE;
    l->label_ptr = label_ptr;
    l->next = be->labels;
    be->labels = l;
}

static bool tcg_out_tb_finalize(TCGContext *s)
{
    static const void * const helpers[8] = {
        helper_ret_stb_mmu,
        helper_le_stw_mmu,
        helper_le_stl_mmu,
        helper_le_stq_mmu,
        helper_ret_ldub_mmu,
        helper_le_lduw_mmu,
        helper_le_ldul_mmu,
        helper_le_ldq_mmu,
    };
    tcg_insn_unit *thunks[8] = { };
    TCGLabelQemuLdst *l;

    for (l = s->be->labels; l != NULL; l = l->next) {
        long x = l->is_ld * 4 + l->size;
        tcg_insn_unit *dest = thunks[x];

        /* The out-of-line thunks are all the same; load the return address
           from B0, load the GP, and branch to the code. Note that we are
           always post-call, so the register window has rolled, so we're
           using incoming parameter register numbers, not outgoing. */
        if (dest == NULL) {
            uintptr_t *desc = (uintptr_t *)helpers[x];
            uintptr_t func = desc[0], gp = desc[1], disp;

            thunks[x] = dest = s->code_ptr;

            tcg_out_bundle(s, mlx,
                           INSN_NOP_M,
                           tcg_opc_l2 (gp),
                           tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
                                       TCG_REG_R1, gp));
            tcg_out_bundle(s, mii,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
                                       l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
                                       TCG_REG_B0));
            disp = (tcg_insn_unit *)func - s->code_ptr;
            tcg_out_bundle(s, mLX,
                           INSN_NOP_M,
                           tcg_opc_l3 (disp),
                           tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
        }

        reloc_pcrel21b_slot2(l->label_ptr, dest);

        /* Test for (pending) buffer overflow. The assumption is that any
           one operation beginning below the high water mark cannot overrun
           the buffer completely. Thus we can test for overflow after
           generating code without having to check during generation. */
        if (unlikely((void *)s->code_ptr > s->code_gen_highwater)) {
            return false;
        }
    }
    return true;
}

static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
{
    static const uint64_t opc_ld_m1[4] = {
        OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
    };
    int addr_reg, data_reg, mem_index;
    TCGMemOpIdx oi;
    TCGMemOp opc, s_bits;
    uint64_t fin1, fin2;
    tcg_insn_unit *label_ptr;

    data_reg = args[0];
    addr_reg = args[1];
    oi = args[2];
    opc = get_memop(oi);
    mem_index = get_mmuidx(oi);
    s_bits = opc & MO_SIZE;

    /* Read the TLB entry */
    tcg_out_qemu_tlb(s, addr_reg, opc,
                     offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
                     offsetof(CPUArchState, tlb_table[mem_index][0].addend),
                     INSN_NOP_I, INSN_NOP_I);

    /* P6 is the fast path, and P7 the slow path */

    fin2 = 0;
    if (opc & MO_BSWAP) {
        fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
        if (s_bits < MO_64) {
            int shift = 64 - (8 << s_bits);
            fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
            fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
                               data_reg, data_reg, shift, 63 - shift);
        }
    } else {
        fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
    }

    tcg_out_bundle(s, mmI,
                   tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
                   tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_REG_R57),
                   tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
    label_ptr = s->code_ptr;
    tcg_out_bundle(s, miB,
                   tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
                               TCG_REG_R8, TCG_REG_R2),
                   INSN_NOP_I,
                   tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
                               get_reloc_pcrel21b_slot2(label_ptr)));

    add_qemu_ldst_label(s, 1, opc, label_ptr);

    /* Note that we always use LE helper functions, so the bswap insns
       here for the fast path also apply to the slow path. */
    tcg_out_bundle(s, (fin2 ? mII : miI),
                   INSN_NOP_M,
                   fin1,
                   fin2 ? fin2 : INSN_NOP_I);
}

static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
{
    static const uint64_t opc_st_m4[4] = {
        OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
    };
    TCGReg addr_reg, data_reg;
    int mem_index;
    uint64_t pre1, pre2;
    TCGMemOpIdx oi;
    TCGMemOp opc, s_bits;
    tcg_insn_unit *label_ptr;

    data_reg = args[0];
    addr_reg = args[1];
    oi = args[2];
    opc = get_memop(oi);
    mem_index = get_mmuidx(oi);
    s_bits = opc & MO_SIZE;

    /* Note that we always use LE helper functions, so the bswap insns
       that are here for the fast path also apply to the slow path,
       and move the data into the argument register. */
    pre2 = INSN_NOP_I;
    if (opc & MO_BSWAP) {
        pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
        if (s_bits < MO_64) {
            int shift = 64 - (8 << s_bits);
            pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
                               TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
        }
    } else {
        /* Just move the data into place for the slow path. */
        pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
    }

    tcg_out_qemu_tlb(s, addr_reg, opc,
                     offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
                     offsetof(CPUArchState, tlb_table[mem_index][0].addend),
                     pre1, pre2);

    /* P6 is the fast path, and P7 the slow path */
    tcg_out_bundle(s, mmI,
                   tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
                   tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
                               TCG_REG_R2, TCG_REG_R57),
                   tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
    label_ptr = s->code_ptr;
    tcg_out_bundle(s, miB,
                   tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
                               TCG_REG_R58, TCG_REG_R2),
                   INSN_NOP_I,
                   tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
                               get_reloc_pcrel21b_slot2(label_ptr)));

    add_qemu_ldst_label(s, 0, opc, label_ptr);
}

#else /* !CONFIG_SOFTMMU */
# include "tcg-be-null.h"

static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
{
    static uint64_t const opc_ld_m1[4] = {
        OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
    };
    int addr_reg, data_reg;
    TCGMemOp opc, s_bits, bswap;

    data_reg = args[0];
    addr_reg = args[1];
    opc = args[2];
    s_bits = opc & MO_SIZE;
    bswap = opc & MO_BSWAP;

#if TARGET_LONG_BITS == 32
    if (guest_base != 0) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R3, addr_reg),
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, TCG_REG_R3));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R2, addr_reg),
                       INSN_NOP_I);
    }

    if (!bswap) {
        if (!(opc & MO_SIGN)) {
            tcg_out_bundle(s, miI,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           INSN_NOP_I);
        } else {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
        }
    } else if (s_bits == MO_64) {
        tcg_out_bundle(s, mII,
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, TCG_REG_R2),
                       INSN_NOP_I,
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       data_reg, data_reg, 15, 15));
        } else {
            tcg_out_bundle(s, mII,
                           tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                       data_reg, TCG_REG_R2),
                           INSN_NOP_I,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       data_reg, data_reg, 31, 31));
        }
        if (!(opc & MO_SIGN)) {
            tcg_out_bundle(s, miI,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
        } else {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
                           tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
        }
    }
#else
    if (guest_base != 0) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, addr_reg),
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I);
    }

    if (bswap && s_bits == MO_16) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                   data_reg, data_reg, 15, 15),
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else if (bswap && s_bits == MO_32) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                   data_reg, data_reg, 31, 31),
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    } else if (bswap && s_bits == MO_64) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
    }
    if (opc & MO_SIGN) {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       INSN_NOP_I,
                       tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
    }
#endif
}

static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
{
    static uint64_t const opc_st_m4[4] = {
        OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
    };
    int addr_reg, data_reg;
#if TARGET_LONG_BITS == 64
    uint64_t add_guest_base;
#endif
    TCGMemOp opc, s_bits, bswap;

    data_reg = args[0];
    addr_reg = args[1];
    opc = args[2];
    s_bits = opc & MO_SIZE;
    bswap = opc & MO_BSWAP;

#if TARGET_LONG_BITS == 32
    if (guest_base != 0) {
        tcg_out_bundle(s, mII,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R3, addr_reg),
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                   TCG_GUEST_BASE_REG, TCG_REG_R3));
    } else {
        tcg_out_bundle(s, miI,
                       INSN_NOP_M,
                       tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
                                   TCG_REG_R2, addr_reg),
                       INSN_NOP_I);
    }

    if (bswap) {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 15, 15),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_32) {
            tcg_out_bundle(s, mII,
                           INSN_NOP_M,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 31, 31),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_64) {
            tcg_out_bundle(s, miI,
                           INSN_NOP_M,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
            data_reg = TCG_REG_R3;
        }
    }
    tcg_out_bundle(s, mmI,
                   tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                               data_reg, TCG_REG_R2),
                   INSN_NOP_M,
                   INSN_NOP_I);
#else
    if (guest_base != 0) {
        add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
                                     TCG_GUEST_BASE_REG, addr_reg);
        addr_reg = TCG_REG_R2;
    } else {
        add_guest_base = INSN_NOP_M;
    }

    if (!bswap) {
        tcg_out_bundle(s, (guest_base ? MmI : mmI),
                       add_guest_base,
                       tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I);
    } else {
        if (s_bits == MO_16) {
            tcg_out_bundle(s, mII,
                           add_guest_base,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 15, 15),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_32) {
            tcg_out_bundle(s, mII,
                           add_guest_base,
                           tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
                                       TCG_REG_R3, data_reg, 31, 31),
                           tcg_opc_bswap64_i(TCG_REG_P0,
                                             TCG_REG_R3, TCG_REG_R3));
            data_reg = TCG_REG_R3;
        } else if (s_bits == MO_64) {
            tcg_out_bundle(s, miI,
                           add_guest_base,
                           INSN_NOP_I,
                           tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
            data_reg = TCG_REG_R3;
        }
        tcg_out_bundle(s, miI,
                       tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
                                   data_reg, addr_reg),
                       INSN_NOP_I,
                       INSN_NOP_I);
    }
#endif
}

#endif
2010 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
2011 const TCGArg *args, const int *const_args)
2013 switch(opc) {
2014 case INDEX_op_exit_tb:
2015 tcg_out_exit_tb(s, args[0]);
2016 break;
2017 case INDEX_op_br:
2018 tcg_out_br(s, arg_label(args[0]));
2019 break;
2020 case INDEX_op_goto_tb:
2021 tcg_out_goto_tb(s, args[0]);
2022 break;
2024 case INDEX_op_ld8u_i32:
2025 case INDEX_op_ld8u_i64:
2026 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2027 break;
2028 case INDEX_op_ld8s_i32:
2029 case INDEX_op_ld8s_i64:
2030 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2031 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2032 break;
2033 case INDEX_op_ld16u_i32:
2034 case INDEX_op_ld16u_i64:
2035 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2036 break;
2037 case INDEX_op_ld16s_i32:
2038 case INDEX_op_ld16s_i64:
2039 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2040 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2041 break;
2042 case INDEX_op_ld_i32:
2043 case INDEX_op_ld32u_i64:
2044 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2045 break;
2046 case INDEX_op_ld32s_i64:
2047 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2048 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2049 break;
2050 case INDEX_op_ld_i64:
2051 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2052 break;
2053 case INDEX_op_st8_i32:
2054 case INDEX_op_st8_i64:
2055 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2056 break;
2057 case INDEX_op_st16_i32:
2058 case INDEX_op_st16_i64:
2059 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2060 break;
2061 case INDEX_op_st_i32:
2062 case INDEX_op_st32_i64:
2063 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2064 break;
2065 case INDEX_op_st_i64:
2066 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2067 break;
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
        tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
        tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
        break;

    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
        tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
        tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
                    args[1], const_args[1], args[2], const_args[2]);
        break;
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
        tcg_out_eqv(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
        tcg_out_nand(s, args[0], args[1], const_args[1],
                     args[2], const_args[2]);
        break;
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
        tcg_out_nor(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
        tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
        tcg_out_orc(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
        tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;

    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
        tcg_out_mul(s, args[0], args[1], args[2]);
        break;

    case INDEX_op_sar_i32:
        tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sar_i64:
        tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i32:
        tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i64:
        tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i32:
        tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i64:
        tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i32:
        tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i64:
        tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i32:
        tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i64:
        tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
        tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
        tcg_out_bswap32(s, args[0], args[1]);
        break;
    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
                        args[3], args[4]);
        break;
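
    /* Conditional branches, setcond and movcond.  The trailing flag picks
       the 32-bit comparison form (1) instead of the full 64-bit one (0). */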
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
        break;
    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 1);
        break;
    case INDEX_op_movcond_i64:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 0);
        break;
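
    /* Guest memory accesses.  These use the qemu_ld/st code emitted above:
       a TLB lookup and possible helper call under CONFIG_SOFTMMU, or a
       direct guest_base-relative access in user-only mode. */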
    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args);
        break;
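
    /* Memory barrier: mf is the architectural ia64 memory fence. */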
    case INDEX_op_mb:
        tcg_out_bundle(s, mmI, OPC_MF_M24, INSN_NOP_M, INSN_NOP_I);
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
}
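
/* Operand constraints for each opcode.  "r" means any general register and
   "i" any constant; the backend-specific letters ("Z", "I", "S") are decoded
   by target_parse_constraint() earlier in this file ("Z" in particular allows
   the constant zero, which maps onto the hard-wired zero register r0). */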
static const TCGTargetOpDef ia64_op_defs[] = {
    { INDEX_op_br, { } },
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i32, { "r", "rI", "rI" } },

    { INDEX_op_and_i32, { "r", "rI", "rI" } },
    { INDEX_op_andc_i32, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i32, { "r", "rI", "rI" } },
    { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i32, { "r", "rI", "rI" } },

    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext8u_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },
    { INDEX_op_ext16u_i32, { "r", "rZ" } },

    { INDEX_op_bswap16_i32, { "r", "rZ" } },
    { INDEX_op_bswap32_i32, { "r", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "rZ", "r" } },
    { INDEX_op_st16_i64, { "rZ", "r" } },
    { INDEX_op_st32_i64, { "rZ", "r" } },
    { INDEX_op_st_i64, { "rZ", "r" } },

    { INDEX_op_add_i64, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i64, { "r", "rI", "rI" } },

    { INDEX_op_and_i64, { "r", "rI", "rI" } },
    { INDEX_op_andc_i64, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i64, { "r", "rI", "rI" } },
    { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i64, { "r", "rI", "rI" } },

    { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i64, { "r", "rZ" } },
    { INDEX_op_ext8u_i64, { "r", "rZ" } },
    { INDEX_op_ext16s_i64, { "r", "rZ" } },
    { INDEX_op_ext16u_i64, { "r", "rZ" } },
    { INDEX_op_ext32s_i64, { "r", "rZ" } },
    { INDEX_op_ext32u_i64, { "r", "rZ" } },
    { INDEX_op_ext_i32_i64, { "r", "rZ" } },
    { INDEX_op_extu_i32_i64, { "r", "rZ" } },

    { INDEX_op_bswap16_i64, { "r", "rZ" } },
    { INDEX_op_bswap32_i64, { "r", "rZ" } },
    { INDEX_op_bswap64_i64, { "r", "rZ" } },

    { INDEX_op_brcond_i64, { "rZ", "rZ" } },
    { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
    { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },

    { INDEX_op_qemu_ld_i32, { "r", "r" } },
    { INDEX_op_qemu_ld_i64, { "r", "r" } },
    { INDEX_op_qemu_st_i32, { "SZ", "r" } },
    { INDEX_op_qemu_st_i64, { "SZ", "r" } },

    { INDEX_op_mb, { } },
    { -1 },
};

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int frame_size;

    /* reserve some stack space */
    frame_size = TCG_STATIC_CALL_ARGS_SIZE +
                 CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
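
    /* An ia64 function pointer is not a raw code address but a pointer to
       a function descriptor holding the entry point and gp value, so the
       generated prologue is preceded by such a descriptor for itself. */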
    /* First emit adhoc function descriptor */
    *s->code_ptr = (tcg_insn_unit){
        (uint64_t)(s->code_ptr + 1), /* entry point */
        0                            /* skip gp */
    };
    s->code_ptr++;

    /* prologue */
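    /* On entry, the ia64 software conventions place the two incoming
       arguments (the CPU env pointer and the address of the TB to execute)
       in r32 and r33.  The alloc saves ar.pfs into r34 and sets up the
       register stack frame; the TB address is copied to b6, the return
       address (b0) is saved in r33, the stack pointer is adjusted, and
       control branches into the TB. */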
    tcg_out_bundle(s, miI,
                   tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
                               TCG_REG_R34, 32, 24, 0),
                   INSN_NOP_I,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B6, TCG_REG_R33, 0));

    /* ??? If guest_base < 0x200000, we could load the register via
       an ADDL in the M slot of the next bundle. */
    if (guest_base != 0) {
        tcg_out_bundle(s, mlx,
                       INSN_NOP_M,
                       tcg_opc_l2(guest_base),
                       tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
                                   TCG_GUEST_BASE_REG, guest_base));
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }

    tcg_out_bundle(s, miB,
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, -frame_size, TCG_REG_R12),
                   tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
                               TCG_REG_R33, TCG_REG_B0),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));

    /* epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B0, TCG_REG_R33, 0),
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, frame_size, TCG_REG_R12));
    tcg_out_bundle(s, miB,
                   INSN_NOP_M,
                   tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
                               TCG_REG_PFS, TCG_REG_R34),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
                               TCG_REG_B0));
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
                   0xffffffffffffffffull);
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
                   0xffffffffffffffffull);
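
    /* Call-clobbered registers: the static scratch registers (r8-r11 and
       r14-r31) plus r56-r63, the stacked output registers this backend
       uses for passing arguments to helper calls. */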
    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);   /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);   /* global pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);  /* thread pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33);  /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34);  /* PFS */

    /* The following 4 are not in use, are call-saved, but *not* saved
       by the prologue. Therefore we cannot use them without modifying
       the prologue. There doesn't seem to be any good reason to use
       these as opposed to the windowed registers. */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);

    tcg_add_target_add_op_defs(ia64_op_defs);
}