migration: size_t'ify some of qemu-file
[qemu/kevin.git] / tcg / ia64 / tcg-target.c
blob647e9a6f299318c2d651f0ffc6435a909a37a8fa
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
5 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
27 * Register definitions
30 #ifndef NDEBUG
31 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
32 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
33 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
34 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
35 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
36 "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
37 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
38 "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
39 "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
41 #endif
43 #ifndef CONFIG_SOFTMMU
44 #define TCG_GUEST_BASE_REG TCG_REG_R55
45 #endif
47 /* Branch registers */
/* Branch registers b0..b7 (b0 holds the return address on call).  */
enum {
    TCG_REG_B0 = 0,
    TCG_REG_B1,
    TCG_REG_B2,
    TCG_REG_B3,
    TCG_REG_B4,
    TCG_REG_B5,
    TCG_REG_B6,
    TCG_REG_B7,
};
59 /* Floating point registers */
/* Floating point registers f0..f15 (f0 reads as +0.0, f1 as +1.0).  */
enum {
    TCG_REG_F0 = 0,
    TCG_REG_F1,
    TCG_REG_F2,
    TCG_REG_F3,
    TCG_REG_F4,
    TCG_REG_F5,
    TCG_REG_F6,
    TCG_REG_F7,
    TCG_REG_F8,
    TCG_REG_F9,
    TCG_REG_F10,
    TCG_REG_F11,
    TCG_REG_F12,
    TCG_REG_F13,
    TCG_REG_F14,
    TCG_REG_F15,
};
79 /* Predicate registers */
/* Predicate registers p0..p15 (p0 is hardwired to true).  */
enum {
    TCG_REG_P0 = 0,
    TCG_REG_P1,
    TCG_REG_P2,
    TCG_REG_P3,
    TCG_REG_P4,
    TCG_REG_P5,
    TCG_REG_P6,
    TCG_REG_P7,
    TCG_REG_P8,
    TCG_REG_P9,
    TCG_REG_P10,
    TCG_REG_P11,
    TCG_REG_P12,
    TCG_REG_P13,
    TCG_REG_P14,
    TCG_REG_P15,
};
99 /* Application registers */
/* Application registers: ar.pfs (previous function state) is ar64.  */
enum {
    TCG_REG_PFS = 64,
};
104 static const int tcg_target_reg_alloc_order[] = {
105 TCG_REG_R35,
106 TCG_REG_R36,
107 TCG_REG_R37,
108 TCG_REG_R38,
109 TCG_REG_R39,
110 TCG_REG_R40,
111 TCG_REG_R41,
112 TCG_REG_R42,
113 TCG_REG_R43,
114 TCG_REG_R44,
115 TCG_REG_R45,
116 TCG_REG_R46,
117 TCG_REG_R47,
118 TCG_REG_R48,
119 TCG_REG_R49,
120 TCG_REG_R50,
121 TCG_REG_R51,
122 TCG_REG_R52,
123 TCG_REG_R53,
124 TCG_REG_R54,
125 TCG_REG_R55,
126 TCG_REG_R14,
127 TCG_REG_R15,
128 TCG_REG_R16,
129 TCG_REG_R17,
130 TCG_REG_R18,
131 TCG_REG_R19,
132 TCG_REG_R20,
133 TCG_REG_R21,
134 TCG_REG_R22,
135 TCG_REG_R23,
136 TCG_REG_R24,
137 TCG_REG_R25,
138 TCG_REG_R26,
139 TCG_REG_R27,
140 TCG_REG_R28,
141 TCG_REG_R29,
142 TCG_REG_R30,
143 TCG_REG_R31,
144 TCG_REG_R56,
145 TCG_REG_R57,
146 TCG_REG_R58,
147 TCG_REG_R59,
148 TCG_REG_R60,
149 TCG_REG_R61,
150 TCG_REG_R62,
151 TCG_REG_R63,
152 TCG_REG_R8,
153 TCG_REG_R9,
154 TCG_REG_R10,
155 TCG_REG_R11
158 static const int tcg_target_call_iarg_regs[8] = {
159 TCG_REG_R56,
160 TCG_REG_R57,
161 TCG_REG_R58,
162 TCG_REG_R59,
163 TCG_REG_R60,
164 TCG_REG_R61,
165 TCG_REG_R62,
166 TCG_REG_R63,
169 static const int tcg_target_call_oarg_regs[] = {
170 TCG_REG_R8
174 * opcode formation
/* Bundle templates: stops (double bar in the IA64 manual) are marked
   with an uppercase letter.  The value is the 5-bit template field.  */
enum {
    mii = 0x00,
    miI = 0x01,
    mIi = 0x02,
    mII = 0x03,
    mlx = 0x04,
    mLX = 0x05,
    mmi = 0x08,
    mmI = 0x09,
    Mmi = 0x0a,
    MmI = 0x0b,
    mfi = 0x0c,
    mfI = 0x0d,
    mmf = 0x0e,
    mmF = 0x0f,
    mib = 0x10,
    miB = 0x11,
    mbb = 0x12,
    mbB = 0x13,
    bbb = 0x16,
    bbB = 0x17,
    mmb = 0x18,
    mmB = 0x19,
    mfb = 0x1c,
    mfB = 0x1d,
};
/* Major opcode patterns, pre-positioned within a 41-bit slot.  The
   suffix names the IA-64 instruction format (A1, M34, X2, ...).  */
enum {
    OPC_ADD_A1                = 0x10000000000ull,
    OPC_AND_A1                = 0x10060000000ull,
    OPC_AND_A3                = 0x10160000000ull,
    OPC_ANDCM_A1              = 0x10068000000ull,
    OPC_ANDCM_A3              = 0x10168000000ull,
    OPC_ADDS_A4               = 0x10800000000ull,
    OPC_ADDL_A5               = 0x12000000000ull,
    OPC_ALLOC_M34             = 0x02c00000000ull,
    OPC_BR_DPTK_FEW_B1        = 0x08400000000ull,
    OPC_BR_SPTK_MANY_B1       = 0x08000001000ull,
    OPC_BR_CALL_SPNT_FEW_B3   = 0x0a200000000ull,
    OPC_BR_SPTK_MANY_B4       = 0x00100001000ull,
    OPC_BR_CALL_SPTK_MANY_B5  = 0x02100001000ull,
    OPC_BR_RET_SPTK_MANY_B4   = 0x00108001100ull,
    OPC_BRL_SPTK_MANY_X3      = 0x18000001000ull,
    OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
    OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
    OPC_CMP_LT_A6             = 0x18000000000ull,
    OPC_CMP_LTU_A6            = 0x1a000000000ull,
    OPC_CMP_EQ_A6             = 0x1c000000000ull,
    OPC_CMP4_LT_A6            = 0x18400000000ull,
    OPC_CMP4_LTU_A6           = 0x1a400000000ull,
    OPC_CMP4_EQ_A6            = 0x1c400000000ull,
    OPC_DEP_I14               = 0x0ae00000000ull,
    OPC_DEP_I15               = 0x08000000000ull,
    OPC_DEP_Z_I12             = 0x0a600000000ull,
    OPC_EXTR_I11              = 0x0a400002000ull,
    OPC_EXTR_U_I11            = 0x0a400000000ull,
    OPC_FCVT_FX_TRUNC_S1_F10  = 0x004d0000000ull,
    OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
    OPC_FCVT_XF_F11           = 0x000e0000000ull,
    OPC_FMA_S1_F1             = 0x10400000000ull,
    OPC_FNMA_S1_F1            = 0x18400000000ull,
    OPC_FRCPA_S1_F6           = 0x00600000000ull,
    OPC_GETF_SIG_M19          = 0x08708000000ull,
    OPC_LD1_M1                = 0x08000000000ull,
    OPC_LD1_M3                = 0x0a000000000ull,
    OPC_LD2_M1                = 0x08040000000ull,
    OPC_LD2_M3                = 0x0a040000000ull,
    OPC_LD4_M1                = 0x08080000000ull,
    OPC_LD4_M3                = 0x0a080000000ull,
    OPC_LD8_M1                = 0x080c0000000ull,
    OPC_LD8_M3                = 0x0a0c0000000ull,
    OPC_MUX1_I3               = 0x0eca0000000ull,
    OPC_NOP_B9                = 0x04008000000ull,
    OPC_NOP_F16               = 0x00008000000ull,
    OPC_NOP_I18               = 0x00008000000ull,
    OPC_NOP_M48               = 0x00008000000ull,
    OPC_MOV_I21               = 0x00e00100000ull,
    OPC_MOV_RET_I21           = 0x00e00500000ull,
    OPC_MOV_I22               = 0x00188000000ull,
    OPC_MOV_I_I26             = 0x00150000000ull,
    OPC_MOVL_X2               = 0x0c000000000ull,
    OPC_OR_A1                 = 0x10070000000ull,
    OPC_OR_A3                 = 0x10170000000ull,
    OPC_SETF_EXP_M18          = 0x0c748000000ull,
    OPC_SETF_SIG_M18          = 0x0c708000000ull,
    OPC_SHL_I7                = 0x0f240000000ull,
    OPC_SHR_I5                = 0x0f220000000ull,
    OPC_SHR_U_I5              = 0x0f200000000ull,
    OPC_SHRP_I10              = 0x0ac00000000ull,
    OPC_SXT1_I29              = 0x000a0000000ull,
    OPC_SXT2_I29              = 0x000a8000000ull,
    OPC_SXT4_I29              = 0x000b0000000ull,
    OPC_ST1_M4                = 0x08c00000000ull,
    OPC_ST2_M4                = 0x08c40000000ull,
    OPC_ST4_M4                = 0x08c80000000ull,
    OPC_ST8_M4                = 0x08cc0000000ull,
    OPC_SUB_A1                = 0x10028000000ull,
    OPC_SUB_A3                = 0x10128000000ull,
    OPC_UNPACK4_L_I2          = 0x0f860000000ull,
    OPC_XMA_L_F2              = 0x1d000000000ull,
    OPC_XOR_A1                = 0x10078000000ull,
    OPC_XOR_A3                = 0x10178000000ull,
    OPC_ZXT1_I29              = 0x00080000000ull,
    OPC_ZXT2_I29              = 0x00088000000ull,
    OPC_ZXT4_I29              = 0x00090000000ull,

    /* Convenience aliases for the unit-specific "nop 0" encodings.  */
    INSN_NOP_M                = OPC_NOP_M48,  /* nop.m 0 */
    INSN_NOP_I                = OPC_NOP_I18,  /* nop.i 0 */
};
/* Format A1: register-register ALU op (r1 = r2 op r3).  */
static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;                   /* qualifying predicate */
    return insn;
}
/* Format A3: ALU op with 8-bit signed immediate (r1 = imm8 op r3).  */
static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    uint64_t insn = opc;

    insn |= (imm & 0x80) << 29;          /* s */
    insn |= (imm & 0x7f) << 13;          /* imm7b */
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format A4: adds with 14-bit signed immediate (r1 = imm14 + r3).  */
static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    uint64_t insn = opc;

    insn |= (imm & 0x2000) << 23;        /* s */
    insn |= (imm & 0x1f80) << 20;        /* imm6d */
    insn |= (imm & 0x007f) << 13;        /* imm7b */
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format A5: addl with 22-bit signed immediate (r3 limited to r0-r3).  */
static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
                                  uint64_t imm, int r3)
{
    uint64_t insn = opc;

    insn |= (imm & 0x200000) << 15;      /* s */
    insn |= (imm & 0x1f0000) << 6;       /* imm5c */
    insn |= (imm & 0x00ff80) << 20;      /* imm9d */
    insn |= (imm & 0x00007f) << 13;      /* imm7b */
    insn |= (uint64_t)(r3 & 0x03) << 20; /* only a 2-bit field */
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format A6: compare, setting predicate pair p1/p2 from r2 cmp r3.  */
static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
                                  int p2, int r2, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(p2 & 0x3f) << 27;
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(p1 & 0x3f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format B1: IP-relative branch with 21-bit displacement (in bundles).  */
static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* s */
    insn |= (imm & 0x0fffff) << 13;      /* imm20b */
    insn |= qp & 0x3f;
    return insn;
}
/* Format B3: IP-relative call, return link in branch register b1.  */
static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* s */
    insn |= (imm & 0x0fffff) << 13;      /* imm20b */
    insn |= (uint64_t)(b1 & 0x7) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format B4: indirect branch through branch register b2.  */
static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(b2 & 0x7) << 13;
    insn |= qp & 0x3f;
    return insn;
}
/* Format B5: indirect call through b2, return link in b1.  */
static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(b2 & 0x7) << 13;
    insn |= (uint64_t)(b1 & 0x7) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format B9: B-unit nop/break with 21-bit immediate.  */
static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* i */
    insn |= (imm & 0x0fffff) << 6;       /* imm20a */
    insn |= qp & 0x3f;
    return insn;
}
/* Format F1: floating-point multiply-add (f1 = f3 * f4 + f2).  */
static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(f4 & 0x7f) << 27;
    insn |= (uint64_t)(f3 & 0x7f) << 20;
    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format F2: fixed-point multiply-add (same field layout as F1).  */
static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
                                  int f3, int f4, int f2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(f4 & 0x7f) << 27;
    insn |= (uint64_t)(f3 & 0x7f) << 20;
    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format F6: reciprocal approximation, also sets predicate p2.  */
static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
                                  int p2, int f2, int f3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(p2 & 0x3f) << 27;
    insn |= (uint64_t)(f3 & 0x7f) << 20;
    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format F10: float-to-fixed conversion (f1 = convert(f2)).  */
static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format F11: fixed-to-float conversion (f1 = convert(f2)).  */
static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format F16: F-unit nop/break with 21-bit immediate.  */
static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* i */
    insn |= (imm & 0x0fffff) << 6;       /* imm20a */
    insn |= qp & 0x3f;
    return insn;
}
/* Format I2: multimedia register-register op (r1 = r2 op r3).  */
static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I3: mux1 byte permutation (mbtype selects the permutation).  */
static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
                                  int r2, int mbtype)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(mbtype & 0x0f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I5: variable shift right (r1 = r3 >> r2).  */
static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
                                  int r3, int r2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I7: variable shift left (r1 = r2 << r3).  */
static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
                                  int r2, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I10: shrp - shift the r2:r3 register pair right by count.  */
static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
                                   int r2, int r3, uint64_t count)
{
    uint64_t insn = opc;

    insn |= (count & 0x3f) << 27;
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I11: extract a bit field of r3 (pos/len) into r1.  */
static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
                                   int r3, uint64_t pos, uint64_t len)
{
    uint64_t insn = opc;

    insn |= (len & 0x3f) << 27;
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (pos & 0x3f) << 14;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I12: deposit r2 into a zeroed field of r1 (dep.z).  */
static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
                                   int r2, uint64_t pos, uint64_t len)
{
    uint64_t insn = opc;

    insn |= (len & 0x3f) << 27;
    insn |= (pos & 0x3f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I14: deposit a 1-bit immediate into a field of r3.  */
static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
                                   int r3, uint64_t pos, uint64_t len)
{
    uint64_t insn = opc;

    insn |= (imm & 0x01) << 36;
    insn |= (len & 0x3f) << 27;
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (pos & 0x3f) << 14;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I15: deposit r2 into a field of r3 (dep, 4-bit length).  */
static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
                                   int r3, uint64_t pos, uint64_t len)
{
    uint64_t insn = opc;

    insn |= (pos & 0x3f) << 31;
    insn |= (len & 0x0f) << 27;
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I18: I-unit nop/hint with 21-bit immediate.  */
static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* i */
    insn |= (imm & 0x0fffff) << 6;       /* imm20a */
    insn |= qp & 0x3f;
    return insn;
}
/* Format I21: move general register r2 to branch register b1.  */
static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
                                   int r2, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x1ff) << 24;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(b1 & 0x7) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I22: move branch register b2 to general register r1.  */
static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(b2 & 0x7) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I26: move general register r2 to application register ar3.  */
static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(ar3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= qp & 0x3f;
    return insn;
}
/* Format I29: sign/zero extend r3 into r1 (sxt/zxt).  */
static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format L2: the L slot of a movl - carries imm bits 63..22
   (bit 63 and the low 22 bits live in the companion X2 slot).  */
static inline uint64_t tcg_opc_l2(uint64_t imm)
{
    return (imm & 0x7fffffffffc00000ull) >> 22;
}
/* Format L3: the L slot of a brl - carries displacement bits 58..20
   (the sign bit and low 20 bits live in the companion X3/X4 slot).  */
static inline uint64_t tcg_opc_l3(uint64_t imm)
{
    return (imm & 0x07fffffffff00000ull) >> 18;
}

/* L4 (brl.call) uses the same L-slot layout as L3.  */
#define tcg_opc_l4 tcg_opc_l3
/* Format M1: load r1 = [r3].  */
static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M3: load with 9-bit immediate post-increment of r3.  */
static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
                                  int r3, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100) << 28;         /* s */
    insn |= (imm & 0x080) << 20;         /* i */
    insn |= (imm & 0x07f) << 13;         /* imm7b */
    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M4: store [r3] = r2.  */
static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r3 & 0x7f) << 20;
    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M18: setf - move general register r2 to FP register f1.  */
static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(r2 & 0x7f) << 13;
    insn |= (uint64_t)(f1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M19: getf - move FP register f2 to general register r1.  */
static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(f2 & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M34: alloc - allocate a register stack frame
   (sof = size of frame, sol = size of locals, sor = rotating/8).  */
static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
                                   int sof, int sol, int sor)
{
    uint64_t insn = opc;

    insn |= (uint64_t)(sor & 0x0f) << 27;
    insn |= (uint64_t)(sol & 0x7f) << 20;
    insn |= (uint64_t)(sof & 0x7f) << 13;
    insn |= (uint64_t)(r1 & 0x7f) << 6;
    insn |= qp & 0x3f;
    return insn;
}
/* Format M48: M-unit nop/hint with 21-bit immediate.  */
static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x100000) << 16;      /* i */
    insn |= (imm & 0x0fffff) << 6;       /* imm20a */
    insn |= qp & 0x3f;
    return insn;
}
658 static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
659 int r1, uint64_t imm)
661 return opc
662 | ((imm & 0x8000000000000000ull) >> 27) /* i */
663 | (imm & 0x0000000000200000ull) /* ic */
664 | ((imm & 0x00000000001f0000ull) << 6) /* imm5c */
665 | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
666 | ((imm & 0x000000000000007full) << 13) /* imm7b */
667 | ((r1 & 0x7f) << 6)
668 | (qp & 0x3f);
/* Format X3: brl - long branch; sign bit and low 20 displacement bits
   (the rest comes from the companion L3 slot).  */
static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x0800000000000000ull) >> 23;  /* i */
    insn |= (imm & 0x00000000000fffffull) << 13;  /* imm20b */
    insn |= qp & 0x3f;
    return insn;
}
/* Format X4: brl.call - like X3 but with a return link register b1.  */
static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
{
    uint64_t insn = opc;

    insn |= (imm & 0x0800000000000000ull) >> 23;  /* i */
    insn |= (imm & 0x00000000000fffffull) << 13;  /* imm20b */
    insn |= (uint64_t)(b1 & 0x7) << 6;
    insn |= qp & 0x3f;
    return insn;
}
690 * Relocations - Note that we never encode branches elsewhere than slot 2.
693 static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
695 uint64_t imm = target - pc;
697 pc->hi = (pc->hi & 0xf700000fffffffffull)
698 | ((imm & 0x100000) << 39) /* s */
699 | ((imm & 0x0fffff) << 36); /* imm20b */
702 static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
704 int64_t high = pc->hi;
706 return ((high >> 39) & 0x100000) + /* s */
707 ((high >> 36) & 0x0fffff); /* imm20b */
710 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
711 intptr_t value, intptr_t addend)
713 assert(addend == 0);
714 assert(type == R_IA64_PCREL21B);
715 reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
719 * Constraints
722 /* parse target specific constraints */
723 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
725 const char *ct_str;
727 ct_str = *pct_str;
728 switch(ct_str[0]) {
729 case 'r':
730 ct->ct |= TCG_CT_REG;
731 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
732 break;
733 case 'I':
734 ct->ct |= TCG_CT_CONST_S22;
735 break;
736 case 'S':
737 ct->ct |= TCG_CT_REG;
738 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
739 #if defined(CONFIG_SOFTMMU)
740 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
741 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
742 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
743 #endif
744 break;
745 case 'Z':
746 /* We are cheating a bit here, using the fact that the register
747 r0 is also the register number 0. Hence there is no need
748 to check for const_args in each instruction. */
749 ct->ct |= TCG_CT_CONST_ZERO;
750 break;
751 default:
752 return -1;
754 ct_str++;
755 *pct_str = ct_str;
756 return 0;
759 /* test if a constant matches the constraint */
760 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
761 const TCGArgConstraint *arg_ct)
763 int ct;
764 ct = arg_ct->ct;
765 if (ct & TCG_CT_CONST)
766 return 1;
767 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
768 return 1;
769 else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
770 return 1;
771 else
772 return 0;
776 * Code generation
779 static tcg_insn_unit *tb_ret_addr;
781 static inline void tcg_out_bundle(TCGContext *s, int template,
782 uint64_t slot0, uint64_t slot1,
783 uint64_t slot2)
785 template &= 0x1f; /* 5 bits */
786 slot0 &= 0x1ffffffffffull; /* 41 bits */
787 slot1 &= 0x1ffffffffffull; /* 41 bits */
788 slot2 &= 0x1ffffffffffull; /* 41 bits */
790 *s->code_ptr++ = (tcg_insn_unit){
791 (slot1 << 46) | (slot0 << 5) | template,
792 (slot2 << 23) | (slot1 >> 18)
796 static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
798 return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
801 static inline void tcg_out_mov(TCGContext *s, TCGType type,
802 TCGReg ret, TCGReg arg)
804 tcg_out_bundle(s, mmI,
805 INSN_NOP_M,
806 INSN_NOP_M,
807 tcg_opc_mov_a(TCG_REG_P0, ret, arg));
810 static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
812 assert(src == sextract64(src, 0, 22));
813 return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
816 static inline void tcg_out_movi(TCGContext *s, TCGType type,
817 TCGReg reg, tcg_target_long arg)
819 tcg_out_bundle(s, mLX,
820 INSN_NOP_M,
821 tcg_opc_l2 (arg),
822 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
825 static void tcg_out_br(TCGContext *s, TCGLabel *l)
827 uint64_t imm;
829 /* We pay attention here to not modify the branch target by reading
830 the existing value and using it again. This ensure that caches and
831 memory are kept coherent during retranslation. */
832 if (l->has_value) {
833 imm = l->u.value_ptr - s->code_ptr;
834 } else {
835 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
836 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
839 tcg_out_bundle(s, mmB,
840 INSN_NOP_M,
841 INSN_NOP_M,
842 tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
845 static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
847 uintptr_t func = desc->lo, gp = desc->hi, disp;
849 /* Look through the function descriptor. */
850 tcg_out_bundle(s, mlx,
851 INSN_NOP_M,
852 tcg_opc_l2 (gp),
853 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
854 disp = (tcg_insn_unit *)func - s->code_ptr;
855 tcg_out_bundle(s, mLX,
856 INSN_NOP_M,
857 tcg_opc_l4 (disp),
858 tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
859 TCG_REG_B0, disp));
862 static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
864 uint64_t imm, opc1;
866 /* At least arg == 0 is a common operation. */
867 if (arg == sextract64(arg, 0, 22)) {
868 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
869 } else {
870 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
871 opc1 = INSN_NOP_M;
874 imm = tb_ret_addr - s->code_ptr;
876 tcg_out_bundle(s, mLX,
877 opc1,
878 tcg_opc_l3 (imm),
879 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
882 static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
884 if (s->tb_jmp_offset) {
885 /* direct jump method */
886 tcg_abort();
887 } else {
888 /* indirect jump method */
889 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
890 (tcg_target_long)(s->tb_next + arg));
891 tcg_out_bundle(s, MmI,
892 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
893 TCG_REG_R2, TCG_REG_R2),
894 INSN_NOP_M,
895 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
896 TCG_REG_R2, 0));
897 tcg_out_bundle(s, mmB,
898 INSN_NOP_M,
899 INSN_NOP_M,
900 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
901 TCG_REG_B6));
903 s->tb_next_offset[arg] = tcg_current_code_size(s);
906 static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
908 tcg_out_bundle(s, mmI,
909 INSN_NOP_M,
910 INSN_NOP_M,
911 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
912 tcg_out_bundle(s, mmB,
913 INSN_NOP_M,
914 INSN_NOP_M,
915 tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
918 static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
919 TCGArg arg1, tcg_target_long arg2)
921 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
922 tcg_out_bundle(s, MmI,
923 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
924 TCG_REG_R2, arg2, arg1),
925 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
926 INSN_NOP_I);
927 } else {
928 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
929 tcg_out_bundle(s, MmI,
930 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
931 TCG_REG_R2, TCG_REG_R2, arg1),
932 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
933 INSN_NOP_I);
937 static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
938 TCGArg arg1, tcg_target_long arg2)
940 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
941 tcg_out_bundle(s, MmI,
942 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
943 TCG_REG_R2, arg2, arg1),
944 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
945 INSN_NOP_I);
946 } else {
947 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
948 tcg_out_bundle(s, MmI,
949 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
950 TCG_REG_R2, TCG_REG_R2, arg1),
951 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
952 INSN_NOP_I);
956 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
957 TCGReg arg1, intptr_t arg2)
959 if (type == TCG_TYPE_I32) {
960 tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
961 } else {
962 tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
966 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
967 TCGReg arg1, intptr_t arg2)
969 if (type == TCG_TYPE_I32) {
970 tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
971 } else {
972 tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
976 static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
977 TCGReg ret, TCGArg arg1, int const_arg1,
978 TCGArg arg2, int const_arg2)
980 uint64_t opc1 = 0, opc2 = 0, opc3 = 0;
982 if (const_arg2 && arg2 != 0) {
983 opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
984 arg2 = TCG_REG_R3;
986 if (const_arg1 && arg1 != 0) {
987 if (opc_a3 && arg1 == (int8_t)arg1) {
988 opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
989 } else {
990 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
991 arg1 = TCG_REG_R2;
994 if (opc3 == 0) {
995 opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
998 tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
999 opc1 ? opc1 : INSN_NOP_M,
1000 opc2 ? opc2 : INSN_NOP_I,
1001 opc3);
1004 static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
1005 TCGArg arg2, int const_arg2)
1007 if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
1008 tcg_out_bundle(s, mmI,
1009 INSN_NOP_M,
1010 INSN_NOP_M,
1011 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
1012 } else {
1013 tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
1017 static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
1018 int const_arg1, TCGArg arg2, int const_arg2)
1020 if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
1021 tcg_out_bundle(s, mmI,
1022 INSN_NOP_M,
1023 INSN_NOP_M,
1024 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
1025 } else {
1026 tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
1027 arg1, const_arg1, arg2, const_arg2);
1031 static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
1032 TCGArg arg1, int const_arg1,
1033 TCGArg arg2, int const_arg2)
1035 tcg_out_bundle(s, mII,
1036 INSN_NOP_M,
1037 tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
1038 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1041 static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
1042 TCGArg arg1, int const_arg1,
1043 TCGArg arg2, int const_arg2)
1045 tcg_out_bundle(s, mII,
1046 INSN_NOP_M,
1047 tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
1048 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1051 static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
1052 TCGArg arg1, int const_arg1,
1053 TCGArg arg2, int const_arg2)
1055 tcg_out_bundle(s, mII,
1056 INSN_NOP_M,
1057 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
1058 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1061 static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
1062 TCGArg arg1, int const_arg1,
1063 TCGArg arg2, int const_arg2)
1065 tcg_out_bundle(s, mII,
1066 INSN_NOP_M,
1067 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
1068 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
1071 static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
1072 TCGArg arg1, TCGArg arg2)
1074 tcg_out_bundle(s, mmI,
1075 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
1076 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
1077 INSN_NOP_I);
1078 tcg_out_bundle(s, mmF,
1079 INSN_NOP_M,
1080 INSN_NOP_M,
1081 tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
1082 TCG_REG_F7, TCG_REG_F0));
1083 tcg_out_bundle(s, miI,
1084 tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
1085 INSN_NOP_I,
1086 INSN_NOP_I);
1089 static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1090 TCGArg arg2, int const_arg2)
1092 if (const_arg2) {
1093 tcg_out_bundle(s, miI,
1094 INSN_NOP_M,
1095 INSN_NOP_I,
1096 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1097 ret, arg1, arg2, 31 - arg2));
1098 } else {
1099 tcg_out_bundle(s, mII,
1100 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
1101 TCG_REG_R3, 0x1f, arg2),
1102 tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
1103 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
1104 TCG_REG_R2, TCG_REG_R3));
1108 static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1109 TCGArg arg2, int const_arg2)
1111 if (const_arg2) {
1112 tcg_out_bundle(s, miI,
1113 INSN_NOP_M,
1114 INSN_NOP_I,
1115 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1116 ret, arg1, arg2, 63 - arg2));
1117 } else {
1118 tcg_out_bundle(s, miI,
1119 INSN_NOP_M,
1120 INSN_NOP_I,
1121 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
1125 static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1126 TCGArg arg2, int const_arg2)
1128 if (const_arg2) {
1129 tcg_out_bundle(s, miI,
1130 INSN_NOP_M,
1131 INSN_NOP_I,
1132 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1133 arg1, 63 - arg2, 31 - arg2));
1134 } else {
1135 tcg_out_bundle(s, mII,
1136 INSN_NOP_M,
1137 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
1138 0x1f, arg2),
1139 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1140 arg1, TCG_REG_R2));
1144 static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1145 TCGArg arg2, int const_arg2)
1147 if (const_arg2) {
1148 tcg_out_bundle(s, miI,
1149 INSN_NOP_M,
1150 INSN_NOP_I,
1151 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1152 arg1, 63 - arg2, 63 - arg2));
1153 } else {
1154 tcg_out_bundle(s, miI,
1155 INSN_NOP_M,
1156 INSN_NOP_I,
1157 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1158 arg1, arg2));
1162 static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1163 TCGArg arg2, int const_arg2)
1165 if (const_arg2) {
1166 tcg_out_bundle(s, miI,
1167 INSN_NOP_M,
1168 INSN_NOP_I,
1169 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1170 arg1, arg2, 31 - arg2));
1171 } else {
1172 tcg_out_bundle(s, mII,
1173 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1174 0x1f, arg2),
1175 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
1176 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1177 TCG_REG_R2, TCG_REG_R3));
1181 static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1182 TCGArg arg2, int const_arg2)
1184 if (const_arg2) {
1185 tcg_out_bundle(s, miI,
1186 INSN_NOP_M,
1187 INSN_NOP_I,
1188 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1189 arg1, arg2, 63 - arg2));
1190 } else {
1191 tcg_out_bundle(s, miI,
1192 INSN_NOP_M,
1193 INSN_NOP_I,
1194 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1195 arg1, arg2));
1199 static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1200 TCGArg arg2, int const_arg2)
1202 if (const_arg2) {
1203 tcg_out_bundle(s, mII,
1204 INSN_NOP_M,
1205 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1206 TCG_REG_R2, arg1, arg1),
1207 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1208 TCG_REG_R2, 32 - arg2, 31));
1209 } else {
1210 tcg_out_bundle(s, miI,
1211 INSN_NOP_M,
1212 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1213 TCG_REG_R2, arg1, arg1),
1214 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1215 0x1f, arg2));
1216 tcg_out_bundle(s, mII,
1217 INSN_NOP_M,
1218 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
1219 0x20, TCG_REG_R3),
1220 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1221 TCG_REG_R2, TCG_REG_R3));
1225 static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1226 TCGArg arg2, int const_arg2)
1228 if (const_arg2) {
1229 tcg_out_bundle(s, miI,
1230 INSN_NOP_M,
1231 INSN_NOP_I,
1232 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1233 arg1, 0x40 - arg2));
1234 } else {
1235 tcg_out_bundle(s, mII,
1236 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1237 0x40, arg2),
1238 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
1239 arg1, arg2),
1240 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
1241 arg1, TCG_REG_R2));
1242 tcg_out_bundle(s, miI,
1243 INSN_NOP_M,
1244 INSN_NOP_I,
1245 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1246 TCG_REG_R2, TCG_REG_R3));
/* Emit ret = arg1 rotate-right arg2 for 32-bit values, using the same
   unpack4.l value-doubling trick as tcg_out_rotl_i32 above.  */
1250 static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1251 TCGArg arg2, int const_arg2)
1253 if (const_arg2) {
/* Constant count: extract 32 bits starting at bit arg2.  */
1254 tcg_out_bundle(s, mII,
1255 INSN_NOP_M,
1256 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1257 TCG_REG_R2, arg1, arg1),
1258 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1259 TCG_REG_R2, arg2, 31));
1260 } else {
/* Variable count: mask the count to 5 bits and shift the doubled
   value right by it.  */
1261 tcg_out_bundle(s, mII,
1262 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1263 0x1f, arg2),
1264 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1265 TCG_REG_R2, arg1, arg1),
1266 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1267 TCG_REG_R2, TCG_REG_R3));
/* Emit ret = arg1 rotate-right arg2 for 64-bit values.  */
1271 static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1272 TCGArg arg2, int const_arg2)
1274 if (const_arg2) {
/* Constant count: shrp on arg1:arg1 by arg2 is a right rotate.  */
1275 tcg_out_bundle(s, miI,
1276 INSN_NOP_M,
1277 INSN_NOP_I,
1278 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1279 arg1, arg2));
1280 } else {
/* Variable count: ret = (arg1 >> arg2) | (arg1 << (64 - arg2)).
   For arg2 == 0 the left shift count is 64; ia64 variable shifts
   with a count >= 64 produce 0, so the result is just arg1.  */
1281 tcg_out_bundle(s, mII,
1282 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1283 0x40, arg2),
1284 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
1285 arg1, arg2),
1286 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
1287 arg1, TCG_REG_R2));
1288 tcg_out_bundle(s, miI,
1289 INSN_NOP_M,
1290 INSN_NOP_I,
1291 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1292 TCG_REG_R2, TCG_REG_R3));
/* I29 zero/sign-extension opcodes, indexed by (opc & MO_SSIZE), i.e.
   size in the low bits and the sign flag as bit 2.  The MO_64 slots
   are 0: a 64-bit "extension" is emitted as a plain move instead.  */
1296 static const uint64_t opc_ext_i29[8] = {
1297 OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
1298 OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
/* Build the insn that extends S into D according to memop OPC:
   a plain move for MO_64, otherwise the zxt/sxt insn selected from
   opc_ext_i29[] by size and signedness.  QP is the qualifying predicate.  */
1301 static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
1303 if ((opc & MO_SIZE) == MO_64) {
1304 return tcg_opc_mov_a(qp, d, s);
1305 } else {
1306 return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
/* Emit a single zero/sign-extension insn (I29 form): ret = ext(arg).  */
1310 static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
1311 TCGArg ret, TCGArg arg)
1313 tcg_out_bundle(s, miI,
1314 INSN_NOP_M,
1315 INSN_NOP_I,
1316 tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
/* Build a full 64-bit byte-swap insn: mux1 with the @rev (0xb)
   permutation reverses all eight bytes of S into D.  */
1319 static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
1321 return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
/* Emit a 16-bit byte swap: dep.z moves the low 16 bits of arg to the
   top of the register, then the full 64-bit byte-reverse brings them
   back to the bottom in swapped order.  High bits of ret end up zero.  */
1324 static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
1326 tcg_out_bundle(s, mII,
1327 INSN_NOP_M,
1328 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
1329 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
/* Emit a 32-bit byte swap: same shift-to-top + full byte-reverse
   technique as tcg_out_bswap16, with a 32-bit field.  */
1332 static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
1334 tcg_out_bundle(s, mII,
1335 INSN_NOP_M,
1336 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
1337 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
/* Emit a full 64-bit byte swap (single mux1 @rev insn).  */
1340 static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
1342 tcg_out_bundle(s, miI,
1343 INSN_NOP_M,
1344 INSN_NOP_I,
1345 tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
1348 static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
1349 TCGArg a2, int const_a2, int pos, int len)
1351 uint64_t i1 = 0, i2 = 0;
1352 int cpos = 63 - pos, lm1 = len - 1;
1354 if (const_a2) {
1355 /* Truncate the value of a constant a2 to the width of the field. */
1356 int mask = (1u << len) - 1;
1357 a2 &= mask;
1359 if (a2 == 0 || a2 == mask) {
1360 /* 1-bit signed constant inserted into register. */
1361 i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
1362 } else {
1363 /* Otherwise, load any constant into a temporary. Do this into
1364 the first I slot to help out with cross-unit delays. */
1365 i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
1366 a2 = TCG_REG_R2;
1369 if (i2 == 0) {
1370 i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
1372 tcg_out_bundle(s, (i1 ? mII : miI),
1373 INSN_NOP_M,
1374 i1 ? i1 : INSN_NOP_I,
1375 i2);
/* Build an A6 compare insn for TCG condition COND on (arg1, arg2),
   setting predicate P6 when the condition holds and P7 when it does not.
   CMP4 selects the 32-bit (cmp4) variants.  Only eq/lt/ltu exist in
   hardware; the remaining conditions are derived by swapping either the
   predicate targets (negation) or the operands (gt/le forms).  */
1378 static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
1379 TCGArg arg2, int cmp4)
1381 uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;
1383 if (cmp4) {
1384 opc_eq_a6 = OPC_CMP4_EQ_A6;
1385 opc_lt_a6 = OPC_CMP4_LT_A6;
1386 opc_ltu_a6 = OPC_CMP4_LTU_A6;
1387 } else {
1388 opc_eq_a6 = OPC_CMP_EQ_A6;
1389 opc_lt_a6 = OPC_CMP_LT_A6;
1390 opc_ltu_a6 = OPC_CMP_LTU_A6;
1393 switch (cond) {
1394 case TCG_COND_EQ:
1395 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1396 case TCG_COND_NE:
/* NE = EQ with the two predicate targets exchanged.  */
1397 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1398 case TCG_COND_LT:
1399 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1400 case TCG_COND_LTU:
1401 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1402 case TCG_COND_GE:
1403 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1404 case TCG_COND_GEU:
1405 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1406 case TCG_COND_LE:
/* LE(a,b) = !LT(b,a): operands and predicates both swapped.  */
1407 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1408 case TCG_COND_LEU:
1409 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1410 case TCG_COND_GT:
1411 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1412 case TCG_COND_GTU:
1413 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1414 default:
1415 tcg_abort();
1416 break;
/* Emit a conditional branch to label L: one miB bundle with the compare
   setting P6/P7 and a P6-predicated branch.  For an unresolved label a
   pcrel21b relocation is recorded against branch slot 2.  */
1420 static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
1421 TCGReg arg2, TCGLabel *l, int cmp4)
1423 uint64_t imm;
1425 /* We pay attention here to not modify the branch target by reading
1426 the existing value and using it again. This ensure that caches and
1427 memory are kept coherent during retranslation. */
1428 if (l->has_value) {
/* Label already placed: direct bundle-relative displacement.  */
1429 imm = l->u.value_ptr - s->code_ptr;
1430 } else {
/* Forward reference: keep whatever displacement is currently encoded
   and record a relocation to patch it later.  */
1431 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
1432 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
1435 tcg_out_bundle(s, miB,
1436 INSN_NOP_M,
1437 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1438 tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
/* Emit setcond: ret = (arg1 COND arg2) as 0/1.  The compare sets P6/P7,
   then two predicated movi's write 1 (P6, true) or 0 (P7, false).  */
1441 static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
1442 TCGArg arg1, TCGArg arg2, int cmp4)
1444 tcg_out_bundle(s, MmI,
1445 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1446 tcg_opc_movi_a(TCG_REG_P6, ret, 1),
1447 tcg_opc_movi_a(TCG_REG_P7, ret, 0));
/* Emit movcond: ret = (c1 COND c2) ? v1 : v2.  The compare sets P6/P7;
   v1 is moved under P6 and v2 under P7, each move degrading to a nop
   when the destination already holds that value.  */
1450 static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
1451 TCGArg c1, TCGArg c2,
1452 TCGArg v1, int const_v1,
1453 TCGArg v2, int const_v2, int cmp4)
1455 uint64_t opc1, opc2;
1457 if (const_v1) {
1458 opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
1459 } else if (ret == v1) {
/* ret already contains v1: nothing to move on the true path.  */
1460 opc1 = INSN_NOP_M;
1461 } else {
1462 opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
1464 if (const_v2) {
1465 opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
1466 } else if (ret == v2) {
1467 opc2 = INSN_NOP_I;
1468 } else {
1469 opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
1472 tcg_out_bundle(s, MmI,
1473 tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
1474 opc1,
1475 opc2);
1478 #if defined(CONFIG_SOFTMMU)
1479 /* We're expecting to use a signed 22-bit immediate add. */
1480 QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
1481 > 0x1fffff)
1483 /* Load and compare a TLB entry, and return the result in (p6, p7).
1484 R2 is loaded with the addend TLB entry.
1485 R57 is loaded with the address, zero extended on 32-bit targets.
1486 R1, R3 are clobbered, leaving R56 free for...
1487 BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store. */
1488 static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
1489 TCGMemOp s_bits, int off_rw, int off_add,
1490 uint64_t bswap1, uint64_t bswap2)
/* Pseudo-assembly of the four bundles emitted below:
1493 .mii
1494 mov r2 = off_rw
1495 extr.u r3 = addr_reg, ... # extract tlb page
1496 zxt4 r57 = addr_reg # or mov for 64-bit guest
1498 .mii
1499 addl r2 = r2, areg0
1500 shl r3 = r3, cteb # via dep.z
1501 dep r1 = 0, r57, ... # zero page ofs, keep align
1503 .mmi
1504 add r2 = r2, r3
1506 ld4 r3 = [r2], off_add-off_rw # or ld8 for 64-bit guest
1509 .mmi
1511 cmp.eq p6, p7 = r3, r58
   (bswap1/bswap2 occupy the otherwise-free I slots of the last two
   bundles, letting a store caller swap its data in parallel.)  */
1515 tcg_out_bundle(s, miI,
1516 tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
1517 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
1518 addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
1519 tcg_opc_ext_i(TCG_REG_P0,
1520 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
1521 TCG_REG_R57, addr_reg));
1522 tcg_out_bundle(s, miI,
1523 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1524 TCG_REG_R2, TCG_AREG0),
1525 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
1526 TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
1527 63 - CPU_TLB_ENTRY_BITS),
/* R1 = addr with the in-page offset (below alignment) cleared, for
   comparison against the tag loaded from the TLB entry.  */
1528 tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
1529 TCG_REG_R57, 63 - s_bits,
1530 TARGET_PAGE_BITS - s_bits - 1));
1531 tcg_out_bundle(s, MmI,
1532 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
1533 TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
/* Load the comparator and post-increment R2 to the addend field.  */
1534 tcg_opc_m3 (TCG_REG_P0,
1535 (TARGET_LONG_BITS == 32
1536 ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
1537 TCG_REG_R2, off_add - off_rw),
1538 bswap1);
1539 tcg_out_bundle(s, mmI,
1540 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
1541 tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
1542 TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
1543 bswap2);
/* One pending slow-path qemu_ld/st call site, to be pointed at the
   shared helper thunk in tcg_out_tb_finalize().  */
1546 typedef struct TCGLabelQemuLdst {
1547 bool is_ld;
1548 TCGMemOp size;
1549 tcg_insn_unit *label_ptr; /* label pointers to be updated */
1550 struct TCGLabelQemuLdst *next;
1551 } TCGLabelQemuLdst;
/* Per-TB backend state: singly-linked list of slow-path call sites.  */
1553 typedef struct TCGBackendData {
1554 TCGLabelQemuLdst *labels;
1555 } TCGBackendData;
/* Reset the per-TB list of slow-path ldst labels before translation.  */
1557 static inline void tcg_out_tb_init(TCGContext *s)
1559 s->be->labels = NULL;
/* Record a slow-path call site (the bundle at LABEL_PTR) so that
   tcg_out_tb_finalize() can point its branch at the proper thunk.  */
1562 static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
1563 tcg_insn_unit *label_ptr)
1565 TCGBackendData *be = s->be;
1566 TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));
1568 l->is_ld = is_ld;
1569 l->size = opc & MO_SIZE;
1570 l->label_ptr = label_ptr;
1571 l->next = be->labels;
1572 be->labels = l;
/* Emit (at most one per kind) the out-of-line slow-path thunks and patch
   every recorded call site's branch to point at the matching one.
   Thunks are indexed by is_ld * 4 + size, mirroring helpers[].  */
1575 static void tcg_out_tb_finalize(TCGContext *s)
/* Stores in slots 0-3, loads in slots 4-7, by access size.  */
1577 static const void * const helpers[8] = {
1578 helper_ret_stb_mmu,
1579 helper_le_stw_mmu,
1580 helper_le_stl_mmu,
1581 helper_le_stq_mmu,
1582 helper_ret_ldub_mmu,
1583 helper_le_lduw_mmu,
1584 helper_le_ldul_mmu,
1585 helper_le_ldq_mmu,
1587 tcg_insn_unit *thunks[8] = { };
1588 TCGLabelQemuLdst *l;
1590 for (l = s->be->labels; l != NULL; l = l->next) {
1591 long x = l->is_ld * 4 + l->size;
1592 tcg_insn_unit *dest = thunks[x];
1594 /* The out-of-line thunks are all the same; load the return address
1595 from B0, load the GP, and branch to the code. Note that we are
1596 always post-call, so the register window has rolled, so we're
1597 using incoming parameter register numbers, not outgoing. */
1598 if (dest == NULL) {
/* helpers[x] is an ia64 function descriptor: entry point + gp.  */
1599 uintptr_t *desc = (uintptr_t *)helpers[x];
1600 uintptr_t func = desc[0], gp = desc[1], disp;
1602 thunks[x] = dest = s->code_ptr;
1604 tcg_out_bundle(s, mlx,
1605 INSN_NOP_M,
1606 tcg_opc_l2 (gp),
1607 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
1608 TCG_REG_R1, gp));
/* Pass the return address as the helper's retaddr argument
   (incoming R35 for loads, R36 for stores).  */
1609 tcg_out_bundle(s, mii,
1610 INSN_NOP_M,
1611 INSN_NOP_I,
1612 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
1613 l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
1614 TCG_REG_B0));
1615 disp = (tcg_insn_unit *)func - s->code_ptr;
1616 tcg_out_bundle(s, mLX,
1617 INSN_NOP_M,
1618 tcg_opc_l3 (disp),
1619 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
/* Patch the call site's branch to target this thunk.  */
1622 reloc_pcrel21b_slot2(l->label_ptr, dest);
/* Softmmu qemu_ld: TLB lookup, predicated fast-path load (P6) or
   slow-path helper call (P7), then byte swap / extension fixup.
   args = { data_reg, addr_reg, oi }.  */
1626 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1628 static const uint64_t opc_ld_m1[4] = {
1629 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1631 int addr_reg, data_reg, mem_index;
1632 TCGMemOpIdx oi;
1633 TCGMemOp opc, s_bits;
1634 uint64_t fin1, fin2;
1635 tcg_insn_unit *label_ptr;
1637 data_reg = args[0];
1638 addr_reg = args[1];
1639 oi = args[2];
1640 opc = get_memop(oi);
1641 mem_index = get_mmuidx(oi);
1642 s_bits = opc & MO_SIZE;
1644 /* Read the TLB entry */
1645 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1646 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
1647 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1648 INSN_NOP_I, INSN_NOP_I);
1650 /* P6 is the fast path, and P7 the slow path */
/* fin1/fin2: up to two insns to move R8 (raw loaded value) into
   data_reg with any byte swap and sign/zero extension applied.  */
1652 fin2 = 0;
1653 if (opc & MO_BSWAP) {
1654 fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
1655 if (s_bits < MO_64) {
/* After a full 64-bit reverse the value sits in the top bits;
   shift it down, sign- or zero-extending as requested.  */
1656 int shift = 64 - (8 << s_bits);
1657 fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
1658 fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
1659 data_reg, data_reg, shift, 63 - shift);
1661 } else {
1662 fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
/* Fast path: host address = TLB addend (R2) + guest address (R57).
   Slow path: set up helper args env (R56) and oi (R58).  */
1665 tcg_out_bundle(s, mmI,
1666 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1667 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1668 TCG_REG_R2, TCG_REG_R57),
1669 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
1670 label_ptr = s->code_ptr;
1671 tcg_out_bundle(s, miB,
1672 tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
1673 TCG_REG_R8, TCG_REG_R2),
1674 INSN_NOP_I,
1675 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1676 get_reloc_pcrel21b_slot2(label_ptr)));
1678 add_qemu_ldst_label(s, 1, opc, label_ptr);
1680 /* Note that we always use LE helper functions, so the bswap insns
1681 here for the fast path also apply to the slow path. */
1682 tcg_out_bundle(s, (fin2 ? mII : miI),
1683 INSN_NOP_M,
1684 fin1,
1685 fin2 ? fin2 : INSN_NOP_I);
/* Softmmu qemu_st: byte-swap data into R58 during the TLB lookup
   (pre1/pre2 ride in the lookup's free I slots), then predicated
   fast-path store (P6) or slow-path helper call (P7).
   args = { data_reg, addr_reg, oi }.  */
1688 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1690 static const uint64_t opc_st_m4[4] = {
1691 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1693 TCGReg addr_reg, data_reg;
1694 int mem_index;
1695 uint64_t pre1, pre2;
1696 TCGMemOpIdx oi;
1697 TCGMemOp opc, s_bits;
1698 tcg_insn_unit *label_ptr;
1700 data_reg = args[0];
1701 addr_reg = args[1];
1702 oi = args[2];
1703 opc = get_memop(oi);
1704 mem_index = get_mmuidx(oi);
1705 s_bits = opc & MO_SIZE;
1707 /* Note that we always use LE helper functions, so the bswap insns
1708 that are here for the fast path also apply to the slow path,
1709 and move the data into the argument register. */
1710 pre2 = INSN_NOP_I;
1711 if (opc & MO_BSWAP) {
1712 pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
1713 if (s_bits < MO_64) {
/* Shift the reversed value down from the top bits.  */
1714 int shift = 64 - (8 << s_bits);
1715 pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
1716 TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
1718 } else {
1719 /* Just move the data into place for the slow path. */
1720 pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
1723 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1724 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
1725 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1726 pre1, pre2);
1728 /* P6 is the fast path, and P7 the slow path */
/* Fast path: host address = TLB addend (R2) + guest address (R57).
   Slow path: helper args env (R56) and oi (R59); data already in R58.  */
1729 tcg_out_bundle(s, mmI,
1730 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1731 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1732 TCG_REG_R2, TCG_REG_R57),
1733 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
1734 label_ptr = s->code_ptr;
1735 tcg_out_bundle(s, miB,
1736 tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
1737 TCG_REG_R58, TCG_REG_R2),
1738 INSN_NOP_I,
1739 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1740 get_reloc_pcrel21b_slot2(label_ptr)));
1742 add_qemu_ldst_label(s, 0, opc, label_ptr);
1745 #else /* !CONFIG_SOFTMMU */
1746 # include "tcg-be-null.h"
/* User-mode qemu_ld: direct load from guest_base + address, with the
   address zero-extended on 32-bit guests, followed by any byte swap
   and sign/zero extension.  args = { data_reg, addr_reg, memop }.  */
1748 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1750 static uint64_t const opc_ld_m1[4] = {
1751 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1753 int addr_reg, data_reg;
1754 TCGMemOp opc, s_bits, bswap;
1756 data_reg = args[0];
1757 addr_reg = args[1];
1758 opc = args[2];
1759 s_bits = opc & MO_SIZE;
1760 bswap = opc & MO_BSWAP;
1762 #if TARGET_LONG_BITS == 32
/* 32-bit guest: compute the host address in R2 (zxt4 the guest
   address, plus guest_base if nonzero).  */
1763 if (guest_base != 0) {
1764 tcg_out_bundle(s, mII,
1765 INSN_NOP_M,
1766 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1767 TCG_REG_R3, addr_reg),
1768 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1769 TCG_GUEST_BASE_REG, TCG_REG_R3));
1770 } else {
1771 tcg_out_bundle(s, miI,
1772 INSN_NOP_M,
1773 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1774 TCG_REG_R2, addr_reg),
1775 INSN_NOP_I);
1778 if (!bswap) {
1779 if (!(opc & MO_SIGN)) {
1780 tcg_out_bundle(s, miI,
1781 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1782 data_reg, TCG_REG_R2),
1783 INSN_NOP_I,
1784 INSN_NOP_I);
1785 } else {
/* Signed load: ld zero-extends, so sign-extend afterwards.  */
1786 tcg_out_bundle(s, mII,
1787 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1788 data_reg, TCG_REG_R2),
1789 INSN_NOP_I,
1790 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1792 } else if (s_bits == MO_64) {
1793 tcg_out_bundle(s, mII,
1794 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1795 data_reg, TCG_REG_R2),
1796 INSN_NOP_I,
1797 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1798 } else {
/* Sub-64-bit swap: move the value to the top of the register
   (dep.z), then byte-reverse; sign-extend afterwards if needed.  */
1799 if (s_bits == MO_16) {
1800 tcg_out_bundle(s, mII,
1801 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1802 data_reg, TCG_REG_R2),
1803 INSN_NOP_I,
1804 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1805 data_reg, data_reg, 15, 15));
1806 } else {
1807 tcg_out_bundle(s, mII,
1808 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1809 data_reg, TCG_REG_R2),
1810 INSN_NOP_I,
1811 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1812 data_reg, data_reg, 31, 31));
1814 if (!(opc & MO_SIGN)) {
1815 tcg_out_bundle(s, miI,
1816 INSN_NOP_M,
1817 INSN_NOP_I,
1818 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1819 } else {
1820 tcg_out_bundle(s, mII,
1821 INSN_NOP_M,
1822 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
1823 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1826 #else
/* 64-bit guest: load from addr_reg (+ guest_base), then fix up
   byte order and extension in place.  */
1827 if (guest_base != 0) {
1828 tcg_out_bundle(s, MmI,
1829 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1830 TCG_GUEST_BASE_REG, addr_reg),
1831 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1832 data_reg, TCG_REG_R2),
1833 INSN_NOP_I);
1834 } else {
1835 tcg_out_bundle(s, mmI,
1836 INSN_NOP_M,
1837 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1838 data_reg, addr_reg),
1839 INSN_NOP_I);
1842 if (bswap && s_bits == MO_16) {
1843 tcg_out_bundle(s, mII,
1844 INSN_NOP_M,
1845 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1846 data_reg, data_reg, 15, 15),
1847 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1848 } else if (bswap && s_bits == MO_32) {
1849 tcg_out_bundle(s, mII,
1850 INSN_NOP_M,
1851 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1852 data_reg, data_reg, 31, 31),
1853 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1854 } else if (bswap && s_bits == MO_64) {
1855 tcg_out_bundle(s, miI,
1856 INSN_NOP_M,
1857 INSN_NOP_I,
1858 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1860 if (opc & MO_SIGN) {
1861 tcg_out_bundle(s, miI,
1862 INSN_NOP_M,
1863 INSN_NOP_I,
1864 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1866 #endif
/* User-mode qemu_st: byte-swap the data into R3 if required, then store
   directly to guest_base + address (address zero-extended on 32-bit
   guests).  args = { data_reg, addr_reg, memop }.  */
1869 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1871 static uint64_t const opc_st_m4[4] = {
1872 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1874 int addr_reg, data_reg;
1875 #if TARGET_LONG_BITS == 64
1876 uint64_t add_guest_base;
1877 #endif
1878 TCGMemOp opc, s_bits, bswap;
1880 data_reg = args[0];
1881 addr_reg = args[1];
1882 opc = args[2];
1883 s_bits = opc & MO_SIZE;
1884 bswap = opc & MO_BSWAP;
1886 #if TARGET_LONG_BITS == 32
/* 32-bit guest: host address into R2, as in tcg_out_qemu_ld.  */
1887 if (guest_base != 0) {
1888 tcg_out_bundle(s, mII,
1889 INSN_NOP_M,
1890 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1891 TCG_REG_R3, addr_reg),
1892 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1893 TCG_GUEST_BASE_REG, TCG_REG_R3));
1894 } else {
1895 tcg_out_bundle(s, miI,
1896 INSN_NOP_M,
1897 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1898 TCG_REG_R2, addr_reg),
1899 INSN_NOP_I);
1902 if (bswap) {
/* Swap into scratch R3 so data_reg itself is left untouched.  */
1903 if (s_bits == MO_16) {
1904 tcg_out_bundle(s, mII,
1905 INSN_NOP_M,
1906 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1907 TCG_REG_R3, data_reg, 15, 15),
1908 tcg_opc_bswap64_i(TCG_REG_P0,
1909 TCG_REG_R3, TCG_REG_R3));
1910 data_reg = TCG_REG_R3;
1911 } else if (s_bits == MO_32) {
1912 tcg_out_bundle(s, mII,
1913 INSN_NOP_M,
1914 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1915 TCG_REG_R3, data_reg, 31, 31),
1916 tcg_opc_bswap64_i(TCG_REG_P0,
1917 TCG_REG_R3, TCG_REG_R3));
1918 data_reg = TCG_REG_R3;
1919 } else if (s_bits == MO_64) {
1920 tcg_out_bundle(s, miI,
1921 INSN_NOP_M,
1922 INSN_NOP_I,
1923 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1924 data_reg = TCG_REG_R3;
1927 tcg_out_bundle(s, mmI,
1928 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1929 data_reg, TCG_REG_R2),
1930 INSN_NOP_M,
1931 INSN_NOP_I);
1932 #else
/* 64-bit guest: defer the guest_base add so it can share a bundle
   with the swap (or the store itself).  */
1933 if (guest_base != 0) {
1934 add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1935 TCG_GUEST_BASE_REG, addr_reg);
1936 addr_reg = TCG_REG_R2;
1937 } else {
1938 add_guest_base = INSN_NOP_M;
1941 if (!bswap) {
1942 tcg_out_bundle(s, (guest_base ? MmI : mmI),
1943 add_guest_base,
1944 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1945 data_reg, addr_reg),
1946 INSN_NOP_I);
1947 } else {
1948 if (s_bits == MO_16) {
1949 tcg_out_bundle(s, mII,
1950 add_guest_base,
1951 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1952 TCG_REG_R3, data_reg, 15, 15),
1953 tcg_opc_bswap64_i(TCG_REG_P0,
1954 TCG_REG_R3, TCG_REG_R3));
1955 data_reg = TCG_REG_R3;
1956 } else if (s_bits == MO_32) {
1957 tcg_out_bundle(s, mII,
1958 add_guest_base,
1959 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1960 TCG_REG_R3, data_reg, 31, 31),
1961 tcg_opc_bswap64_i(TCG_REG_P0,
1962 TCG_REG_R3, TCG_REG_R3));
1963 data_reg = TCG_REG_R3;
1964 } else if (s_bits == MO_64) {
1965 tcg_out_bundle(s, miI,
1966 add_guest_base,
1967 INSN_NOP_I,
1968 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1969 data_reg = TCG_REG_R3;
1971 tcg_out_bundle(s, miI,
1972 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1973 data_reg, addr_reg),
1974 INSN_NOP_I,
1975 INSN_NOP_I);
1977 #endif
1980 #endif
/* Central dispatcher: emit ia64 code for one TCG opcode.  mov/movi/call
   are emitted via dedicated paths elsewhere and must not reach here.  */
1982 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1983 const TCGArg *args, const int *const_args)
1985 switch(opc) {
1986 case INDEX_op_exit_tb:
1987 tcg_out_exit_tb(s, args[0]);
1988 break;
1989 case INDEX_op_br:
1990 tcg_out_br(s, arg_label(args[0]));
1991 break;
1992 case INDEX_op_goto_tb:
1993 tcg_out_goto_tb(s, args[0]);
1994 break;
/* Guest-independent loads/stores; signed variants load then sxt.  */
1996 case INDEX_op_ld8u_i32:
1997 case INDEX_op_ld8u_i64:
1998 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
1999 break;
2000 case INDEX_op_ld8s_i32:
2001 case INDEX_op_ld8s_i64:
2002 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2003 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2004 break;
2005 case INDEX_op_ld16u_i32:
2006 case INDEX_op_ld16u_i64:
2007 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2008 break;
2009 case INDEX_op_ld16s_i32:
2010 case INDEX_op_ld16s_i64:
2011 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2012 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2013 break;
2014 case INDEX_op_ld_i32:
2015 case INDEX_op_ld32u_i64:
2016 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2017 break;
2018 case INDEX_op_ld32s_i64:
2019 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2020 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2021 break;
2022 case INDEX_op_ld_i64:
2023 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2024 break;
2025 case INDEX_op_st8_i32:
2026 case INDEX_op_st8_i64:
2027 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2028 break;
2029 case INDEX_op_st16_i32:
2030 case INDEX_op_st16_i64:
2031 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2032 break;
2033 case INDEX_op_st_i32:
2034 case INDEX_op_st32_i64:
2035 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2036 break;
2037 case INDEX_op_st_i64:
2038 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2039 break;
2041 case INDEX_op_add_i32:
2042 case INDEX_op_add_i64:
2043 tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
2044 break;
2045 case INDEX_op_sub_i32:
2046 case INDEX_op_sub_i64:
2047 tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
2048 break;
2050 case INDEX_op_and_i32:
2051 case INDEX_op_and_i64:
2052 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2053 tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
2054 args[2], const_args[2], args[1], const_args[1]);
2055 break;
2056 case INDEX_op_andc_i32:
2057 case INDEX_op_andc_i64:
2058 tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
2059 args[1], const_args[1], args[2], const_args[2]);
2060 break;
2061 case INDEX_op_eqv_i32:
2062 case INDEX_op_eqv_i64:
2063 tcg_out_eqv(s, args[0], args[1], const_args[1],
2064 args[2], const_args[2]);
2065 break;
2066 case INDEX_op_nand_i32:
2067 case INDEX_op_nand_i64:
2068 tcg_out_nand(s, args[0], args[1], const_args[1],
2069 args[2], const_args[2]);
2070 break;
2071 case INDEX_op_nor_i32:
2072 case INDEX_op_nor_i64:
2073 tcg_out_nor(s, args[0], args[1], const_args[1],
2074 args[2], const_args[2]);
2075 break;
2076 case INDEX_op_or_i32:
2077 case INDEX_op_or_i64:
2078 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2079 tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
2080 args[2], const_args[2], args[1], const_args[1]);
2081 break;
2082 case INDEX_op_orc_i32:
2083 case INDEX_op_orc_i64:
2084 tcg_out_orc(s, args[0], args[1], const_args[1],
2085 args[2], const_args[2]);
2086 break;
2087 case INDEX_op_xor_i32:
2088 case INDEX_op_xor_i64:
2089 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2090 tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
2091 args[2], const_args[2], args[1], const_args[1]);
2092 break;
2094 case INDEX_op_mul_i32:
2095 case INDEX_op_mul_i64:
2096 tcg_out_mul(s, args[0], args[1], args[2]);
2097 break;
2099 case INDEX_op_sar_i32:
2100 tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
2101 break;
2102 case INDEX_op_sar_i64:
2103 tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
2104 break;
2105 case INDEX_op_shl_i32:
2106 tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
2107 break;
2108 case INDEX_op_shl_i64:
2109 tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
2110 break;
2111 case INDEX_op_shr_i32:
2112 tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
2113 break;
2114 case INDEX_op_shr_i64:
2115 tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
2116 break;
2117 case INDEX_op_rotl_i32:
2118 tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
2119 break;
2120 case INDEX_op_rotl_i64:
2121 tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
2122 break;
2123 case INDEX_op_rotr_i32:
2124 tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
2125 break;
2126 case INDEX_op_rotr_i64:
2127 tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
2128 break;
2130 case INDEX_op_ext8s_i32:
2131 case INDEX_op_ext8s_i64:
2132 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
2133 break;
2134 case INDEX_op_ext8u_i32:
2135 case INDEX_op_ext8u_i64:
2136 tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
2137 break;
2138 case INDEX_op_ext16s_i32:
2139 case INDEX_op_ext16s_i64:
2140 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
2141 break;
2142 case INDEX_op_ext16u_i32:
2143 case INDEX_op_ext16u_i64:
2144 tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
2145 break;
2146 case INDEX_op_ext_i32_i64:
2147 case INDEX_op_ext32s_i64:
2148 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
2149 break;
2150 case INDEX_op_extu_i32_i64:
2151 case INDEX_op_ext32u_i64:
2152 tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
2153 break;
2155 case INDEX_op_bswap16_i32:
2156 case INDEX_op_bswap16_i64:
2157 tcg_out_bswap16(s, args[0], args[1]);
2158 break;
2159 case INDEX_op_bswap32_i32:
2160 case INDEX_op_bswap32_i64:
2161 tcg_out_bswap32(s, args[0], args[1]);
2162 break;
2163 case INDEX_op_bswap64_i64:
2164 tcg_out_bswap64(s, args[0], args[1]);
2165 break;
2167 case INDEX_op_deposit_i32:
2168 case INDEX_op_deposit_i64:
2169 tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
2170 args[3], args[4]);
2171 break;
/* The final flag selects 32-bit (cmp4) vs 64-bit compares.  */
2173 case INDEX_op_brcond_i32:
2174 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
2175 break;
2176 case INDEX_op_brcond_i64:
2177 tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
2178 break;
2179 case INDEX_op_setcond_i32:
2180 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
2181 break;
2182 case INDEX_op_setcond_i64:
2183 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
2184 break;
2185 case INDEX_op_movcond_i32:
2186 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2187 args[3], const_args[3], args[4], const_args[4], 1);
2188 break;
2189 case INDEX_op_movcond_i64:
2190 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2191 args[3], const_args[3], args[4], const_args[4], 0);
2192 break;
2194 case INDEX_op_qemu_ld_i32:
2195 tcg_out_qemu_ld(s, args);
2196 break;
2197 case INDEX_op_qemu_ld_i64:
2198 tcg_out_qemu_ld(s, args);
2199 break;
2200 case INDEX_op_qemu_st_i32:
2201 tcg_out_qemu_st(s, args);
2202 break;
2203 case INDEX_op_qemu_st_i64:
2204 tcg_out_qemu_st(s, args);
2205 break;
2207 case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
2208 case INDEX_op_mov_i64:
2209 case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
2210 case INDEX_op_movi_i64:
2211 case INDEX_op_call: /* Always emitted via tcg_out_call. */
2212 default:
2213 tcg_abort();
/* Operand constraint table.  "r" = any register, "Z" allows constant
   zero (r0), "I" allows a 22-bit signed immediate, "i" any immediate.
   "S" (qemu_st data) presumably restricts the register class to avoid
   the softmmu helper argument registers -- confirm against the
   constraint parser earlier in this file.  */
2217 static const TCGTargetOpDef ia64_op_defs[] = {
2218 { INDEX_op_br, { } },
2219 { INDEX_op_exit_tb, { } },
2220 { INDEX_op_goto_tb, { } },
2222 { INDEX_op_ld8u_i32, { "r", "r" } },
2223 { INDEX_op_ld8s_i32, { "r", "r" } },
2224 { INDEX_op_ld16u_i32, { "r", "r" } },
2225 { INDEX_op_ld16s_i32, { "r", "r" } },
2226 { INDEX_op_ld_i32, { "r", "r" } },
2227 { INDEX_op_st8_i32, { "rZ", "r" } },
2228 { INDEX_op_st16_i32, { "rZ", "r" } },
2229 { INDEX_op_st_i32, { "rZ", "r" } },
2231 { INDEX_op_add_i32, { "r", "rZ", "rI" } },
2232 { INDEX_op_sub_i32, { "r", "rI", "rI" } },
2234 { INDEX_op_and_i32, { "r", "rI", "rI" } },
2235 { INDEX_op_andc_i32, { "r", "rI", "rI" } },
2236 { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
2237 { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
2238 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
2239 { INDEX_op_or_i32, { "r", "rI", "rI" } },
2240 { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
2241 { INDEX_op_xor_i32, { "r", "rI", "rI" } },
2243 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
2245 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
2246 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
2247 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
2248 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
2249 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
2251 { INDEX_op_ext8s_i32, { "r", "rZ"} },
2252 { INDEX_op_ext8u_i32, { "r", "rZ"} },
2253 { INDEX_op_ext16s_i32, { "r", "rZ"} },
2254 { INDEX_op_ext16u_i32, { "r", "rZ"} },
2256 { INDEX_op_bswap16_i32, { "r", "rZ" } },
2257 { INDEX_op_bswap32_i32, { "r", "rZ" } },
2259 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
2260 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
2261 { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },
2263 { INDEX_op_ld8u_i64, { "r", "r" } },
2264 { INDEX_op_ld8s_i64, { "r", "r" } },
2265 { INDEX_op_ld16u_i64, { "r", "r" } },
2266 { INDEX_op_ld16s_i64, { "r", "r" } },
2267 { INDEX_op_ld32u_i64, { "r", "r" } },
2268 { INDEX_op_ld32s_i64, { "r", "r" } },
2269 { INDEX_op_ld_i64, { "r", "r" } },
2270 { INDEX_op_st8_i64, { "rZ", "r" } },
2271 { INDEX_op_st16_i64, { "rZ", "r" } },
2272 { INDEX_op_st32_i64, { "rZ", "r" } },
2273 { INDEX_op_st_i64, { "rZ", "r" } },
2275 { INDEX_op_add_i64, { "r", "rZ", "rI" } },
2276 { INDEX_op_sub_i64, { "r", "rI", "rI" } },
2278 { INDEX_op_and_i64, { "r", "rI", "rI" } },
2279 { INDEX_op_andc_i64, { "r", "rI", "rI" } },
2280 { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
2281 { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
2282 { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
2283 { INDEX_op_or_i64, { "r", "rI", "rI" } },
2284 { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
2285 { INDEX_op_xor_i64, { "r", "rI", "rI" } },
2287 { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },
2289 { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
2290 { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
2291 { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
2292 { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
2293 { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },
2295 { INDEX_op_ext8s_i64, { "r", "rZ"} },
2296 { INDEX_op_ext8u_i64, { "r", "rZ"} },
2297 { INDEX_op_ext16s_i64, { "r", "rZ"} },
2298 { INDEX_op_ext16u_i64, { "r", "rZ"} },
2299 { INDEX_op_ext32s_i64, { "r", "rZ"} },
2300 { INDEX_op_ext32u_i64, { "r", "rZ"} },
2301 { INDEX_op_ext_i32_i64, { "r", "rZ" } },
2302 { INDEX_op_extu_i32_i64, { "r", "rZ" } },
2304 { INDEX_op_bswap16_i64, { "r", "rZ" } },
2305 { INDEX_op_bswap32_i64, { "r", "rZ" } },
2306 { INDEX_op_bswap64_i64, { "r", "rZ" } },
2308 { INDEX_op_brcond_i64, { "rZ", "rZ" } },
2309 { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
2310 { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },
2312 { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
2313 { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },
2315 { INDEX_op_qemu_ld_i32, { "r", "r" } },
2316 { INDEX_op_qemu_ld_i64, { "r", "r" } },
2317 { INDEX_op_qemu_st_i32, { "SZ", "r" } },
2318 { INDEX_op_qemu_st_i64, { "SZ", "r" } },
2320 { -1 },
2323 /* Generate global QEMU prologue and epilogue code */
2324 static void tcg_target_qemu_prologue(TCGContext *s)
2326 int frame_size;
2328 /* reserve some stack space */
2329 frame_size = TCG_STATIC_CALL_ARGS_SIZE +
2330 CPU_TEMP_BUF_NLONGS * sizeof(long);
2331 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
2332 ~(TCG_TARGET_STACK_ALIGN - 1);
2333 tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
2334 CPU_TEMP_BUF_NLONGS * sizeof(long));
2336 /* First emit adhoc function descriptor */
/* ia64 calls go through a (entry, gp) descriptor pair; emit one in
   front of the code so the prologue is directly callable.  */
2337 *s->code_ptr = (tcg_insn_unit){
2338 (uint64_t)(s->code_ptr + 1), /* entry point */
2339 0 /* skip gp */
2341 s->code_ptr++;
2343 /* prologue */
/* alloc a register frame (pfs saved in R34) and load the branch
   target for the generated code from the second incoming arg (R33).  */
2344 tcg_out_bundle(s, miI,
2345 tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
2346 TCG_REG_R34, 32, 24, 0),
2347 INSN_NOP_I,
2348 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2349 TCG_REG_B6, TCG_REG_R33, 0));
2351 /* ??? If guest_base < 0x200000, we could load the register via
2352 an ADDL in the M slot of the next bundle. */
2353 if (guest_base != 0) {
/* Materialize guest_base in its dedicated register and reserve it.  */
2354 tcg_out_bundle(s, mlx,
2355 INSN_NOP_M,
2356 tcg_opc_l2(guest_base),
2357 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
2358 TCG_GUEST_BASE_REG, guest_base));
2359 tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
/* Allocate the stack frame, save the return address (B0) in R33,
   and jump to the translated code.  */
2362 tcg_out_bundle(s, miB,
2363 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2364 TCG_REG_R12, -frame_size, TCG_REG_R12),
2365 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
2366 TCG_REG_R33, TCG_REG_B0),
2367 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
2369 /* epilogue */
/* TBs return here: restore B0 and the stack, then ar.pfs and ret.  */
2370 tb_ret_addr = s->code_ptr;
2371 tcg_out_bundle(s, miI,
2372 INSN_NOP_M,
2373 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2374 TCG_REG_B0, TCG_REG_R33, 0),
2375 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2376 TCG_REG_R12, frame_size, TCG_REG_R12));
2377 tcg_out_bundle(s, miB,
2378 INSN_NOP_M,
2379 tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
2380 TCG_REG_PFS, TCG_REG_R34),
2381 tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
2382 TCG_REG_B0));
2385 static void tcg_target_init(TCGContext *s)
2387 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
2388 0xffffffffffffffffull);
2389 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
2390 0xffffffffffffffffull);
2392 tcg_regset_clear(tcg_target_call_clobber_regs);
2393 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
2394 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
2395 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
2396 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
2397 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
2398 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
2399 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
2400 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
2401 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
2402 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
2403 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
2404 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
2405 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
2406 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
2407 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
2408 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
2409 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
2410 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
2411 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
2412 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
2413 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
2414 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
2415 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
2416 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
2417 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
2418 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
2419 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
2420 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
2421 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
2422 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);
2424 tcg_regset_clear(s->reserved_regs);
2425 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0); /* zero register */
2426 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1); /* global pointer */
2427 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2); /* internal use */
2428 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3); /* internal use */
2429 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12); /* stack pointer */
2430 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */
2431 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33); /* return address */
2432 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34); /* PFS */
2434 /* The following 4 are not in use, are call-saved, but *not* saved
2435 by the prologue. Therefore we cannot use them without modifying
2436 the prologue. There doesn't seem to be any good reason to use
2437 these as opposed to the windowed registers. */
2438 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
2439 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
2440 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
2441 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);
2443 tcg_add_target_add_op_defs(ia64_op_defs);