[qemu/ar7.git] tcg/ia64/tcg-target.inc.c
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
5 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
27 /* Register definitions */
30 #ifdef CONFIG_DEBUG_TCG
31 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
32 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
33 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
34 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
35 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
36 "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
37 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
38 "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
39 "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
41 #endif
43 #ifndef CONFIG_SOFTMMU
44 #define TCG_GUEST_BASE_REG TCG_REG_R55
45 #endif
47 /* Branch registers */
48 enum {
49 TCG_REG_B0 = 0,
50 TCG_REG_B1,
51 TCG_REG_B2,
52 TCG_REG_B3,
53 TCG_REG_B4,
54 TCG_REG_B5,
55 TCG_REG_B6,
56 TCG_REG_B7,
59 /* Floating point registers */
60 enum {
61 TCG_REG_F0 = 0,
62 TCG_REG_F1,
63 TCG_REG_F2,
64 TCG_REG_F3,
65 TCG_REG_F4,
66 TCG_REG_F5,
67 TCG_REG_F6,
68 TCG_REG_F7,
69 TCG_REG_F8,
70 TCG_REG_F9,
71 TCG_REG_F10,
72 TCG_REG_F11,
73 TCG_REG_F12,
74 TCG_REG_F13,
75 TCG_REG_F14,
76 TCG_REG_F15,
79 /* Predicate registers */
80 enum {
81 TCG_REG_P0 = 0,
82 TCG_REG_P1,
83 TCG_REG_P2,
84 TCG_REG_P3,
85 TCG_REG_P4,
86 TCG_REG_P5,
87 TCG_REG_P6,
88 TCG_REG_P7,
89 TCG_REG_P8,
90 TCG_REG_P9,
91 TCG_REG_P10,
92 TCG_REG_P11,
93 TCG_REG_P12,
94 TCG_REG_P13,
95 TCG_REG_P14,
96 TCG_REG_P15,
99 /* Application registers */
100 enum {
101 TCG_REG_PFS = 64,
104 static const int tcg_target_reg_alloc_order[] = {
105 TCG_REG_R35,
106 TCG_REG_R36,
107 TCG_REG_R37,
108 TCG_REG_R38,
109 TCG_REG_R39,
110 TCG_REG_R40,
111 TCG_REG_R41,
112 TCG_REG_R42,
113 TCG_REG_R43,
114 TCG_REG_R44,
115 TCG_REG_R45,
116 TCG_REG_R46,
117 TCG_REG_R47,
118 TCG_REG_R48,
119 TCG_REG_R49,
120 TCG_REG_R50,
121 TCG_REG_R51,
122 TCG_REG_R52,
123 TCG_REG_R53,
124 TCG_REG_R54,
125 TCG_REG_R55,
126 TCG_REG_R14,
127 TCG_REG_R15,
128 TCG_REG_R16,
129 TCG_REG_R17,
130 TCG_REG_R18,
131 TCG_REG_R19,
132 TCG_REG_R20,
133 TCG_REG_R21,
134 TCG_REG_R22,
135 TCG_REG_R23,
136 TCG_REG_R24,
137 TCG_REG_R25,
138 TCG_REG_R26,
139 TCG_REG_R27,
140 TCG_REG_R28,
141 TCG_REG_R29,
142 TCG_REG_R30,
143 TCG_REG_R31,
144 TCG_REG_R56,
145 TCG_REG_R57,
146 TCG_REG_R58,
147 TCG_REG_R59,
148 TCG_REG_R60,
149 TCG_REG_R61,
150 TCG_REG_R62,
151 TCG_REG_R63,
152 TCG_REG_R8,
153 TCG_REG_R9,
154 TCG_REG_R10,
155 TCG_REG_R11
158 static const int tcg_target_call_iarg_regs[8] = {
159 TCG_REG_R56,
160 TCG_REG_R57,
161 TCG_REG_R58,
162 TCG_REG_R59,
163 TCG_REG_R60,
164 TCG_REG_R61,
165 TCG_REG_R62,
166 TCG_REG_R63,
169 static const int tcg_target_call_oarg_regs[] = {
170 TCG_REG_R8
174 /* Opcode formation */
177 /* bundle templates: stops (double bar in the IA64 manual) are marked with
178 an uppercase letter. */
179 enum {
180 mii = 0x00,
181 miI = 0x01,
182 mIi = 0x02,
183 mII = 0x03,
184 mlx = 0x04,
185 mLX = 0x05,
186 mmi = 0x08,
187 mmI = 0x09,
188 Mmi = 0x0a,
189 MmI = 0x0b,
190 mfi = 0x0c,
191 mfI = 0x0d,
192 mmf = 0x0e,
193 mmF = 0x0f,
194 mib = 0x10,
195 miB = 0x11,
196 mbb = 0x12,
197 mbB = 0x13,
198 bbb = 0x16,
199 bbB = 0x17,
200 mmb = 0x18,
201 mmB = 0x19,
202 mfb = 0x1c,
203 mfB = 0x1d,
206 enum {
207 OPC_ADD_A1 = 0x10000000000ull,
208 OPC_AND_A1 = 0x10060000000ull,
209 OPC_AND_A3 = 0x10160000000ull,
210 OPC_ANDCM_A1 = 0x10068000000ull,
211 OPC_ANDCM_A3 = 0x10168000000ull,
212 OPC_ADDS_A4 = 0x10800000000ull,
213 OPC_ADDL_A5 = 0x12000000000ull,
214 OPC_ALLOC_M34 = 0x02c00000000ull,
215 OPC_BR_DPTK_FEW_B1 = 0x08400000000ull,
216 OPC_BR_SPTK_MANY_B1 = 0x08000001000ull,
217 OPC_BR_CALL_SPNT_FEW_B3 = 0x0a200000000ull,
218 OPC_BR_SPTK_MANY_B4 = 0x00100001000ull,
219 OPC_BR_CALL_SPTK_MANY_B5 = 0x02100001000ull,
220 OPC_BR_RET_SPTK_MANY_B4 = 0x00108001100ull,
221 OPC_BRL_SPTK_MANY_X3 = 0x18000001000ull,
222 OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
223 OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
224 OPC_CMP_LT_A6 = 0x18000000000ull,
225 OPC_CMP_LTU_A6 = 0x1a000000000ull,
226 OPC_CMP_EQ_A6 = 0x1c000000000ull,
227 OPC_CMP4_LT_A6 = 0x18400000000ull,
228 OPC_CMP4_LTU_A6 = 0x1a400000000ull,
229 OPC_CMP4_EQ_A6 = 0x1c400000000ull,
230 OPC_DEP_I14 = 0x0ae00000000ull,
231 OPC_DEP_I15 = 0x08000000000ull,
232 OPC_DEP_Z_I12 = 0x0a600000000ull,
233 OPC_EXTR_I11 = 0x0a400002000ull,
234 OPC_EXTR_U_I11 = 0x0a400000000ull,
235 OPC_FCVT_FX_TRUNC_S1_F10 = 0x004d0000000ull,
236 OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
237 OPC_FCVT_XF_F11 = 0x000e0000000ull,
238 OPC_FMA_S1_F1 = 0x10400000000ull,
239 OPC_FNMA_S1_F1 = 0x18400000000ull,
240 OPC_FRCPA_S1_F6 = 0x00600000000ull,
241 OPC_GETF_SIG_M19 = 0x08708000000ull,
242 OPC_LD1_M1 = 0x08000000000ull,
243 OPC_LD1_M3 = 0x0a000000000ull,
244 OPC_LD2_M1 = 0x08040000000ull,
245 OPC_LD2_M3 = 0x0a040000000ull,
246 OPC_LD4_M1 = 0x08080000000ull,
247 OPC_LD4_M3 = 0x0a080000000ull,
248 OPC_LD8_M1 = 0x080c0000000ull,
249 OPC_LD8_M3 = 0x0a0c0000000ull,
250 OPC_MUX1_I3 = 0x0eca0000000ull,
251 OPC_NOP_B9 = 0x04008000000ull,
252 OPC_NOP_F16 = 0x00008000000ull,
253 OPC_NOP_I18 = 0x00008000000ull,
254 OPC_NOP_M48 = 0x00008000000ull,
255 OPC_MOV_I21 = 0x00e00100000ull,
256 OPC_MOV_RET_I21 = 0x00e00500000ull,
257 OPC_MOV_I22 = 0x00188000000ull,
258 OPC_MOV_I_I26 = 0x00150000000ull,
259 OPC_MOVL_X2 = 0x0c000000000ull,
260 OPC_OR_A1 = 0x10070000000ull,
261 OPC_OR_A3 = 0x10170000000ull,
262 OPC_SETF_EXP_M18 = 0x0c748000000ull,
263 OPC_SETF_SIG_M18 = 0x0c708000000ull,
264 OPC_SHL_I7 = 0x0f240000000ull,
265 OPC_SHR_I5 = 0x0f220000000ull,
266 OPC_SHR_U_I5 = 0x0f200000000ull,
267 OPC_SHRP_I10 = 0x0ac00000000ull,
268 OPC_SXT1_I29 = 0x000a0000000ull,
269 OPC_SXT2_I29 = 0x000a8000000ull,
270 OPC_SXT4_I29 = 0x000b0000000ull,
271 OPC_ST1_M4 = 0x08c00000000ull,
272 OPC_ST2_M4 = 0x08c40000000ull,
273 OPC_ST4_M4 = 0x08c80000000ull,
274 OPC_ST8_M4 = 0x08cc0000000ull,
275 OPC_SUB_A1 = 0x10028000000ull,
276 OPC_SUB_A3 = 0x10128000000ull,
277 OPC_UNPACK4_L_I2 = 0x0f860000000ull,
278 OPC_XMA_L_F2 = 0x1d000000000ull,
279 OPC_XOR_A1 = 0x10078000000ull,
280 OPC_XOR_A3 = 0x10178000000ull,
281 OPC_ZXT1_I29 = 0x00080000000ull,
282 OPC_ZXT2_I29 = 0x00088000000ull,
283 OPC_ZXT4_I29 = 0x00090000000ull,
285 INSN_NOP_M = OPC_NOP_M48, /* nop.m 0 */
286 INSN_NOP_I = OPC_NOP_I18, /* nop.i 0 */
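/* The tcg_opc_* helpers below each assemble a single 41-bit instruction
   slot.  They are named after the IA-64 instruction format they encode
   (A1, A3, I11, M3, ...) and simply OR the register and immediate fields
   into the base opcode; "qp" selects the qualifying predicate register.  */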
289 static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
290 int r2, int r3)
292 return opc
293 | ((r3 & 0x7f) << 20)
294 | ((r2 & 0x7f) << 13)
295 | ((r1 & 0x7f) << 6)
296 | (qp & 0x3f);
299 static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
300 uint64_t imm, int r3)
302 return opc
303 | ((imm & 0x80) << 29) /* s */
304 | ((imm & 0x7f) << 13) /* imm7b */
305 | ((r3 & 0x7f) << 20)
306 | ((r1 & 0x7f) << 6)
307 | (qp & 0x3f);
310 static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
311 uint64_t imm, int r3)
313 return opc
314 | ((imm & 0x2000) << 23) /* s */
315 | ((imm & 0x1f80) << 20) /* imm6d */
316 | ((imm & 0x007f) << 13) /* imm7b */
317 | ((r3 & 0x7f) << 20)
318 | ((r1 & 0x7f) << 6)
319 | (qp & 0x3f);
322 static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
323 uint64_t imm, int r3)
325 return opc
326 | ((imm & 0x200000) << 15) /* s */
327 | ((imm & 0x1f0000) << 6) /* imm5c */
328 | ((imm & 0x00ff80) << 20) /* imm9d */
329 | ((imm & 0x00007f) << 13) /* imm7b */
330 | ((r3 & 0x03) << 20)
331 | ((r1 & 0x7f) << 6)
332 | (qp & 0x3f);
335 static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
336 int p2, int r2, int r3)
338 return opc
339 | ((p2 & 0x3f) << 27)
340 | ((r3 & 0x7f) << 20)
341 | ((r2 & 0x7f) << 13)
342 | ((p1 & 0x3f) << 6)
343 | (qp & 0x3f);
346 static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
348 return opc
349 | ((imm & 0x100000) << 16) /* s */
350 | ((imm & 0x0fffff) << 13) /* imm20b */
351 | (qp & 0x3f);
354 static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
356 return opc
357 | ((imm & 0x100000) << 16) /* s */
358 | ((imm & 0x0fffff) << 13) /* imm20b */
359 | ((b1 & 0x7) << 6)
360 | (qp & 0x3f);
363 static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
365 return opc
366 | ((b2 & 0x7) << 13)
367 | (qp & 0x3f);
370 static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
372 return opc
373 | ((b2 & 0x7) << 13)
374 | ((b1 & 0x7) << 6)
375 | (qp & 0x3f);
379 static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
381 return opc
382 | ((imm & 0x100000) << 16) /* i */
383 | ((imm & 0x0fffff) << 6) /* imm20a */
384 | (qp & 0x3f);
387 static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
388 int f3, int f4, int f2)
390 return opc
391 | ((f4 & 0x7f) << 27)
392 | ((f3 & 0x7f) << 20)
393 | ((f2 & 0x7f) << 13)
394 | ((f1 & 0x7f) << 6)
395 | (qp & 0x3f);
398 static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
399 int f3, int f4, int f2)
401 return opc
402 | ((f4 & 0x7f) << 27)
403 | ((f3 & 0x7f) << 20)
404 | ((f2 & 0x7f) << 13)
405 | ((f1 & 0x7f) << 6)
406 | (qp & 0x3f);
409 static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
410 int p2, int f2, int f3)
412 return opc
413 | ((p2 & 0x3f) << 27)
414 | ((f3 & 0x7f) << 20)
415 | ((f2 & 0x7f) << 13)
416 | ((f1 & 0x7f) << 6)
417 | (qp & 0x3f);
420 static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
422 return opc
423 | ((f2 & 0x7f) << 13)
424 | ((f1 & 0x7f) << 6)
425 | (qp & 0x3f);
428 static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
430 return opc
431 | ((f2 & 0x7f) << 13)
432 | ((f1 & 0x7f) << 6)
433 | (qp & 0x3f);
436 static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
438 return opc
439 | ((imm & 0x100000) << 16) /* i */
440 | ((imm & 0x0fffff) << 6) /* imm20a */
441 | (qp & 0x3f);
444 static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
445 int r2, int r3)
447 return opc
448 | ((r3 & 0x7f) << 20)
449 | ((r2 & 0x7f) << 13)
450 | ((r1 & 0x7f) << 6)
451 | (qp & 0x3f);
454 static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
455 int r2, int mbtype)
457 return opc
458 | ((mbtype & 0x0f) << 20)
459 | ((r2 & 0x7f) << 13)
460 | ((r1 & 0x7f) << 6)
461 | (qp & 0x3f);
464 static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
465 int r3, int r2)
467 return opc
468 | ((r3 & 0x7f) << 20)
469 | ((r2 & 0x7f) << 13)
470 | ((r1 & 0x7f) << 6)
471 | (qp & 0x3f);
474 static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
475 int r2, int r3)
477 return opc
478 | ((r3 & 0x7f) << 20)
479 | ((r2 & 0x7f) << 13)
480 | ((r1 & 0x7f) << 6)
481 | (qp & 0x3f);
484 static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
485 int r2, int r3, uint64_t count)
487 return opc
488 | ((count & 0x3f) << 27)
489 | ((r3 & 0x7f) << 20)
490 | ((r2 & 0x7f) << 13)
491 | ((r1 & 0x7f) << 6)
492 | (qp & 0x3f);
495 static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
496 int r3, uint64_t pos, uint64_t len)
498 return opc
499 | ((len & 0x3f) << 27)
500 | ((r3 & 0x7f) << 20)
501 | ((pos & 0x3f) << 14)
502 | ((r1 & 0x7f) << 6)
503 | (qp & 0x3f);
506 static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
507 int r2, uint64_t pos, uint64_t len)
509 return opc
510 | ((len & 0x3f) << 27)
511 | ((pos & 0x3f) << 20)
512 | ((r2 & 0x7f) << 13)
513 | ((r1 & 0x7f) << 6)
514 | (qp & 0x3f);
517 static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
518 int r3, uint64_t pos, uint64_t len)
520 return opc
521 | ((imm & 0x01) << 36)
522 | ((len & 0x3f) << 27)
523 | ((r3 & 0x7f) << 20)
524 | ((pos & 0x3f) << 14)
525 | ((r1 & 0x7f) << 6)
526 | (qp & 0x3f);
529 static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
530 int r3, uint64_t pos, uint64_t len)
532 return opc
533 | ((pos & 0x3f) << 31)
534 | ((len & 0x0f) << 27)
535 | ((r3 & 0x7f) << 20)
536 | ((r2 & 0x7f) << 13)
537 | ((r1 & 0x7f) << 6)
538 | (qp & 0x3f);
541 static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
543 return opc
544 | ((imm & 0x100000) << 16) /* i */
545 | ((imm & 0x0fffff) << 6) /* imm20a */
546 | (qp & 0x3f);
549 static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
550 int r2, uint64_t imm)
552 return opc
553 | ((imm & 0x1ff) << 24)
554 | ((r2 & 0x7f) << 13)
555 | ((b1 & 0x7) << 6)
556 | (qp & 0x3f);
559 static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
561 return opc
562 | ((b2 & 0x7) << 13)
563 | ((r1 & 0x7f) << 6)
564 | (qp & 0x3f);
567 static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
569 return opc
570 | ((ar3 & 0x7f) << 20)
571 | ((r2 & 0x7f) << 13)
572 | (qp & 0x3f);
575 static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
577 return opc
578 | ((r3 & 0x7f) << 20)
579 | ((r1 & 0x7f) << 6)
580 | (qp & 0x3f);
583 static inline uint64_t tcg_opc_l2(uint64_t imm)
585 return (imm & 0x7fffffffffc00000ull) >> 22;
588 static inline uint64_t tcg_opc_l3(uint64_t imm)
590 return (imm & 0x07fffffffff00000ull) >> 18;
593 #define tcg_opc_l4 tcg_opc_l3
595 static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
597 return opc
598 | ((r3 & 0x7f) << 20)
599 | ((r1 & 0x7f) << 6)
600 | (qp & 0x3f);
603 static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
604 int r3, uint64_t imm)
606 return opc
607 | ((imm & 0x100) << 28) /* s */
608 | ((imm & 0x080) << 20) /* i */
609 | ((imm & 0x07f) << 13) /* imm7b */
610 | ((r3 & 0x7f) << 20)
611 | ((r1 & 0x7f) << 6)
612 | (qp & 0x3f);
615 static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
617 return opc
618 | ((r3 & 0x7f) << 20)
619 | ((r2 & 0x7f) << 13)
620 | (qp & 0x3f);
623 static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
625 return opc
626 | ((r2 & 0x7f) << 13)
627 | ((f1 & 0x7f) << 6)
628 | (qp & 0x3f);
631 static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
633 return opc
634 | ((f2 & 0x7f) << 13)
635 | ((r1 & 0x7f) << 6)
636 | (qp & 0x3f);
639 static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
640 int sof, int sol, int sor)
642 return opc
643 | ((sor & 0x0f) << 27)
644 | ((sol & 0x7f) << 20)
645 | ((sof & 0x7f) << 13)
646 | ((r1 & 0x7f) << 6)
647 | (qp & 0x3f);
650 static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
652 return opc
653 | ((imm & 0x100000) << 16) /* i */
654 | ((imm & 0x0fffff) << 6) /* imm20a */
655 | (qp & 0x3f);
658 static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
659 int r1, uint64_t imm)
661 return opc
662 | ((imm & 0x8000000000000000ull) >> 27) /* i */
663 | (imm & 0x0000000000200000ull) /* ic */
664 | ((imm & 0x00000000001f0000ull) << 6) /* imm5c */
665 | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
666 | ((imm & 0x000000000000007full) << 13) /* imm7b */
667 | ((r1 & 0x7f) << 6)
668 | (qp & 0x3f);
671 static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
673 return opc
674 | ((imm & 0x0800000000000000ull) >> 23) /* i */
675 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
676 | (qp & 0x3f);
679 static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
681 return opc
682 | ((imm & 0x0800000000000000ull) >> 23) /* i */
683 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
684 | ((b1 & 0x7) << 6)
685 | (qp & 0x3f);
690 /* Relocations - Note that we never encode branches anywhere other than slot 2. */
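/* The displacement is counted in bundles (the unit of tcg_insn_unit),
   and the s/imm20b fields of a slot-2 branch live entirely in the high
   64-bit word of the bundle, which is why only pc->hi is patched.  */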
693 static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
695 uint64_t imm = target - pc;
697 pc->hi = (pc->hi & 0xf700000fffffffffull)
698 | ((imm & 0x100000) << 39) /* s */
699 | ((imm & 0x0fffff) << 36); /* imm20b */
702 static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
704 int64_t high = pc->hi;
706 return ((high >> 39) & 0x100000) + /* s */
707 ((high >> 36) & 0x0fffff); /* imm20b */
710 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
711 intptr_t value, intptr_t addend)
713 tcg_debug_assert(addend == 0);
714 tcg_debug_assert(type == R_IA64_PCREL21B);
715 reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
719 /* Constraints */
722 /* parse target specific constraints */
723 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
725 const char *ct_str;
727 ct_str = *pct_str;
728 switch(ct_str[0]) {
729 case 'r':
730 ct->ct |= TCG_CT_REG;
731 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
732 break;
733 case 'I':
734 ct->ct |= TCG_CT_CONST_S22;
735 break;
736 case 'S':
737 ct->ct |= TCG_CT_REG;
738 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
739 #if defined(CONFIG_SOFTMMU)
740 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
741 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
742 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
743 #endif
744 break;
745 case 'Z':
746 /* We are cheating a bit here, using the fact that the register
747 r0 is also the register number 0. Hence there is no need
748 to check for const_args in each instruction. */
749 ct->ct |= TCG_CT_CONST_ZERO;
750 break;
751 default:
752 return -1;
754 ct_str++;
755 *pct_str = ct_str;
756 return 0;
759 /* test if a constant matches the constraint */
760 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
761 const TCGArgConstraint *arg_ct)
763 int ct;
764 ct = arg_ct->ct;
765 if (ct & TCG_CT_CONST)
766 return 1;
767 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
768 return 1;
769 else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
770 return 1;
771 else
772 return 0;
776 /* Code generation */
779 static tcg_insn_unit *tb_ret_addr;
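/* An IA-64 bundle is 128 bits: a 5-bit template plus three 41-bit
   instruction slots.  tcg_insn_unit holds one bundle as two 64-bit words;
   the low word takes the template, slot 0 and the low 18 bits of slot 1,
   the high word takes the remaining bits of slot 1 and slot 2.  */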
781 static inline void tcg_out_bundle(TCGContext *s, int template,
782 uint64_t slot0, uint64_t slot1,
783 uint64_t slot2)
785 template &= 0x1f; /* 5 bits */
786 slot0 &= 0x1ffffffffffull; /* 41 bits */
787 slot1 &= 0x1ffffffffffull; /* 41 bits */
788 slot2 &= 0x1ffffffffffull; /* 41 bits */
790 *s->code_ptr++ = (tcg_insn_unit){
791 (slot1 << 46) | (slot0 << 5) | template,
792 (slot2 << 23) | (slot1 >> 18)
796 static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
798 return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
801 static inline void tcg_out_mov(TCGContext *s, TCGType type,
802 TCGReg ret, TCGReg arg)
804 tcg_out_bundle(s, mmI,
805 INSN_NOP_M,
806 INSN_NOP_M,
807 tcg_opc_mov_a(TCG_REG_P0, ret, arg));
810 static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
812 tcg_debug_assert(src == sextract64(src, 0, 22));
813 return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
816 static inline void tcg_out_movi(TCGContext *s, TCGType type,
817 TCGReg reg, tcg_target_long arg)
819 tcg_out_bundle(s, mLX,
820 INSN_NOP_M,
821 tcg_opc_l2 (arg),
822 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
825 static void tcg_out_br(TCGContext *s, TCGLabel *l)
827 uint64_t imm;
829 /* We take care here not to modify the branch target: we read the
830 existing value and use it again. This ensures that caches and
831 memory are kept coherent during retranslation. */
832 if (l->has_value) {
833 imm = l->u.value_ptr - s->code_ptr;
834 } else {
835 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
836 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
839 tcg_out_bundle(s, mmB,
840 INSN_NOP_M,
841 INSN_NOP_M,
842 tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
845 static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
847 uintptr_t func = desc->lo, gp = desc->hi, disp;
849 /* Look through the function descriptor. */
850 tcg_out_bundle(s, mlx,
851 INSN_NOP_M,
852 tcg_opc_l2 (gp),
853 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
854 disp = (tcg_insn_unit *)func - s->code_ptr;
855 tcg_out_bundle(s, mLX,
856 INSN_NOP_M,
857 tcg_opc_l4 (disp),
858 tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
859 TCG_REG_B0, disp));
862 static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
864 uint64_t imm, opc1;
866 /* At least arg == 0 is a common operation. */
867 if (arg == sextract64(arg, 0, 22)) {
868 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
869 } else {
870 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
871 opc1 = INSN_NOP_M;
874 imm = tb_ret_addr - s->code_ptr;
876 tcg_out_bundle(s, mLX,
877 opc1,
878 tcg_opc_l3 (imm),
879 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
882 static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
884 if (s->tb_jmp_insn_offset) {
885 /* direct jump method */
886 tcg_abort();
887 } else {
888 /* indirect jump method */
889 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
890 (tcg_target_long)(s->tb_jmp_target_addr + arg));
891 tcg_out_bundle(s, MmI,
892 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
893 TCG_REG_R2, TCG_REG_R2),
894 INSN_NOP_M,
895 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
896 TCG_REG_R2, 0));
897 tcg_out_bundle(s, mmB,
898 INSN_NOP_M,
899 INSN_NOP_M,
900 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
901 TCG_REG_B6));
903 s->tb_jmp_reset_offset[arg] = tcg_current_code_size(s);
906 static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
908 tcg_out_bundle(s, mmI,
909 INSN_NOP_M,
910 INSN_NOP_M,
911 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
912 tcg_out_bundle(s, mmB,
913 INSN_NOP_M,
914 INSN_NOP_M,
915 tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
918 static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
919 TCGArg arg1, tcg_target_long arg2)
921 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
922 tcg_out_bundle(s, MmI,
923 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
924 TCG_REG_R2, arg2, arg1),
925 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
926 INSN_NOP_I);
927 } else {
928 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
929 tcg_out_bundle(s, MmI,
930 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
931 TCG_REG_R2, TCG_REG_R2, arg1),
932 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
933 INSN_NOP_I);
937 static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
938 TCGArg arg1, tcg_target_long arg2)
940 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
941 tcg_out_bundle(s, MmI,
942 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
943 TCG_REG_R2, arg2, arg1),
944 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
945 INSN_NOP_I);
946 } else {
947 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
948 tcg_out_bundle(s, MmI,
949 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
950 TCG_REG_R2, TCG_REG_R2, arg1),
951 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
952 INSN_NOP_I);
956 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
957 TCGReg arg1, intptr_t arg2)
959 if (type == TCG_TYPE_I32) {
960 tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
961 } else {
962 tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
966 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
967 TCGReg arg1, intptr_t arg2)
969 if (type == TCG_TYPE_I32) {
970 tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
971 } else {
972 tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
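/* Emit ret = arg1 <op> arg2 using the A1 (register) or A3 (8-bit
   immediate) form.  A constant arg2 is materialized in R3, and a constant
   arg1 that does not fit the A3 immediate is materialized in R2, so the
   whole operation always fits in a single bundle.  */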
976 static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
977 TCGReg ret, TCGArg arg1, int const_arg1,
978 TCGArg arg2, int const_arg2)
980 uint64_t opc1 = 0, opc2 = 0, opc3 = 0;
982 if (const_arg2 && arg2 != 0) {
983 opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
984 arg2 = TCG_REG_R3;
986 if (const_arg1 && arg1 != 0) {
987 if (opc_a3 && arg1 == (int8_t)arg1) {
988 opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
989 } else {
990 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
991 arg1 = TCG_REG_R2;
994 if (opc3 == 0) {
995 opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
998 tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
999 opc1 ? opc1 : INSN_NOP_M,
1000 opc2 ? opc2 : INSN_NOP_I,
1001 opc3);
1004 static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
1005 TCGArg arg2, int const_arg2)
1007 if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
1008 tcg_out_bundle(s, mmI,
1009 INSN_NOP_M,
1010 INSN_NOP_M,
1011 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
1012 } else {
1013 tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
1017 static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
1018 int const_arg1, TCGArg arg2, int const_arg2)
1020 if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
1021 tcg_out_bundle(s, mmI,
1022 INSN_NOP_M,
1023 INSN_NOP_M,
1024 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
1025 } else {
1026 tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
1027 arg1, const_arg1, arg2, const_arg2);
1031 static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
1032 TCGArg arg1, int const_arg1,
1033 TCGArg arg2, int const_arg2)
1035 tcg_out_bundle(s, mII,
1036 INSN_NOP_M,
1037 tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
1038 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1041 static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
1042 TCGArg arg1, int const_arg1,
1043 TCGArg arg2, int const_arg2)
1045 tcg_out_bundle(s, mII,
1046 INSN_NOP_M,
1047 tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
1048 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1051 static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
1052 TCGArg arg1, int const_arg1,
1053 TCGArg arg2, int const_arg2)
1055 tcg_out_bundle(s, mII,
1056 INSN_NOP_M,
1057 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
1058 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1061 static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
1062 TCGArg arg1, int const_arg1,
1063 TCGArg arg2, int const_arg2)
1065 tcg_out_bundle(s, mII,
1066 INSN_NOP_M,
1067 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
1068 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
1071 static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
1072 TCGArg arg1, TCGArg arg2)
1074 tcg_out_bundle(s, mmI,
1075 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
1076 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
1077 INSN_NOP_I);
1078 tcg_out_bundle(s, mmF,
1079 INSN_NOP_M,
1080 INSN_NOP_M,
1081 tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
1082 TCG_REG_F7, TCG_REG_F0));
1083 tcg_out_bundle(s, miI,
1084 tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
1085 INSN_NOP_I,
1086 INSN_NOP_I);
1089 static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1090 TCGArg arg2, int const_arg2)
1092 if (const_arg2) {
1093 tcg_out_bundle(s, miI,
1094 INSN_NOP_M,
1095 INSN_NOP_I,
1096 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1097 ret, arg1, arg2, 31 - arg2));
1098 } else {
1099 tcg_out_bundle(s, mII,
1100 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
1101 TCG_REG_R3, 0x1f, arg2),
1102 tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
1103 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
1104 TCG_REG_R2, TCG_REG_R3));
1108 static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1109 TCGArg arg2, int const_arg2)
1111 if (const_arg2) {
1112 tcg_out_bundle(s, miI,
1113 INSN_NOP_M,
1114 INSN_NOP_I,
1115 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1116 ret, arg1, arg2, 63 - arg2));
1117 } else {
1118 tcg_out_bundle(s, miI,
1119 INSN_NOP_M,
1120 INSN_NOP_I,
1121 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
1125 static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1126 TCGArg arg2, int const_arg2)
1128 if (const_arg2) {
1129 tcg_out_bundle(s, miI,
1130 INSN_NOP_M,
1131 INSN_NOP_I,
1132 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1133 arg1, 63 - arg2, 31 - arg2));
1134 } else {
1135 tcg_out_bundle(s, mII,
1136 INSN_NOP_M,
1137 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
1138 0x1f, arg2),
1139 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1140 arg1, TCG_REG_R2));
1144 static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1145 TCGArg arg2, int const_arg2)
1147 if (const_arg2) {
1148 tcg_out_bundle(s, miI,
1149 INSN_NOP_M,
1150 INSN_NOP_I,
1151 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1152 arg1, 63 - arg2, 63 - arg2));
1153 } else {
1154 tcg_out_bundle(s, miI,
1155 INSN_NOP_M,
1156 INSN_NOP_I,
1157 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1158 arg1, arg2));
1162 static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1163 TCGArg arg2, int const_arg2)
1165 if (const_arg2) {
1166 tcg_out_bundle(s, miI,
1167 INSN_NOP_M,
1168 INSN_NOP_I,
1169 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1170 arg1, arg2, 31 - arg2));
1171 } else {
1172 tcg_out_bundle(s, mII,
1173 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1174 0x1f, arg2),
1175 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
1176 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1177 TCG_REG_R2, TCG_REG_R3));
1181 static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1182 TCGArg arg2, int const_arg2)
1184 if (const_arg2) {
1185 tcg_out_bundle(s, miI,
1186 INSN_NOP_M,
1187 INSN_NOP_I,
1188 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1189 arg1, arg2, 63 - arg2));
1190 } else {
1191 tcg_out_bundle(s, miI,
1192 INSN_NOP_M,
1193 INSN_NOP_I,
1194 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1195 arg1, arg2));
1199 static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1200 TCGArg arg2, int const_arg2)
1202 if (const_arg2) {
1203 tcg_out_bundle(s, mII,
1204 INSN_NOP_M,
1205 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1206 TCG_REG_R2, arg1, arg1),
1207 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1208 TCG_REG_R2, 32 - arg2, 31));
1209 } else {
1210 tcg_out_bundle(s, miI,
1211 INSN_NOP_M,
1212 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1213 TCG_REG_R2, arg1, arg1),
1214 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1215 0x1f, arg2));
1216 tcg_out_bundle(s, mII,
1217 INSN_NOP_M,
1218 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
1219 0x20, TCG_REG_R3),
1220 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1221 TCG_REG_R2, TCG_REG_R3));
1225 static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1226 TCGArg arg2, int const_arg2)
1228 if (const_arg2) {
1229 tcg_out_bundle(s, miI,
1230 INSN_NOP_M,
1231 INSN_NOP_I,
1232 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1233 arg1, 0x40 - arg2));
1234 } else {
1235 tcg_out_bundle(s, mII,
1236 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1237 0x40, arg2),
1238 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
1239 arg1, arg2),
1240 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
1241 arg1, TCG_REG_R2));
1242 tcg_out_bundle(s, miI,
1243 INSN_NOP_M,
1244 INSN_NOP_I,
1245 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1246 TCG_REG_R2, TCG_REG_R3));
1250 static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1251 TCGArg arg2, int const_arg2)
1253 if (const_arg2) {
1254 tcg_out_bundle(s, mII,
1255 INSN_NOP_M,
1256 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1257 TCG_REG_R2, arg1, arg1),
1258 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1259 TCG_REG_R2, arg2, 31));
1260 } else {
1261 tcg_out_bundle(s, mII,
1262 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1263 0x1f, arg2),
1264 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1265 TCG_REG_R2, arg1, arg1),
1266 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1267 TCG_REG_R2, TCG_REG_R3));
1271 static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1272 TCGArg arg2, int const_arg2)
1274 if (const_arg2) {
1275 tcg_out_bundle(s, miI,
1276 INSN_NOP_M,
1277 INSN_NOP_I,
1278 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1279 arg1, arg2));
1280 } else {
1281 tcg_out_bundle(s, mII,
1282 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1283 0x40, arg2),
1284 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
1285 arg1, arg2),
1286 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
1287 arg1, TCG_REG_R2));
1288 tcg_out_bundle(s, miI,
1289 INSN_NOP_M,
1290 INSN_NOP_I,
1291 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1292 TCG_REG_R2, TCG_REG_R3));
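/* Extension opcodes indexed by MO_SSIZE: zero-extends in the first half,
   sign-extends in the second.  MO_64 has no I29 form and is handled as a
   plain move in tcg_opc_ext_i below.  */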
1296 static const uint64_t opc_ext_i29[8] = {
1297 OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
1298 OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
1301 static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
1303 if ((opc & MO_SIZE) == MO_64) {
1304 return tcg_opc_mov_a(qp, d, s);
1305 } else {
1306 return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
1310 static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
1311 TCGArg ret, TCGArg arg)
1313 tcg_out_bundle(s, miI,
1314 INSN_NOP_M,
1315 INSN_NOP_I,
1316 tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
1319 static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
1321 return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
1324 static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
1326 tcg_out_bundle(s, mII,
1327 INSN_NOP_M,
1328 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
1329 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1332 static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
1334 tcg_out_bundle(s, mII,
1335 INSN_NOP_M,
1336 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
1337 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1340 static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
1342 tcg_out_bundle(s, miI,
1343 INSN_NOP_M,
1344 INSN_NOP_I,
1345 tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
1348 static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
1349 TCGArg a2, int const_a2, int pos, int len)
1351 uint64_t i1 = 0, i2 = 0;
1352 int cpos = 63 - pos, lm1 = len - 1;
1354 if (const_a2) {
1355 /* Truncate the value of a constant a2 to the width of the field. */
1356 int mask = (1u << len) - 1;
1357 a2 &= mask;
1359 if (a2 == 0 || a2 == mask) {
1360 /* 1-bit signed constant inserted into register. */
1361 i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
1362 } else {
1363 /* Otherwise, load any constant into a temporary. Do this into
1364 the first I slot to help out with cross-unit delays. */
1365 i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
1366 a2 = TCG_REG_R2;
1369 if (i2 == 0) {
1370 i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
1372 tcg_out_bundle(s, (i1 ? mII : miI),
1373 INSN_NOP_M,
1374 i1 ? i1 : INSN_NOP_I,
1375 i2);
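/* Emit a compare that sets predicate P6 to the condition and P7 to its
   complement.  Only the eq/lt/ltu encodings (and their 32-bit cmp4
   variants) are used; the remaining conditions are obtained by swapping
   either the predicate targets or the operands.  */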
1378 static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
1379 TCGArg arg2, int cmp4)
1381 uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;
1383 if (cmp4) {
1384 opc_eq_a6 = OPC_CMP4_EQ_A6;
1385 opc_lt_a6 = OPC_CMP4_LT_A6;
1386 opc_ltu_a6 = OPC_CMP4_LTU_A6;
1387 } else {
1388 opc_eq_a6 = OPC_CMP_EQ_A6;
1389 opc_lt_a6 = OPC_CMP_LT_A6;
1390 opc_ltu_a6 = OPC_CMP_LTU_A6;
1393 switch (cond) {
1394 case TCG_COND_EQ:
1395 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1396 case TCG_COND_NE:
1397 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1398 case TCG_COND_LT:
1399 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1400 case TCG_COND_LTU:
1401 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1402 case TCG_COND_GE:
1403 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1404 case TCG_COND_GEU:
1405 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1406 case TCG_COND_LE:
1407 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1408 case TCG_COND_LEU:
1409 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1410 case TCG_COND_GT:
1411 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1412 case TCG_COND_GTU:
1413 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1414 default:
1415 tcg_abort();
1416 break;
1420 static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
1421 TCGReg arg2, TCGLabel *l, int cmp4)
1423 uint64_t imm;
1425 /* We take care here not to modify the branch target: we read the
1426 existing value and use it again. This ensures that caches and
1427 memory are kept coherent during retranslation. */
1428 if (l->has_value) {
1429 imm = l->u.value_ptr - s->code_ptr;
1430 } else {
1431 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
1432 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
1435 tcg_out_bundle(s, miB,
1436 INSN_NOP_M,
1437 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1438 tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
1441 static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
1442 TCGArg arg1, TCGArg arg2, int cmp4)
1444 tcg_out_bundle(s, MmI,
1445 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1446 tcg_opc_movi_a(TCG_REG_P6, ret, 1),
1447 tcg_opc_movi_a(TCG_REG_P7, ret, 0));
1450 static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
1451 TCGArg c1, TCGArg c2,
1452 TCGArg v1, int const_v1,
1453 TCGArg v2, int const_v2, int cmp4)
1455 uint64_t opc1, opc2;
1457 if (const_v1) {
1458 opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
1459 } else if (ret == v1) {
1460 opc1 = INSN_NOP_M;
1461 } else {
1462 opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
1464 if (const_v2) {
1465 opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
1466 } else if (ret == v2) {
1467 opc2 = INSN_NOP_I;
1468 } else {
1469 opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
1472 tcg_out_bundle(s, MmI,
1473 tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
1474 opc1,
1475 opc2);
1478 #if defined(CONFIG_SOFTMMU)
1479 /* We're expecting to use a signed 22-bit immediate add. */
1480 QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
1481 > 0x1fffff)
1483 /* Load and compare a TLB entry, and return the result in (p6, p7).
1484 R2 is loaded with the addend TLB entry.
1485 R57 is loaded with the address, zero extended on 32-bit targets.
1486 R1, R3 are clobbered, leaving R56 free for...
1487 BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store. */
1488 static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
1489 TCGMemOp s_bits, int off_rw, int off_add,
1490 uint64_t bswap1, uint64_t bswap2)
1493 /* .mii
1494 mov r2 = off_rw
1495 extr.u r3 = addr_reg, ... # extract tlb page
1496 zxt4 r57 = addr_reg # or mov for 64-bit guest
1498 .mii
1499 addl r2 = r2, areg0
1500 shl r3 = r3, cteb # via dep.z
1501 dep r1 = 0, r57, ... # zero page ofs, keep align
1503 .mmi
1504 add r2 = r2, r3
1506 ld4 r3 = [r2], off_add-off_rw # or ld8 for 64-bit guest
1509 .mmi
1511 cmp.eq p6, p7 = r3, r58 */
1515 tcg_out_bundle(s, miI,
1516 tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
1517 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
1518 addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
1519 tcg_opc_ext_i(TCG_REG_P0,
1520 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
1521 TCG_REG_R57, addr_reg));
1522 tcg_out_bundle(s, miI,
1523 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1524 TCG_REG_R2, TCG_AREG0),
1525 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
1526 TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
1527 63 - CPU_TLB_ENTRY_BITS),
1528 tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
1529 TCG_REG_R57, 63 - s_bits,
1530 TARGET_PAGE_BITS - s_bits - 1));
1531 tcg_out_bundle(s, MmI,
1532 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
1533 TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
1534 tcg_opc_m3 (TCG_REG_P0,
1535 (TARGET_LONG_BITS == 32
1536 ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
1537 TCG_REG_R2, off_add - off_rw),
1538 bswap1);
1539 tcg_out_bundle(s, mmI,
1540 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
1541 tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
1542 TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
1543 bswap2);
1546 typedef struct TCGLabelQemuLdst {
1547 bool is_ld;
1548 TCGMemOp size;
1549 tcg_insn_unit *label_ptr; /* label pointers to be updated */
1550 struct TCGLabelQemuLdst *next;
1551 } TCGLabelQemuLdst;
1553 typedef struct TCGBackendData {
1554 TCGLabelQemuLdst *labels;
1555 } TCGBackendData;
1557 static inline void tcg_out_tb_init(TCGContext *s)
1559 s->be->labels = NULL;
1562 static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
1563 tcg_insn_unit *label_ptr)
1565 TCGBackendData *be = s->be;
1566 TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));
1568 l->is_ld = is_ld;
1569 l->size = opc & MO_SIZE;
1570 l->label_ptr = label_ptr;
1571 l->next = be->labels;
1572 be->labels = l;
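/* Record a slow-path call site: label_ptr points at the miB bundle whose
   B-slot br.call is retargeted by tcg_out_tb_finalize to the shared
   out-of-line helper thunk for this access size and direction.  */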
1575 static bool tcg_out_tb_finalize(TCGContext *s)
1577 static const void * const helpers[8] = {
1578 helper_ret_stb_mmu,
1579 helper_le_stw_mmu,
1580 helper_le_stl_mmu,
1581 helper_le_stq_mmu,
1582 helper_ret_ldub_mmu,
1583 helper_le_lduw_mmu,
1584 helper_le_ldul_mmu,
1585 helper_le_ldq_mmu,
1587 tcg_insn_unit *thunks[8] = { };
1588 TCGLabelQemuLdst *l;
1590 for (l = s->be->labels; l != NULL; l = l->next) {
1591 long x = l->is_ld * 4 + l->size;
1592 tcg_insn_unit *dest = thunks[x];
1594 /* The out-of-line thunks are all the same; load the return address
1595 from B0, load the GP, and branch to the code. Note that we are
1596 always post-call, so the register window has rolled and we're
1597 using incoming parameter register numbers, not outgoing. */
1598 if (dest == NULL) {
1599 uintptr_t *desc = (uintptr_t *)helpers[x];
1600 uintptr_t func = desc[0], gp = desc[1], disp;
1602 thunks[x] = dest = s->code_ptr;
1604 tcg_out_bundle(s, mlx,
1605 INSN_NOP_M,
1606 tcg_opc_l2 (gp),
1607 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
1608 TCG_REG_R1, gp));
1609 tcg_out_bundle(s, mii,
1610 INSN_NOP_M,
1611 INSN_NOP_I,
1612 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
1613 l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
1614 TCG_REG_B0));
1615 disp = (tcg_insn_unit *)func - s->code_ptr;
1616 tcg_out_bundle(s, mLX,
1617 INSN_NOP_M,
1618 tcg_opc_l3 (disp),
1619 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
1622 reloc_pcrel21b_slot2(l->label_ptr, dest);
1624 /* Test for (pending) buffer overflow. The assumption is that any
1625 one operation beginning below the high water mark cannot overrun
1626 the buffer completely. Thus we can test for overflow after
1627 generating code without having to check during generation. */
1628 if (unlikely((void *)s->code_ptr > s->code_gen_highwater)) {
1629 return false;
1632 return true;
1635 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1637 static const uint64_t opc_ld_m1[4] = {
1638 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1640 int addr_reg, data_reg, mem_index;
1641 TCGMemOpIdx oi;
1642 TCGMemOp opc, s_bits;
1643 uint64_t fin1, fin2;
1644 tcg_insn_unit *label_ptr;
1646 data_reg = args[0];
1647 addr_reg = args[1];
1648 oi = args[2];
1649 opc = get_memop(oi);
1650 mem_index = get_mmuidx(oi);
1651 s_bits = opc & MO_SIZE;
1653 /* Read the TLB entry */
1654 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1655 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
1656 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1657 INSN_NOP_I, INSN_NOP_I);
1659 /* P6 is the fast path, and P7 the slow path */
1661 fin2 = 0;
1662 if (opc & MO_BSWAP) {
1663 fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
1664 if (s_bits < MO_64) {
1665 int shift = 64 - (8 << s_bits);
1666 fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
1667 fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
1668 data_reg, data_reg, shift, 63 - shift);
1670 } else {
1671 fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
1674 tcg_out_bundle(s, mmI,
1675 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1676 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1677 TCG_REG_R2, TCG_REG_R57),
1678 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, oi));
1679 label_ptr = s->code_ptr;
1680 tcg_out_bundle(s, miB,
1681 tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
1682 TCG_REG_R8, TCG_REG_R2),
1683 INSN_NOP_I,
1684 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1685 get_reloc_pcrel21b_slot2(label_ptr)));
1687 add_qemu_ldst_label(s, 1, opc, label_ptr);
1689 /* Note that we always use LE helper functions, so the bswap insns
1690 here for the fast path also apply to the slow path. */
1691 tcg_out_bundle(s, (fin2 ? mII : miI),
1692 INSN_NOP_M,
1693 fin1,
1694 fin2 ? fin2 : INSN_NOP_I);
1697 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1699 static const uint64_t opc_st_m4[4] = {
1700 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1702 TCGReg addr_reg, data_reg;
1703 int mem_index;
1704 uint64_t pre1, pre2;
1705 TCGMemOpIdx oi;
1706 TCGMemOp opc, s_bits;
1707 tcg_insn_unit *label_ptr;
1709 data_reg = args[0];
1710 addr_reg = args[1];
1711 oi = args[2];
1712 opc = get_memop(oi);
1713 mem_index = get_mmuidx(oi);
1714 s_bits = opc & MO_SIZE;
1716 /* Note that we always use LE helper functions, so the bswap insns
1717 that are here for the fast path also apply to the slow path,
1718 and move the data into the argument register. */
1719 pre2 = INSN_NOP_I;
1720 if (opc & MO_BSWAP) {
1721 pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
1722 if (s_bits < MO_64) {
1723 int shift = 64 - (8 << s_bits);
1724 pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
1725 TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
1727 } else {
1728 /* Just move the data into place for the slow path. */
1729 pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
1732 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1733 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
1734 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1735 pre1, pre2);
1737 /* P6 is the fast path, and P7 the slow path */
1738 tcg_out_bundle(s, mmI,
1739 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1740 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1741 TCG_REG_R2, TCG_REG_R57),
1742 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, oi));
1743 label_ptr = s->code_ptr;
1744 tcg_out_bundle(s, miB,
1745 tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
1746 TCG_REG_R58, TCG_REG_R2),
1747 INSN_NOP_I,
1748 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1749 get_reloc_pcrel21b_slot2(label_ptr)));
1751 add_qemu_ldst_label(s, 0, opc, label_ptr);
1754 #else /* !CONFIG_SOFTMMU */
1755 # include "tcg-be-null.h"
1757 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1759 static uint64_t const opc_ld_m1[4] = {
1760 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1762 int addr_reg, data_reg;
1763 TCGMemOp opc, s_bits, bswap;
1765 data_reg = args[0];
1766 addr_reg = args[1];
1767 opc = args[2];
1768 s_bits = opc & MO_SIZE;
1769 bswap = opc & MO_BSWAP;
1771 #if TARGET_LONG_BITS == 32
1772 if (guest_base != 0) {
1773 tcg_out_bundle(s, mII,
1774 INSN_NOP_M,
1775 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1776 TCG_REG_R3, addr_reg),
1777 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1778 TCG_GUEST_BASE_REG, TCG_REG_R3));
1779 } else {
1780 tcg_out_bundle(s, miI,
1781 INSN_NOP_M,
1782 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1783 TCG_REG_R2, addr_reg),
1784 INSN_NOP_I);
1787 if (!bswap) {
1788 if (!(opc & MO_SIGN)) {
1789 tcg_out_bundle(s, miI,
1790 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1791 data_reg, TCG_REG_R2),
1792 INSN_NOP_I,
1793 INSN_NOP_I);
1794 } else {
1795 tcg_out_bundle(s, mII,
1796 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1797 data_reg, TCG_REG_R2),
1798 INSN_NOP_I,
1799 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1801 } else if (s_bits == MO_64) {
1802 tcg_out_bundle(s, mII,
1803 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1804 data_reg, TCG_REG_R2),
1805 INSN_NOP_I,
1806 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1807 } else {
1808 if (s_bits == MO_16) {
1809 tcg_out_bundle(s, mII,
1810 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1811 data_reg, TCG_REG_R2),
1812 INSN_NOP_I,
1813 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1814 data_reg, data_reg, 15, 15));
1815 } else {
1816 tcg_out_bundle(s, mII,
1817 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1818 data_reg, TCG_REG_R2),
1819 INSN_NOP_I,
1820 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1821 data_reg, data_reg, 31, 31));
1823 if (!(opc & MO_SIGN)) {
1824 tcg_out_bundle(s, miI,
1825 INSN_NOP_M,
1826 INSN_NOP_I,
1827 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1828 } else {
1829 tcg_out_bundle(s, mII,
1830 INSN_NOP_M,
1831 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
1832 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1835 #else
1836 if (guest_base != 0) {
1837 tcg_out_bundle(s, MmI,
1838 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1839 TCG_GUEST_BASE_REG, addr_reg),
1840 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1841 data_reg, TCG_REG_R2),
1842 INSN_NOP_I);
1843 } else {
1844 tcg_out_bundle(s, mmI,
1845 INSN_NOP_M,
1846 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1847 data_reg, addr_reg),
1848 INSN_NOP_I);
1851 if (bswap && s_bits == MO_16) {
1852 tcg_out_bundle(s, mII,
1853 INSN_NOP_M,
1854 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1855 data_reg, data_reg, 15, 15),
1856 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1857 } else if (bswap && s_bits == MO_32) {
1858 tcg_out_bundle(s, mII,
1859 INSN_NOP_M,
1860 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1861 data_reg, data_reg, 31, 31),
1862 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1863 } else if (bswap && s_bits == MO_64) {
1864 tcg_out_bundle(s, miI,
1865 INSN_NOP_M,
1866 INSN_NOP_I,
1867 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1869 if (opc & MO_SIGN) {
1870 tcg_out_bundle(s, miI,
1871 INSN_NOP_M,
1872 INSN_NOP_I,
1873 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1875 #endif
1878 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1880 static uint64_t const opc_st_m4[4] = {
1881 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1883 int addr_reg, data_reg;
1884 #if TARGET_LONG_BITS == 64
1885 uint64_t add_guest_base;
1886 #endif
1887 TCGMemOp opc, s_bits, bswap;
1889 data_reg = args[0];
1890 addr_reg = args[1];
1891 opc = args[2];
1892 s_bits = opc & MO_SIZE;
1893 bswap = opc & MO_BSWAP;
1895 #if TARGET_LONG_BITS == 32
1896 if (guest_base != 0) {
1897 tcg_out_bundle(s, mII,
1898 INSN_NOP_M,
1899 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1900 TCG_REG_R3, addr_reg),
1901 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1902 TCG_GUEST_BASE_REG, TCG_REG_R3));
1903 } else {
1904 tcg_out_bundle(s, miI,
1905 INSN_NOP_M,
1906 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1907 TCG_REG_R2, addr_reg),
1908 INSN_NOP_I);
1911 if (bswap) {
1912 if (s_bits == MO_16) {
1913 tcg_out_bundle(s, mII,
1914 INSN_NOP_M,
1915 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1916 TCG_REG_R3, data_reg, 15, 15),
1917 tcg_opc_bswap64_i(TCG_REG_P0,
1918 TCG_REG_R3, TCG_REG_R3));
1919 data_reg = TCG_REG_R3;
1920 } else if (s_bits == MO_32) {
1921 tcg_out_bundle(s, mII,
1922 INSN_NOP_M,
1923 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1924 TCG_REG_R3, data_reg, 31, 31),
1925 tcg_opc_bswap64_i(TCG_REG_P0,
1926 TCG_REG_R3, TCG_REG_R3));
1927 data_reg = TCG_REG_R3;
1928 } else if (s_bits == MO_64) {
1929 tcg_out_bundle(s, miI,
1930 INSN_NOP_M,
1931 INSN_NOP_I,
1932 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1933 data_reg = TCG_REG_R3;
1936 tcg_out_bundle(s, mmI,
1937 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1938 data_reg, TCG_REG_R2),
1939 INSN_NOP_M,
1940 INSN_NOP_I);
1941 #else
1942 if (guest_base != 0) {
1943 add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1944 TCG_GUEST_BASE_REG, addr_reg);
1945 addr_reg = TCG_REG_R2;
1946 } else {
1947 add_guest_base = INSN_NOP_M;
1950 if (!bswap) {
1951 tcg_out_bundle(s, (guest_base ? MmI : mmI),
1952 add_guest_base,
1953 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1954 data_reg, addr_reg),
1955 INSN_NOP_I);
1956 } else {
1957 if (s_bits == MO_16) {
1958 tcg_out_bundle(s, mII,
1959 add_guest_base,
1960 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1961 TCG_REG_R3, data_reg, 15, 15),
1962 tcg_opc_bswap64_i(TCG_REG_P0,
1963 TCG_REG_R3, TCG_REG_R3));
1964 data_reg = TCG_REG_R3;
1965 } else if (s_bits == MO_32) {
1966 tcg_out_bundle(s, mII,
1967 add_guest_base,
1968 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1969 TCG_REG_R3, data_reg, 31, 31),
1970 tcg_opc_bswap64_i(TCG_REG_P0,
1971 TCG_REG_R3, TCG_REG_R3));
1972 data_reg = TCG_REG_R3;
1973 } else if (s_bits == MO_64) {
1974 tcg_out_bundle(s, miI,
1975 add_guest_base,
1976 INSN_NOP_I,
1977 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1978 data_reg = TCG_REG_R3;
1980 tcg_out_bundle(s, miI,
1981 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1982 data_reg, addr_reg),
1983 INSN_NOP_I,
1984 INSN_NOP_I);
1986 #endif
1989 #endif
1991 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1992 const TCGArg *args, const int *const_args)
1994 switch(opc) {
1995 case INDEX_op_exit_tb:
1996 tcg_out_exit_tb(s, args[0]);
1997 break;
1998 case INDEX_op_br:
        tcg_out_br(s, arg_label(args[0]));
        break;
    case INDEX_op_goto_tb:
        tcg_out_goto_tb(s, args[0]);
        break;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
        tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
        tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
        tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
        tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
        tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
        tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
        tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
        break;

    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap.  */
        tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
        tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
                    args[1], const_args[1], args[2], const_args[2]);
        break;
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
        tcg_out_eqv(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
        tcg_out_nand(s, args[0], args[1], const_args[1],
                     args[2], const_args[2]);
        break;
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
        tcg_out_nor(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap.  */
        tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
        tcg_out_orc(s, args[0], args[1], const_args[1],
                    args[2], const_args[2]);
        break;
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
        /* TCG expects arg2 constant; A3 expects arg1 constant.  Swap.  */
        tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
                    args[2], const_args[2], args[1], const_args[1]);
        break;

    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
        tcg_out_mul(s, args[0], args[1], args[2]);
        break;

    case INDEX_op_sar_i32:
        tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_sar_i64:
        tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i32:
        tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shl_i64:
        tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i32:
        tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_shr_i64:
        tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i32:
        tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotl_i64:
        tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i32:
        tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_rotr_i64:
        tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
        tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
        tcg_out_bswap32(s, args[0], args[1]);
        break;
    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
                        args[3], args[4]);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 1);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], arg_label(args[3]), 0);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
        break;
    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 1);
        break;
    case INDEX_op_movcond_i64:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2],
                        args[3], const_args[3], args[4], const_args[4], 0);
        break;

    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi.  */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
}
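
/* Operand constraints for each opcode: one string per operand, outputs
   listed before inputs.  "r" is any general register; "Z" additionally
   accepts the constant zero (supplied via r0); the remaining letters
   are constraint codes decoded by the backend's constraint parser
   earlier in this file.  */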
static const TCGTargetOpDef ia64_op_defs[] = {
    { INDEX_op_br, { } },
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i32, { "r", "rI", "rI" } },

    { INDEX_op_and_i32, { "r", "rI", "rI" } },
    { INDEX_op_andc_i32, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i32, { "r", "rI", "rI" } },
    { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i32, { "r", "rI", "rI" } },

    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext8u_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },
    { INDEX_op_ext16u_i32, { "r", "rZ" } },

    { INDEX_op_bswap16_i32, { "r", "rZ" } },
    { INDEX_op_bswap32_i32, { "r", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "rZ", "r" } },
    { INDEX_op_st16_i64, { "rZ", "r" } },
    { INDEX_op_st32_i64, { "rZ", "r" } },
    { INDEX_op_st_i64, { "rZ", "r" } },

    { INDEX_op_add_i64, { "r", "rZ", "rI" } },
    { INDEX_op_sub_i64, { "r", "rI", "rI" } },

    { INDEX_op_and_i64, { "r", "rI", "rI" } },
    { INDEX_op_andc_i64, { "r", "rI", "rI" } },
    { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_or_i64, { "r", "rI", "rI" } },
    { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_xor_i64, { "r", "rI", "rI" } },

    { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },

    { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i64, { "r", "rZ" } },
    { INDEX_op_ext8u_i64, { "r", "rZ" } },
    { INDEX_op_ext16s_i64, { "r", "rZ" } },
    { INDEX_op_ext16u_i64, { "r", "rZ" } },
    { INDEX_op_ext32s_i64, { "r", "rZ" } },
    { INDEX_op_ext32u_i64, { "r", "rZ" } },
    { INDEX_op_ext_i32_i64, { "r", "rZ" } },
    { INDEX_op_extu_i32_i64, { "r", "rZ" } },

    { INDEX_op_bswap16_i64, { "r", "rZ" } },
    { INDEX_op_bswap32_i64, { "r", "rZ" } },
    { INDEX_op_bswap64_i64, { "r", "rZ" } },

    { INDEX_op_brcond_i64, { "rZ", "rZ" } },
    { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
    { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
    { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },

    { INDEX_op_qemu_ld_i32, { "r", "r" } },
    { INDEX_op_qemu_ld_i64, { "r", "r" } },
    { INDEX_op_qemu_st_i32, { "SZ", "r" } },
    { INDEX_op_qemu_st_i64, { "SZ", "r" } },
    { -1 },
};

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int frame_size;

    /* reserve some stack space */
    frame_size = TCG_STATIC_CALL_ARGS_SIZE +
                 CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* First emit adhoc function descriptor */
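    /* An ia64 function pointer addresses an { entry point, gp } descriptor
       rather than the code itself, so the generated prologue needs one too;
       gp is unused by TCG-generated code and is left as zero.  */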
    *s->code_ptr = (tcg_insn_unit){
        (uint64_t)(s->code_ptr + 1), /* entry point */
        0                            /* skip gp */
    };
    s->code_ptr++;

    /* prologue */
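    /* Allocate a register-stack frame (ar.pfs is saved in r34) and copy
       the second incoming argument -- the TB address, in r33 -- into b6
       for the indirect branch emitted below.  */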
    tcg_out_bundle(s, miI,
                   tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
                               TCG_REG_R34, 32, 24, 0),
                   INSN_NOP_I,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B6, TCG_REG_R33, 0));

    /* ??? If guest_base < 0x200000, we could load the register via
       an ADDL in the M slot of the next bundle.  */
    if (guest_base != 0) {
        tcg_out_bundle(s, mlx,
                       INSN_NOP_M,
                       tcg_opc_l2(guest_base),
                       tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
                                   TCG_GUEST_BASE_REG, guest_base));
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }

    tcg_out_bundle(s, miB,
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, -frame_size, TCG_REG_R12),
                   tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
                               TCG_REG_R33, TCG_REG_B0),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));

    /* epilogue */
    tb_ret_addr = s->code_ptr;
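    /* TB exits branch back to tb_ret_addr: restore the return address
       saved in r33, pop the stack frame, restore ar.pfs from r34 and
       return to the caller of the prologue.  */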
    tcg_out_bundle(s, miI,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
                               TCG_REG_B0, TCG_REG_R33, 0),
                   tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
                               TCG_REG_R12, frame_size, TCG_REG_R12));
    tcg_out_bundle(s, miB,
                   INSN_NOP_M,
                   tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
                               TCG_REG_PFS, TCG_REG_R34),
                   tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
                               TCG_REG_B0));
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
                   0xffffffffffffffffull);
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
                   0xffffffffffffffffull);

    tcg_regset_clear(tcg_target_call_clobber_regs);
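    /* Call-clobbered registers: the return-value registers r8-r11, the
       scratch registers r14-r31, and the register-stack output registers
       r56-r63 used for outgoing call arguments.  */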
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);   /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);   /* global pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);  /* thread pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33);  /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34);  /* PFS */

    /* The following 4 are not in use, are call-saved, but *not* saved
       by the prologue.  Therefore we cannot use them without modifying
       the prologue.  There doesn't seem to be any good reason to use
       these as opposed to the windowed registers.  */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);

    tcg_add_target_add_op_defs(ia64_op_defs);
}