Update to SeaBIOS 0.5.1
[qemu.git] / tcg / sparc / tcg-target.c
blob8f094e55178ee181c0492bad42709578d9df37a5
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
25 #ifndef NDEBUG
26 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
27 "%g0",
28 "%g1",
29 "%g2",
30 "%g3",
31 "%g4",
32 "%g5",
33 "%g6",
34 "%g7",
35 "%o0",
36 "%o1",
37 "%o2",
38 "%o3",
39 "%o4",
40 "%o5",
41 "%o6",
42 "%o7",
43 "%l0",
44 "%l1",
45 "%l2",
46 "%l3",
47 "%l4",
48 "%l5",
49 "%l6",
50 "%l7",
51 "%i0",
52 "%i1",
53 "%i2",
54 "%i3",
55 "%i4",
56 "%i5",
57 "%i6",
58 "%i7",
60 #endif
62 static const int tcg_target_reg_alloc_order[] = {
63 TCG_REG_L0,
64 TCG_REG_L1,
65 TCG_REG_L2,
66 TCG_REG_L3,
67 TCG_REG_L4,
68 TCG_REG_L5,
69 TCG_REG_L6,
70 TCG_REG_L7,
71 TCG_REG_I0,
72 TCG_REG_I1,
73 TCG_REG_I2,
74 TCG_REG_I3,
75 TCG_REG_I4,
78 static const int tcg_target_call_iarg_regs[6] = {
79 TCG_REG_O0,
80 TCG_REG_O1,
81 TCG_REG_O2,
82 TCG_REG_O3,
83 TCG_REG_O4,
84 TCG_REG_O5,
87 static const int tcg_target_call_oarg_regs[2] = {
88 TCG_REG_O0,
89 TCG_REG_O1,
92 static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
94 return (val << ((sizeof(tcg_target_long) * 8 - bits))
95 >> (sizeof(tcg_target_long) * 8 - bits)) == val;
/* Return non-zero if VAL has no bits set above the low BITS bits,
   i.e. it fits in an unsigned field of BITS bits (1 <= bits <= 32).  */
static inline int check_fit_i32(uint32_t val, unsigned int bits)
{
    uint32_t low_mask = 0xffffffffu >> (32 - bits);

    return (val & low_mask) == val;
}
103 static void patch_reloc(uint8_t *code_ptr, int type,
104 tcg_target_long value, tcg_target_long addend)
106 value += addend;
107 switch (type) {
108 case R_SPARC_32:
109 if (value != (uint32_t)value)
110 tcg_abort();
111 *(uint32_t *)code_ptr = value;
112 break;
113 case R_SPARC_WDISP22:
114 value -= (long)code_ptr;
115 value >>= 2;
116 if (!check_fit_tl(value, 22))
117 tcg_abort();
118 *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
119 break;
120 case R_SPARC_WDISP19:
121 value -= (long)code_ptr;
122 value >>= 2;
123 if (!check_fit_tl(value, 19))
124 tcg_abort();
125 *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
126 break;
127 default:
128 tcg_abort();
/* Maximum number of registers used for input function arguments:
   the six SPARC out registers %o0-%o5, regardless of FLAGS.  */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
138 /* parse target specific constraints */
139 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
141 const char *ct_str;
143 ct_str = *pct_str;
144 switch (ct_str[0]) {
145 case 'r':
146 case 'L': /* qemu_ld/st constraint */
147 ct->ct |= TCG_CT_REG;
148 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
149 // Helper args
150 tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
151 tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
152 tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
153 break;
154 case 'I':
155 ct->ct |= TCG_CT_CONST_S11;
156 break;
157 case 'J':
158 ct->ct |= TCG_CT_CONST_S13;
159 break;
160 default:
161 return -1;
163 ct_str++;
164 *pct_str = ct_str;
165 return 0;
168 /* test if a constant matches the constraint */
169 static inline int tcg_target_const_match(tcg_target_long val,
170 const TCGArgConstraint *arg_ct)
172 int ct;
174 ct = arg_ct->ct;
175 if (ct & TCG_CT_CONST)
176 return 1;
177 else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
178 return 1;
179 else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
180 return 1;
181 else
182 return 0;
/* SPARC instruction-field encoding helpers (see the SPARC V9
   architecture manual, instruction formats 1-3).  */
#define INSN_OP(x) ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x) ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

/* Immediate and branch-displacement fields.  IMM13 also sets the
   i bit selecting the immediate form.  Displacements are in words.  */
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

/* Branch condition field; A is the annul bit.  */
#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
#define COND_N 0x0
#define COND_E 0x1
#define COND_LE 0x2
#define COND_L 0x3
#define COND_LEU 0x4
#define COND_CS 0x5
#define COND_NEG 0x6
#define COND_VS 0x7
#define COND_A 0x8
#define COND_NE 0x9
#define COND_G 0xa
#define COND_GE 0xb
#define COND_GU 0xc
#define COND_CC 0xd
#define COND_POS 0xe
#define COND_VC 0xf
/* Unconditional branch-always (ba).  */
#define BA (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

/* Integer arithmetic/logical opcodes (format 3, op = 2).  */
#define ARITH_ADD (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_AND (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_OR (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_XOR (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))

/* Shifts; bit 12 selects the 64-bit (x) variants.  */
#define SHIFT_SLL (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

/* Miscellaneous control-transfer and window instructions.  */
#define WRY (INSN_OP(2) | INSN_OP3(0x30))
#define JMPL (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI (INSN_OP(0) | INSN_OP2(0x4))
#define CALL INSN_OP(1)

/* Loads and stores (format 3, op = 3).  */
#define LDUB (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW (INSN_OP(3) | INSN_OP3(0x08))
#define LDX (INSN_OP(3) | INSN_OP3(0x0b))
#define STB (INSN_OP(3) | INSN_OP3(0x05))
#define STH (INSN_OP(3) | INSN_OP3(0x06))
#define STW (INSN_OP(3) | INSN_OP3(0x04))
#define STX (INSN_OP(3) | INSN_OP3(0x0e))

/* Alternate-address-space variants (used with ASI_PRIMARY_LITTLE to
   get byte-swapped accesses for free).  */
#define LDUBA (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA (INSN_OP(3) | INSN_OP3(0x15))
#define STHA (INSN_OP(3) | INSN_OP3(0x16))
#define STWA (INSN_OP(3) | INSN_OP3(0x14))
#define STXA (INSN_OP(3) | INSN_OP3(0x1e))

#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif
274 static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
275 int op)
277 tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
278 INSN_RS2(rs2));
281 static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
282 uint32_t offset, int op)
284 tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
285 INSN_IMM13(offset));
288 static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
290 tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);
293 static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
295 tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
298 static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
300 tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
303 static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
305 if (check_fit_tl(arg, 13))
306 tcg_out_movi_imm13(s, ret, arg);
307 else {
308 tcg_out_sethi(s, ret, arg);
309 if (arg & 0x3ff)
310 tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
314 static inline void tcg_out_movi(TCGContext *s, TCGType type,
315 int ret, tcg_target_long arg)
317 /* All 32-bit constants, as well as 64-bit constants with
318 no high bits set go through movi_imm32. */
319 if (TCG_TARGET_REG_BITS == 32
320 || type == TCG_TYPE_I32
321 || (arg & ~(tcg_target_long)0xffffffff) == 0) {
322 tcg_out_movi_imm32(s, ret, arg);
323 } else if (check_fit_tl(arg, 13)) {
324 /* A 13-bit constant sign-extended to 64-bits. */
325 tcg_out_movi_imm13(s, ret, arg);
326 } else if (check_fit_tl(arg, 32)) {
327 /* A 32-bit constant sign-extended to 64-bits. */
328 tcg_out_sethi(s, ret, ~arg);
329 tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
330 } else {
331 tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
332 tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
333 tcg_out_movi_imm32(s, ret, arg);
334 tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
338 static inline void tcg_out_ld_raw(TCGContext *s, int ret,
339 tcg_target_long arg)
341 tcg_out_sethi(s, ret, arg);
342 tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
343 INSN_IMM13(arg & 0x3ff));
346 static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
347 tcg_target_long arg)
349 if (!check_fit_tl(arg, 10))
350 tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
351 if (TCG_TARGET_REG_BITS == 64) {
352 tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
353 INSN_IMM13(arg & 0x3ff));
354 } else {
355 tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
356 INSN_IMM13(arg & 0x3ff));
360 static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
362 if (check_fit_tl(offset, 13))
363 tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
364 INSN_IMM13(offset));
365 else {
366 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
367 tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
368 INSN_RS2(addr));
372 static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
373 int offset, int op, int asi)
375 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
376 tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
377 INSN_ASI(asi) | INSN_RS2(addr));
380 static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
381 int arg1, tcg_target_long arg2)
383 if (type == TCG_TYPE_I32)
384 tcg_out_ldst(s, ret, arg1, arg2, LDUW);
385 else
386 tcg_out_ldst(s, ret, arg1, arg2, LDX);
389 static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
390 int arg1, tcg_target_long arg2)
392 if (type == TCG_TYPE_I32)
393 tcg_out_ldst(s, arg, arg1, arg2, STW);
394 else
395 tcg_out_ldst(s, arg, arg1, arg2, STX);
398 static inline void tcg_out_sety(TCGContext *s, tcg_target_long val)
400 if (val == 0 || val == -1)
401 tcg_out32(s, WRY | INSN_IMM13(val));
402 else
403 fprintf(stderr, "unimplemented sety %ld\n", (long)val);
406 static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
408 if (val != 0) {
409 if (check_fit_tl(val, 13))
410 tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
411 else {
412 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
413 tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
418 static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
420 if (val != 0) {
421 if (check_fit_tl(val, 13))
422 tcg_out_arithi(s, reg, reg, val, ARITH_AND);
423 else {
424 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
425 tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);
430 static inline void tcg_out_nop(TCGContext *s)
432 tcg_out_sethi(s, TCG_REG_G0, 0);
435 static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
437 int32_t val;
438 TCGLabel *l = &s->labels[label_index];
440 if (l->has_value) {
441 val = l->u.value - (tcg_target_long)s->code_ptr;
442 tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2)
443 | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
444 } else {
445 tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
446 tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2) | 0));
#if TCG_TARGET_REG_BITS == 64
/* Emit a 64-bit conditional branch (BPcc on %xcc, predict-taken) to
   LABEL_INDEX, or a zero placeholder plus relocation when the label
   is not yet bound.  The caller must fill the delay slot.
   Cleanup: the original computed the displacement into VAL and then
   recomputed the same difference inline (dead store); use VAL.  */
static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        int32_t val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) |     /* cc = %xcc, predict taken */
                      INSN_OFF19(val)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) | 0));
    }
}
#endif
469 static const uint8_t tcg_cond_to_bcond[10] = {
470 [TCG_COND_EQ] = COND_E,
471 [TCG_COND_NE] = COND_NE,
472 [TCG_COND_LT] = COND_L,
473 [TCG_COND_GE] = COND_GE,
474 [TCG_COND_LE] = COND_LE,
475 [TCG_COND_GT] = COND_G,
476 [TCG_COND_LTU] = COND_CS,
477 [TCG_COND_GEU] = COND_CC,
478 [TCG_COND_LEU] = COND_LEU,
479 [TCG_COND_GTU] = COND_GU,
482 static void tcg_out_cmp(TCGContext *s, TCGArg c1, TCGArg c2, int c2const)
484 if (c2const)
485 tcg_out_arithi(s, TCG_REG_G0, c1, c2, ARITH_SUBCC);
486 else
487 tcg_out_arith(s, TCG_REG_G0, c1, c2, ARITH_SUBCC);
490 static void tcg_out_brcond_i32(TCGContext *s, int cond,
491 TCGArg arg1, TCGArg arg2, int const_arg2,
492 int label_index)
494 tcg_out_cmp(s, arg1, arg2, const_arg2);
495 tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], label_index);
496 tcg_out_nop(s);
499 #if TCG_TARGET_REG_BITS == 64
500 static void tcg_out_brcond_i64(TCGContext *s, int cond,
501 TCGArg arg1, TCGArg arg2, int const_arg2,
502 int label_index)
504 tcg_out_cmp(s, arg1, arg2, const_arg2);
505 tcg_out_branch_i64(s, tcg_cond_to_bcond[cond], label_index);
506 tcg_out_nop(s);
508 #else
509 static void tcg_out_brcond2_i32(TCGContext *s, int cond,
510 TCGArg al, TCGArg ah,
511 TCGArg bl, int blconst,
512 TCGArg bh, int bhconst, int label_dest)
514 int cc, label_next = gen_new_label();
516 tcg_out_cmp(s, ah, bh, bhconst);
518 /* Note that we fill one of the delay slots with the second compare. */
519 switch (cond) {
520 case TCG_COND_EQ:
521 cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
522 tcg_out_branch_i32(s, cc, label_next);
523 tcg_out_cmp(s, al, bl, blconst);
524 cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_EQ], 0);
525 tcg_out_branch_i32(s, cc, label_dest);
526 break;
528 case TCG_COND_NE:
529 cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
530 tcg_out_branch_i32(s, cc, label_dest);
531 tcg_out_cmp(s, al, bl, blconst);
532 tcg_out_branch_i32(s, cc, label_dest);
533 break;
535 default:
536 /* ??? One could fairly easily special-case 64-bit unsigned
537 compares against 32-bit zero-extended constants. For instance,
538 we know that (unsigned)AH < 0 is false and need not emit it.
539 Similarly, (unsigned)AH > 0 being true implies AH != 0, so the
540 second branch will never be taken. */
541 cc = INSN_COND(tcg_cond_to_bcond[cond], 0);
542 tcg_out_branch_i32(s, cc, label_dest);
543 tcg_out_nop(s);
544 cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
545 tcg_out_branch_i32(s, cc, label_next);
546 tcg_out_cmp(s, al, bl, blconst);
547 cc = INSN_COND(tcg_cond_to_bcond[tcg_unsigned_cond(cond)], 0);
548 tcg_out_branch_i32(s, cc, label_dest);
549 break;
551 tcg_out_nop(s);
553 tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
555 #endif
557 /* Generate global QEMU prologue and epilogue code */
558 void tcg_target_qemu_prologue(TCGContext *s)
560 tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
561 INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
562 tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I0) |
563 INSN_RS2(TCG_REG_G0));
564 tcg_out_nop(s);
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Softmmu slow-path memory helpers, indexed by log2(access size).  */
static const void * const qemu_ld_helpers[4] = {
    __ldb_mmu, __ldw_mmu, __ldl_mmu, __ldq_mmu,
};

static const void * const qemu_st_helpers[4] = {
    __stb_mmu, __stw_mmu, __stl_mmu, __stq_mmu,
};
#endif

/* Load op used for guest virtual addresses.  */
#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#else
#define TARGET_LD_OP LDX
#endif

/* Load op used for the TLB addend field.  */
#if TARGET_PHYS_ADDR_BITS == 32
#define TARGET_ADDEND_LD_OP LDUW
#else
#define TARGET_ADDEND_LD_OP LDX
#endif

/* Host-register-width load/store/shift ops.  */
#ifdef __arch64__
#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX
#else
#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
#endif
/* Emit a guest memory load.  OPC bits 0-1 give log2(access size),
   bit 2 requests sign extension; args = { data_reg, addr_reg, mem_index }.
   With CONFIG_SOFTMMU this emits an inline TLB probe with a slow-path
   call to the __ld*_mmu helpers; otherwise the guest address is used
   directly.  Clobbers %o0-%o2 (helper arguments) and %i5 (scratch
   inside tcg_out_ldst/tcg_out_ldst_asi).  */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;           /* log2 of the access size */

    /* Helper-call argument registers, reused as scratch.  */
    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 -- TLB index from the page number.  */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);

    /* and addr_reg, x, arg0 -- page address plus low alignment bits,
       the value compared against the TLB tag.  */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 -- mask the index to the TLB size.  */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 -- offset of the addr_read field.  */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 -- absolute address of the TLB entry.  */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 -- the cached TLB tag.  */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 -- compare tag against the masked address.  */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* will become:
       be label1
        or
       be,pt %xcc label1 -- placeholder patched at label1 below.  */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* mov (delay slot) -- first helper argument: the guest address.  */
    tcg_out_mov(s, arg0, addr_reg);

    /* mov -- second helper argument: the MMU index.  */
    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    // delay slot
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    /* ...and reload it after the helper returns.  */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* data_reg = sign_extend(arg0) -- widen the helper's return value
       via a shift-left/shift-right-arithmetic pair.  */
    switch (opc) {
    case 0 | 4:
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
                       HOST_SLL_OP);
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);
        break;
    case 1 | 4:
        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);
        break;
    case 2 | 4:
        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        break;
    case 0:
    case 1:
    case 2:
    case 3:
    default:
        /* mov -- unsigned results need no extension.  */
        tcg_out_mov(s, data_reg, arg0);
        break;
    }

    /* will become:
       ba label2 -- skip the fast path; patched at label2 below.  */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* nop (delay slot) */
    tcg_out_nop(s);

    /* label1: -- TLB hit; patch the forward branch to land here.  */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 -- the guest->host address addend.  */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 -- zero-extend the 32-bit guest address.  */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif

#else
    /* No softmmu: the guest address is the host address.  */
    arg0 = addr_reg;
#endif

    /* The actual load.  Little-endian guests use the ASI_PRIMARY_LITTLE
       address space so the hardware byte-swaps for free.  */
    switch (opc) {
    case 0:
        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);
        break;
    case 0 | 4:
        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
#else
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 1 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
#else
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
#else
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
#else
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
#else
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: -- patch the slow-path skip branch ("ba") to land here.  */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}
/* Emit a guest memory store.  OPC gives log2(access size);
   args = { data_reg, addr_reg, mem_index }.  Mirrors tcg_out_qemu_ld,
   using the addr_write TLB field and the __st*_mmu helpers.
   Clobbers %o0-%o2 and %i5.  */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;               /* log2 of the access size */

    /* Helper-call argument registers, reused as scratch.  */
    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 -- TLB index from the page number.  */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);

    /* and addr_reg, x, arg0 -- value compared against the TLB tag.  */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 -- mask the index to the TLB size.  */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 -- offset of the addr_write field.  */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_write));

    /* add env, arg1, arg1 -- absolute address of the TLB entry.  */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 -- the cached TLB tag.  */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 -- compare tag against the masked address.  */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* will become:
       be label1
        or
       be,pt %xcc label1 -- placeholder patched at label1 below.  */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* mov (delay slot) -- first helper argument: the guest address.  */
    tcg_out_mov(s, arg0, addr_reg);

    /* mov -- second helper argument: the value to store.  */
    tcg_out_mov(s, arg1, data_reg);

    /* mov -- third helper argument: the MMU index.  */
    tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    // delay slot
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    /* ...and reload it after the helper returns.  */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* will become:
       ba label2 -- skip the fast path; patched at label2 below.  */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* nop (delay slot) */
    tcg_out_nop(s);

    /* label1: -- TLB hit; patch the forward branch to land here.  */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 -- the guest->host address addend.  */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_write), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 -- zero-extend the 32-bit guest address.  */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif

#else
    /* No softmmu: the guest address is the host address.  */
    arg0 = addr_reg;
#endif

    /* The actual store.  Little-endian guests go through the
       ASI_PRIMARY_LITTLE address space for the byte swap.  */
    switch (opc) {
    case 0:
        /* stb data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* sth data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STH);
#else
        /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stw data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STW);
#else
        /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stx data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STX);
#else
        /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: -- patch the slow-path skip branch ("ba") to land here.  */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}
978 static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
979 const int *const_args)
981 int c;
983 switch (opc) {
984 case INDEX_op_exit_tb:
985 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
986 tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
987 INSN_IMM13(8));
988 tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
989 INSN_RS2(TCG_REG_G0));
990 break;
991 case INDEX_op_goto_tb:
992 if (s->tb_jmp_offset) {
993 /* direct jump method */
994 tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
995 tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
996 INSN_IMM13((args[0] & 0x1fff)));
997 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
998 } else {
999 /* indirect jump method */
1000 tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
1001 tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
1002 INSN_RS2(TCG_REG_G0));
1004 tcg_out_nop(s);
1005 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1006 break;
1007 case INDEX_op_call:
1008 if (const_args[0])
1009 tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
1010 - (tcg_target_ulong)s->code_ptr) >> 2)
1011 & 0x3fffffff));
1012 else {
1013 tcg_out_ld_ptr(s, TCG_REG_I5,
1014 (tcg_target_long)(s->tb_next + args[0]));
1015 tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
1016 INSN_RS2(TCG_REG_G0));
1018 /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
1019 global registers */
1020 // delay slot
1021 tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
1022 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
1023 sizeof(long), HOST_ST_OP);
1024 tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
1025 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
1026 sizeof(long), HOST_LD_OP);
1027 break;
1028 case INDEX_op_jmp:
1029 case INDEX_op_br:
1030 tcg_out_branch_i32(s, COND_A, args[0]);
1031 tcg_out_nop(s);
1032 break;
1033 case INDEX_op_movi_i32:
1034 tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
1035 break;
1037 #if TCG_TARGET_REG_BITS == 64
1038 #define OP_32_64(x) \
1039 glue(glue(case INDEX_op_, x), _i32:) \
    glue(glue(case INDEX_op_, x), _i64:)
#else
/* 32-bit-only target: OP_32_64(x) expands to just the _i32 case label. */
#define OP_32_64(x) \
        glue(glue(case INDEX_op_, x), _i32:)
#endif
    /* Memory loads: forward to tcg_out_ldst() with the SPARC load opcode
       matching the access width and signedness (LDUB/LDSB byte,
       LDUH/LDSH halfword, LDUW word). */
    OP_32_64(ld8u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
    OP_32_64(ld8s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
    OP_32_64(ld16u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
    OP_32_64(ld16s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
    /* Memory stores: same scheme with STB/STH/STW. */
    OP_32_64(st8);
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
    OP_32_64(st16);
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;
    /* Arithmetic/logic/shift ops: select the SPARC opcode in 'c' and
       fall through to the shared gen_arith32 emitter below, which
       handles the register vs. immediate second operand. */
    OP_32_64(add);
        c = ARITH_ADD;
        goto gen_arith32;
    OP_32_64(sub);
        c = ARITH_SUB;
        goto gen_arith32;
    OP_32_64(and);
        c = ARITH_AND;
        goto gen_arith32;
    OP_32_64(or);
        c = ARITH_OR;
        goto gen_arith32;
    OP_32_64(xor);
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith32;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith32;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith32;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith32;
    /* 32-bit divide: v9/v8plus hosts can use the 64-bit SDIVX/UDIVX
       directly; plain v8 must use SDIV/UDIV, which take the high half
       of the dividend from the Y register, so clear Y first via
       tcg_out_sety(s, 0). */
    case INDEX_op_div2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_SDIVX;
        goto gen_arith32;
#else
        tcg_out_sety(s, 0);
        c = ARITH_SDIV;
        goto gen_arith32;
#endif
    case INDEX_op_divu2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_UDIVX;
        goto gen_arith32;
#else
        tcg_out_sety(s, 0);
        c = ARITH_UDIV;
        goto gen_arith32;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
#if TCG_TARGET_REG_BITS == 32
    /* Double-word compare-and-branch, only needed when the host word
       is 32 bits. */
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2_i32(s, args[4], args[0], args[1],
                            args[2], const_args[2],
                            args[3], const_args[3], args[5]);
        break;
#endif
    /* Guest memory accesses.  The second argument encodes the access
       size as a log2 index (0=8, 1=16, 2=32, 3=64 bits); ORing in 4
       selects the sign-extending load variants, as the ld*s cases show. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
#if TCG_TARGET_REG_BITS == 64
    /* 64-bit-only opcodes, available when the host word is 64 bits. */
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith32;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith32;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith32;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith32;
    case INDEX_op_div2_i64:
        c = ARITH_SDIVX;
        goto gen_arith32;
    case INDEX_op_divu2_i64:
        c = ARITH_UDIVX;
        goto gen_arith32;
    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;
#endif
    /* Shared tail for all two-operand arithmetic above: emit either the
       register-immediate or register-register form of opcode 'c'
       depending on whether operand 2 is constant. */
    gen_arith32:
        if (const_args[2]) {
            tcg_out_arithi(s, args[0], args[1], args[2], c);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], c);
        /* NOTE(review): the closing '}' of this else branch (and the
           braces closing the switch and the enclosing function) appear
           to have been lost in this extraction of the file — restore
           them against the upstream source before building. */
        break;
    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
/*
 * Operand-constraint table registered with the TCG core (see
 * tcg_add_target_add_op_defs below).  One entry per supported opcode:
 *   "r"  = any allocatable register
 *   "ri" = register or immediate
 *   "rJ" = register or 13-bit signed immediate (SPARC simm13)
 *   "L"  = register usable across the qemu_ld/st slow path
 *   "0"/"1" = must alias the same-numbered input operand (div2 pairs)
 * NOTE(review): the constraint-letter meanings above are the usual TCG
 * conventions — confirm against tcg_target_parse_constraint in this file.
 */
static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    /* 32-bit moves, loads and stores. */
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    /* 32-bit arithmetic; second source may be a simm13 immediate. */
    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "rJ" } },
#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "rJ", "rJ" } },
#endif

    /* Guest memory accesses. */
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if TCG_TARGET_REG_BITS == 64
    /* 64-bit opcodes, mirroring the 32-bit constraints above. */
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i64, { "r", "rJ" } },
#endif
    /* Table terminator.
       NOTE(review): the closing "};" of this array appears to have been
       lost in this extraction — restore it from the upstream source. */
    { -1 },
1301 void tcg_target_init(TCGContext *s)
1303 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
1304 #if TCG_TARGET_REG_BITS == 64
1305 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
1306 #endif
1307 tcg_regset_set32(tcg_target_call_clobber_regs, 0,
1308 (1 << TCG_REG_G1) |
1309 (1 << TCG_REG_G2) |
1310 (1 << TCG_REG_G3) |
1311 (1 << TCG_REG_G4) |
1312 (1 << TCG_REG_G5) |
1313 (1 << TCG_REG_G6) |
1314 (1 << TCG_REG_G7) |
1315 (1 << TCG_REG_O0) |
1316 (1 << TCG_REG_O1) |
1317 (1 << TCG_REG_O2) |
1318 (1 << TCG_REG_O3) |
1319 (1 << TCG_REG_O4) |
1320 (1 << TCG_REG_O5) |
1321 (1 << TCG_REG_O7));
1323 tcg_regset_clear(s->reserved_regs);
1324 tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
1325 #if TCG_TARGET_REG_BITS == 64
1326 tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); // for internal use
1327 #endif
1328 tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
1329 tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
1330 tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
1331 tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
1332 tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
1333 tcg_add_target_add_op_defs(sparc_op_defs);