tcg/sparc/tcg-target.c

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%g0",
    "%g1",
    "%g2",
    "%g3",
    "%g4",
    "%g5",
    "%g6",
    "%g7",
    "%o0",
    "%o1",
    "%o2",
    "%o3",
    "%o4",
    "%o5",
    "%o6",
    "%o7",
    "%l0",
    "%l1",
    "%l2",
    "%l3",
    "%l4",
    "%l5",
    "%l6",
    "%l7",
    "%i0",
    "%i1",
    "%i2",
    "%i3",
    "%i4",
    "%i5",
    "%i6",
    "%i7",
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_L0,
    TCG_REG_L1,
    TCG_REG_L2,
    TCG_REG_L3,
    TCG_REG_L4,
    TCG_REG_L5,
    TCG_REG_L6,
    TCG_REG_L7,
    TCG_REG_I0,
    TCG_REG_I1,
    TCG_REG_I2,
    TCG_REG_I3,
    TCG_REG_I4,
};

static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
    TCG_REG_O4,
    TCG_REG_O5,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_O0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_O1
#endif
};

static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
{
    return (val << (sizeof(tcg_target_long) * 8 - bits)
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;
}

static inline int check_fit_i32(uint32_t val, unsigned int bits)
{
    return ((val << (32 - bits)) >> (32 - bits)) == val;
}
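
/* For example, with bits = 13 the signed range is -4096..4095, so
   check_fit_tl(4095, 13) is true while check_fit_tl(4096, 13) is false:
   shifting 4096 up and arithmetically back down sign-extends its low
   13 bits to -4096, which no longer equals the original value. */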

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_SPARC_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 22))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
        break;
    case R_SPARC_WDISP19:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 19))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
        break;
    default:
        tcg_abort();
    }
}
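
/* The WDISP22/WDISP19 cases patch PC-relative branches: the displacement
   is (target - instruction address) in 32-bit words, and it is merged into
   the low 22 or 19 bits of the already-emitted branch instruction. */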

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        /* Helper args */
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S13;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
        return 1;
    else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
        return 1;
    else
        return 0;
}

#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

#define INSN_IMM11(x) ((1 << 13) | ((x) & 0x7ff))
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
#define COND_N     0x0
#define COND_E     0x1
#define COND_LE    0x2
#define COND_L     0x3
#define COND_LEU   0x4
#define COND_CS    0x5
#define COND_NEG   0x6
#define COND_VS    0x7
#define COND_A     0x8
#define COND_NE    0x9
#define COND_G     0xa
#define COND_GE    0xb
#define COND_GU    0xc
#define COND_CC    0xd
#define COND_POS   0xe
#define COND_VC    0xf
#define BA         (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))
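
/* A complete instruction word is built by OR-ing these field macros
   together.  For example, BA expands to
   INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2)
   = (0x8 << 25) | (0x2 << 22) = 0x10800000, the SPARC "ba" opcode with a
   zero (still-to-be-filled) 22-bit word displacement. */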

#define MOVCC_ICC  (1 << 18)
#define MOVCC_XCC  (1 << 18 | 1 << 12)

#define ARITH_ADD  (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_ADDCC (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_AND  (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_ANDN (INSN_OP(2) | INSN_OP3(0x05))
#define ARITH_OR   (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_ORN  (INSN_OP(2) | INSN_OP3(0x06))
#define ARITH_XOR  (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB  (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))
#define ARITH_MOVCC (INSN_OP(2) | INSN_OP3(0x2c))

#define SHIFT_SLL  (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL  (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA  (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define RDY        (INSN_OP(2) | INSN_OP3(0x28) | INSN_RS1(0))
#define WRY        (INSN_OP(2) | INSN_OP3(0x30) | INSN_RD(0))
#define JMPL       (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE       (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE    (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI      (INSN_OP(0) | INSN_OP2(0x4))
#define CALL       INSN_OP(1)
#define LDUB       (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB       (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH       (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH       (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW       (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW       (INSN_OP(3) | INSN_OP3(0x08))
#define LDX        (INSN_OP(3) | INSN_OP3(0x0b))
#define STB        (INSN_OP(3) | INSN_OP3(0x05))
#define STH        (INSN_OP(3) | INSN_OP3(0x06))
#define STW        (INSN_OP(3) | INSN_OP3(0x04))
#define STX        (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA      (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA      (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA      (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA      (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA      (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA      (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA       (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA       (INSN_OP(3) | INSN_OP3(0x15))
#define STHA       (INSN_OP(3) | INSN_OP3(0x16))
#define STWA       (INSN_OP(3) | INSN_OP3(0x14))
#define STXA       (INSN_OP(3) | INSN_OP3(0x1e))

#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif

static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
                                 int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_RS2(rs2));
}

static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
                                  uint32_t offset, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_IMM13(offset));
}

static void tcg_out_arithc(TCGContext *s, int rd, int rs1,
                           int val2, int val2const, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1)
              | (val2const ? INSN_IMM13(val2) : INSN_RS2(val2)));
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);
}

static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
}

static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
}

static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
{
    if (check_fit_tl(arg, 13))
        tcg_out_movi_imm13(s, ret, arg);
    else {
        tcg_out_sethi(s, ret, arg);
        if (arg & 0x3ff)
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
    }
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg ret, tcg_target_long arg)
{
    /* All 32-bit constants, as well as 64-bit constants with
       no high bits set go through movi_imm32.  */
    if (TCG_TARGET_REG_BITS == 32
        || type == TCG_TYPE_I32
        || (arg & ~(tcg_target_long)0xffffffff) == 0) {
        tcg_out_movi_imm32(s, ret, arg);
    } else if (check_fit_tl(arg, 13)) {
        /* A 13-bit constant sign-extended to 64-bits.  */
        tcg_out_movi_imm13(s, ret, arg);
    } else if (check_fit_tl(arg, 32)) {
        /* A 32-bit constant sign-extended to 64-bits.  */
        tcg_out_sethi(s, ret, ~arg);
        tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
    } else {
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
    }
}
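
/* The final case above builds an arbitrary 64-bit constant in two halves:
   the high 32 bits are formed in the reserved scratch register %i4 and
   shifted up with SLLX, the low 32 bits are formed in RET, and the halves
   are OR-ed together -- at most six instructions in total. */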

static inline void tcg_out_ld_raw(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    tcg_out_sethi(s, ret, arg);
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
}

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    if (!check_fit_tl(arg, 10))
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));
    } else {
        tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));
    }
}

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
{
    if (check_fit_tl(offset, 13))
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
                  INSN_IMM13(offset));
    else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
                  INSN_RS2(addr));
    }
}

static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
                                    int offset, int op, int asi)
{
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
              INSN_ASI(asi) | INSN_RS2(addr));
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret,
                              TCGReg arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, ret, arg1, arg2, LDUW);
    else
        tcg_out_ldst(s, ret, arg1, arg2, LDX);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, arg, arg1, arg2, STW);
    else
        tcg_out_ldst(s, arg, arg1, arg2, STX);
}

static inline void tcg_out_sety(TCGContext *s, int rs)
{
    tcg_out32(s, WRY | INSN_RS1(TCG_REG_G0) | INSN_RS2(rs));
}

static inline void tcg_out_rdy(TCGContext *s, int rd)
{
    tcg_out32(s, RDY | INSN_RD(rd));
}

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
        else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
        }
    }
}

static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_AND);
        else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);
        }
    }
}

static void tcg_out_div32(TCGContext *s, int rd, int rs1,
                          int val2, int val2const, int uns)
{
    /* Load Y with the sign/zero extension of RS1 to 64-bits.  */
    if (uns) {
        tcg_out_sety(s, TCG_REG_G0);
    } else {
        tcg_out_arithi(s, TCG_REG_I5, rs1, 31, SHIFT_SRA);
        tcg_out_sety(s, TCG_REG_I5);
    }

    tcg_out_arithc(s, rd, rs1, val2, val2const,
                   uns ? ARITH_UDIV : ARITH_SDIV);
}
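
/* SPARC V8 UDIV/SDIV divide the 64-bit value formed by Y:RS1 by the second
   operand, so Y must hold the upper 32 bits of the dividend: zero for an
   unsigned divide, or the sign bits of RS1 (RS1 arithmetically shifted
   right by 31) for a signed one. */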

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_sethi(s, TCG_REG_G0, 0);
}

static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2)
                      | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2) | 0));
    }
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) |
                      INSN_OFF19(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) | 0));
    }
}
#endif

static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,
};

static void tcg_out_cmp(TCGContext *s, TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const, ARITH_SUBCC);
}

static void tcg_out_brcond_i32(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], label_index);
    tcg_out_nop(s);
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_brcond_i64(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i64(s, tcg_cond_to_bcond[cond], label_index);
    tcg_out_nop(s);
}
#else
static void tcg_out_brcond2_i32(TCGContext *s, TCGCond cond,
                                TCGArg al, TCGArg ah,
                                TCGArg bl, int blconst,
                                TCGArg bh, int bhconst, int label_dest)
{
    int cc, label_next = gen_new_label();

    tcg_out_cmp(s, ah, bh, bhconst);

    /* Note that we fill one of the delay slots with the second compare.  */
    switch (cond) {
    case TCG_COND_EQ:
        cc = tcg_cond_to_bcond[TCG_COND_NE];
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = tcg_cond_to_bcond[TCG_COND_EQ];
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    case TCG_COND_NE:
        cc = tcg_cond_to_bcond[TCG_COND_NE];
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_cmp(s, al, bl, blconst);
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    default:
        /* ??? One could fairly easily special-case 64-bit unsigned
           compares against 32-bit zero-extended constants.  For instance,
           we know that (unsigned)AH < 0 is false and need not emit it.
           Similarly, (unsigned)AH > 0 being true implies AH != 0, so the
           second branch will never be taken.  */
        cc = tcg_cond_to_bcond[cond];
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_nop(s);
        cc = tcg_cond_to_bcond[TCG_COND_NE];
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = tcg_cond_to_bcond[tcg_unsigned_cond(cond)];
        tcg_out_branch_i32(s, cc, label_dest);
        break;
    }
    tcg_out_nop(s);

    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
}
#endif

static void tcg_out_setcond_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    TCGArg t;

    /* For 32-bit comparisons, we can play games with ADDX/SUBX.  */
    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (c2 != 0) {
            tcg_out_arithc(s, ret, c1, c2, c2const, ARITH_XOR);
        }
        c1 = TCG_REG_G0, c2 = ret, c2const = 0;
        cond = (cond == TCG_COND_EQ ? TCG_COND_GEU : TCG_COND_LTU);
        break;

    case TCG_COND_GTU:
    case TCG_COND_LEU:
        if (c2const && c2 != 0) {
            tcg_out_movi_imm13(s, TCG_REG_I5, c2);
            c2 = TCG_REG_I5;
        }
        t = c1, c1 = c2, c2 = t, c2const = 0;
        cond = tcg_swap_cond(cond);
        break;

    case TCG_COND_LTU:
    case TCG_COND_GEU:
        break;

    default:
        tcg_out_cmp(s, c1, c2, c2const);
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out32(s, ARITH_MOVCC | INSN_RD(ret)
                  | INSN_RS1(tcg_cond_to_bcond[cond])
                  | MOVCC_ICC | INSN_IMM11(1));
#else
        t = gen_new_label();
        tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], t);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out_label(s, t, (tcg_target_long)s->code_ptr);
#endif
        return;
    }

    tcg_out_cmp(s, c1, c2, c2const);
    if (cond == TCG_COND_LTU) {
        tcg_out_arithi(s, ret, TCG_REG_G0, 0, ARITH_ADDX);
    } else {
        tcg_out_arithi(s, ret, TCG_REG_G0, -1, ARITH_SUBX);
    }
}
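
/* How the ADDX/SUBX games work: SUBCC sets the carry flag to (c1 < c2)
   unsigned.  "addx %g0, 0, ret" then materializes the carry itself (LTU),
   while "subx %g0, -1, ret" computes 1 - carry (GEU).  GTU and LEU are
   reached by first swapping the operands, and EQ/NE reduce to comparing
   c1 ^ c2 against zero. */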

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_setcond_i64(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_cmp(s, c1, c2, c2const);
    tcg_out_movi_imm13(s, ret, 0);
    tcg_out32(s, ARITH_MOVCC | INSN_RD(ret)
              | INSN_RS1(tcg_cond_to_bcond[cond])
              | MOVCC_XCC | INSN_IMM11(1));
}
#else
static void tcg_out_setcond2_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                 TCGArg al, TCGArg ah,
                                 TCGArg bl, int blconst,
                                 TCGArg bh, int bhconst)
{
    int lab;

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond_i32(s, TCG_COND_EQ, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_EQ, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_AND);
        break;

    case TCG_COND_NE:
        tcg_out_setcond_i32(s, TCG_COND_NE, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_NE, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_OR);
        break;

    default:
        lab = gen_new_label();

        tcg_out_cmp(s, ah, bh, bhconst);
        tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], lab);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_branch_i32(s, COND_NE, lab);
        tcg_out_movi_imm13(s, ret, 0);

        tcg_out_setcond_i32(s, tcg_unsigned_cond(cond), ret, al, bl, blconst);

        tcg_out_label(s, lab, (tcg_target_long)s->code_ptr);
        break;
    }
}
#endif

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    tcg_set_frame(s, TCG_REG_I6, TCG_TARGET_CALL_STACK_OFFSET,
                  CPU_TEMP_BUF_NLONGS * (int)sizeof(long));
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-(TCG_TARGET_STACK_MINFRAME +
                           CPU_TEMP_BUF_NLONGS * (int)sizeof(long))));
    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I1) |
              INSN_RS2(TCG_REG_G0));
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, TCG_REG_I0);
}
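
/* The emitted prologue is effectively:
       save %sp, -(TCG_TARGET_STACK_MINFRAME + temp buffer), %sp
       jmpl %i1, %g0          ! jump to the TB passed as second argument
       mov  %i0, AREG0        ! delay slot: first argument (env) -> AREG0
   i.e. entering generated code via tcg_qemu_tb_exec(env, tb_ptr) with a
   fresh register window. */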

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static const void * const qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static const void * const qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#else
#define TARGET_LD_OP LDX
#endif

#if defined(CONFIG_SOFTMMU)
#if HOST_LONG_BITS == 32
#define TARGET_ADDEND_LD_OP LDUW
#else
#define TARGET_ADDEND_LD_OP LDX
#endif
#endif

#ifdef __arch64__
#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX
#else
#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
#endif

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);
    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* will become:
       be label1
         or
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);
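
    /* The zero word emitted above merely reserves the branch slot; once
       the slow-path code has been emitted and the fast-path address is
       known, it is overwritten through label1_ptr with the real "be"
       (or "be,pt %xcc") instruction further below. */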

    /* mov (delay slot) */
    tcg_out_mov(s, TCG_TYPE_PTR, arg0, addr_reg);

    /* mov */
    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    /* delay slot */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* data_reg = sign_extend(arg0) */
    switch (opc) {
    case 0 | 4:
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
                       HOST_SLL_OP);
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);
        break;
    case 1 | 4:
        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);
        break;
    case 2 | 4:
        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        break;
    case 0:
    case 1:
    case 2:
    case 3:
    default:
        /* mov */
        tcg_out_mov(s, TCG_TYPE_REG, data_reg, arg0);
        break;
    }

    /* will become:
       ba label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* nop (delay slot) */
    tcg_out_nop(s);

    /* label1: */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif

#else
    arg0 = addr_reg;
#endif

    switch (opc) {
    case 0:
        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);
        break;
    case 0 | 4:
        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
#else
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 1 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
#else
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
#else
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
#else
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
#else
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;

    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_write));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* will become:
       be label1
         or
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* mov (delay slot) */
    tcg_out_mov(s, TCG_TYPE_PTR, arg0, addr_reg);

    /* mov */
    tcg_out_mov(s, TCG_TYPE_REG, arg1, data_reg);

    /* mov */
    tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    /* delay slot */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* will become:
       ba label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* nop (delay slot) */
    tcg_out_nop(s);

#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_write), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif

#else
    arg0 = addr_reg;
#endif

    switch (opc) {
    case 0:
        /* stb data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* sth data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STH);
#else
        /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stw data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STW);
#else
        /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stx data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STX);
#else
        /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
                  INSN_IMM13(8));
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                  INSN_RS2(TCG_REG_G0));
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0])
            tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                                   - (tcg_target_ulong)s->code_ptr) >> 2)
                                 & 0x3fffffff));
        else {
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        /* delay slot */
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_ST_OP);
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_LD_OP);
        break;
    case INDEX_op_jmp:
    case INDEX_op_br:
        tcg_out_branch_i32(s, COND_A, args[0]);
        tcg_out_nop(s);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

#if TCG_TARGET_REG_BITS == 64
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32)
#endif
    OP_32_64(ld8u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
    OP_32_64(ld8s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
    OP_32_64(ld16u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
    OP_32_64(ld16s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
    OP_32_64(st8):
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
    OP_32_64(st16):
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;
    OP_32_64(add):
        c = ARITH_ADD;
        goto gen_arith;
    OP_32_64(sub):
        c = ARITH_SUB;
        goto gen_arith;
    OP_32_64(and):
        c = ARITH_AND;
        goto gen_arith;
    OP_32_64(andc):
        c = ARITH_ANDN;
        goto gen_arith;
    OP_32_64(or):
        c = ARITH_OR;
        goto gen_arith;
    OP_32_64(orc):
        c = ARITH_ORN;
        goto gen_arith;
    OP_32_64(xor):
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith;

    OP_32_64(neg):
        c = ARITH_SUB;
        goto gen_arith1;
    OP_32_64(not):
        c = ARITH_ORN;
        goto gen_arith1;

    case INDEX_op_div_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 1);
        break;

    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
        tcg_out_div32(s, TCG_REG_I5, args[1], args[2], const_args[2],
                      opc == INDEX_op_remu_i32);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_UMUL);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond_i32(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2_i32(s, args[4], args[0], args[1],
                            args[2], const_args[2],
                            args[3], const_args[3], args[5]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2_i32(s, args[5], args[0], args[1], args[2],
                             args[3], const_args[3],
                             args[4], const_args[4]);
        break;
    case INDEX_op_add2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_ADDCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_ADDX);
        break;
    case INDEX_op_sub2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_SUBCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_SUBX);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_arithc(s, args[0], args[2], args[3], const_args[3],
                       ARITH_UMUL);
        tcg_out_rdy(s, args[1]);
        break;
#endif

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_qemu_ld(s, args, 2);
        break;
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
#endif
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith;
    case INDEX_op_div_i64:
        c = ARITH_SDIVX;
        goto gen_arith;
    case INDEX_op_divu_i64:
        c = ARITH_UDIVX;
        goto gen_arith;
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
        tcg_out_arithc(s, TCG_REG_I5, args[1], args[2], const_args[2],
                       opc == INDEX_op_rem_i64 ? ARITH_SDIVX : ARITH_UDIVX);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_MULX);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;
    case INDEX_op_ext32s_i64:
        if (const_args[1]) {
            tcg_out_movi(s, TCG_TYPE_I64, args[0], (int32_t)args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRA);
        }
        break;
    case INDEX_op_ext32u_i64:
        if (const_args[1]) {
            tcg_out_movi_imm32(s, args[0], args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRL);
        }
        break;

    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond_i64(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

#endif

    gen_arith:
        tcg_out_arithc(s, args[0], args[1], args[2], const_args[2], c);
        break;

    gen_arith1:
        tcg_out_arithc(s, args[0], TCG_REG_G0, args[1], const_args[1], c);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}
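
/* Constraint letters used in the table below (see
   target_parse_constraint above): "r" is any register; "L" is a register
   usable by the qemu_ld/st slow path, i.e. excluding the %o0-%o2 helper
   argument registers; "J" accepts a signed 13-bit immediate, "I" a signed
   11-bit one, and "i" any immediate. */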

static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div_i32, { "r", "r", "rJ" } },
    { INDEX_op_divu_i32, { "r", "r", "rJ" } },
    { INDEX_op_rem_i32, { "r", "r", "rJ" } },
    { INDEX_op_remu_i32, { "r", "r", "rJ" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_andc_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_orc_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_neg_i32, { "r", "rJ" } },
    { INDEX_op_not_i32, { "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "rJ" } },
    { INDEX_op_setcond_i32, { "r", "r", "rJ" } },

#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "rJ", "rJ" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_add2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "rJ" } },
#endif

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
#endif

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div_i64, { "r", "r", "rJ" } },
    { INDEX_op_divu_i64, { "r", "r", "rJ" } },
    { INDEX_op_rem_i64, { "r", "r", "rJ" } },
    { INDEX_op_remu_i64, { "r", "r", "rJ" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_andc_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_orc_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_neg_i64, { "r", "rJ" } },
    { INDEX_op_not_i64, { "r", "rJ" } },

    { INDEX_op_ext32s_i64, { "r", "ri" } },
    { INDEX_op_ext32u_i64, { "r", "ri" } },

    { INDEX_op_brcond_i64, { "r", "rJ" } },
    { INDEX_op_setcond_i64, { "r", "r", "rJ" } },
#endif
    { -1 },
};

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
#endif
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_G1) |
                     (1 << TCG_REG_G2) |
                     (1 << TCG_REG_G3) |
                     (1 << TCG_REG_G4) |
                     (1 << TCG_REG_G5) |
                     (1 << TCG_REG_G6) |
                     (1 << TCG_REG_G7) |
                     (1 << TCG_REG_O0) |
                     (1 << TCG_REG_O1) |
                     (1 << TCG_REG_O2) |
                     (1 << TCG_REG_O3) |
                     (1 << TCG_REG_O4) |
                     (1 << TCG_REG_O5) |
                     (1 << TCG_REG_O7));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); /* for internal use */
#endif
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); /* for internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
    tcg_add_target_add_op_defs(sparc_op_defs);
}