qemu-kvm.git: tcg/mips/tcg-target.c

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero",
    "at",
    "v0",
    "v1",
    "a0",
    "a1",
    "a2",
    "a3",
    "t0",
    "t1",
    "t2",
    "t3",
    "t4",
    "t5",
    "t6",
    "t7",
    "s0",
    "s1",
    "s2",
    "s3",
    "s4",
    "s5",
    "s6",
    "s7",
    "t8",
    "t9",
    "k0",
    "k1",
    "gp",
    "sp",
    "fp",
    "ra",
};
#endif

/* check if we really need so many registers :P */
static const TCGReg tcg_target_reg_alloc_order[] = {
    TCG_REG_S0,
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,
    TCG_REG_T7,
    TCG_REG_T8,
    TCG_REG_T9,
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_V0,
    TCG_REG_V1
};

static const TCGReg tcg_target_call_iarg_regs[4] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3
};

static const TCGReg tcg_target_call_oarg_regs[2] = {
    TCG_REG_V0,
    TCG_REG_V1
};

static uint8_t *tb_ret_addr;

static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
{
    return target & 0xffff;
}

static inline void reloc_lo16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_lo16_val(pc, target);
}

static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
{
    return (target >> 16) & 0xffff;
}

static inline void reloc_hi16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_hi16_val(pc, target);
}

static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
{
    int32_t disp;

    disp = target - (tcg_target_long) pc - 4;
    if (disp != (disp << 14) >> 14) {
        tcg_abort ();
    }

    return (disp >> 2) & 0xffff;
}

static inline void reloc_pc16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_pc16_val(pc, target);
}

static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
{
    if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
        tcg_abort ();
    }

    return (target >> 2) & 0x3ffffff;
}

static inline void reloc_pc26 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
                       | reloc_26_val(pc, target);
}

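/* Relocation kinds handled above and dispatched by patch_reloc below:
   R_MIPS_HI16/R_MIPS_LO16 patch the two 16-bit halves of an absolute
   address (lui/ori pairs or load/store offsets), R_MIPS_PC16 patches
   the 16-bit word displacement of a conditional branch (PC-relative,
   about +/-128KB, hence the check in reloc_pc16_val), and R_MIPS_26
   patches the 26-bit word index of j/jal, which must stay within the
   current 256MB segment (hence the check in reloc_26_val).  */
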
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_MIPS_LO16:
        reloc_lo16(code_ptr, value);
        break;
    case R_MIPS_HI16:
        reloc_hi16(code_ptr, value);
        break;
    case R_MIPS_PC16:
        reloc_pc16(code_ptr, value);
        break;
    case R_MIPS_26:
        reloc_pc26(code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        break;
    case 'C':
        ct->ct |= TCG_CT_REG;
        tcg_regset_clear(ct->u.regs);
        tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
        break;
    case 'L': /* qemu_ld output arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
        break;
    case 'l': /* qemu_ld input arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if (TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
        break;
    case 'S': /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if (TARGET_LONG_BITS == 32)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           ZERO is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
        return 1;
    else
        return 0;
}

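/* Summary of the constraint letters parsed above: 'r' any register,
   'C' only t9 (the indirect call target register), 'L'/'l' qemu_ld
   output/input operands (excluding v0 and, with softmmu, the a0/a2
   scratch arguments), 'S' qemu_st operands, 'I' unsigned 16-bit
   immediate, 'J' signed 16-bit immediate, 'Z' the constant zero.  */
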
/* instruction opcodes */
enum {
    OPC_BEQ = 0x04 << 26,
    OPC_BNE = 0x05 << 26,
    OPC_BLEZ = 0x06 << 26,
    OPC_BGTZ = 0x07 << 26,
    OPC_ADDIU = 0x09 << 26,
    OPC_SLTI = 0x0A << 26,
    OPC_SLTIU = 0x0B << 26,
    OPC_ANDI = 0x0C << 26,
    OPC_ORI = 0x0D << 26,
    OPC_XORI = 0x0E << 26,
    OPC_LUI = 0x0F << 26,
    OPC_LB = 0x20 << 26,
    OPC_LH = 0x21 << 26,
    OPC_LW = 0x23 << 26,
    OPC_LBU = 0x24 << 26,
    OPC_LHU = 0x25 << 26,
    OPC_LWU = 0x27 << 26,
    OPC_SB = 0x28 << 26,
    OPC_SH = 0x29 << 26,
    OPC_SW = 0x2B << 26,

    OPC_SPECIAL = 0x00 << 26,
    OPC_SLL = OPC_SPECIAL | 0x00,
    OPC_SRL = OPC_SPECIAL | 0x02,
    OPC_ROTR = OPC_SPECIAL | (0x01 << 21) | 0x02,
    OPC_SRA = OPC_SPECIAL | 0x03,
    OPC_SLLV = OPC_SPECIAL | 0x04,
    OPC_SRLV = OPC_SPECIAL | 0x06,
    OPC_ROTRV = OPC_SPECIAL | (0x01 << 6) | 0x06,
    OPC_SRAV = OPC_SPECIAL | 0x07,
    OPC_JR = OPC_SPECIAL | 0x08,
    OPC_JALR = OPC_SPECIAL | 0x09,
    OPC_MOVZ = OPC_SPECIAL | 0x0A,
    OPC_MOVN = OPC_SPECIAL | 0x0B,
    OPC_MFHI = OPC_SPECIAL | 0x10,
    OPC_MFLO = OPC_SPECIAL | 0x12,
    OPC_MULT = OPC_SPECIAL | 0x18,
    OPC_MULTU = OPC_SPECIAL | 0x19,
    OPC_DIV = OPC_SPECIAL | 0x1A,
    OPC_DIVU = OPC_SPECIAL | 0x1B,
    OPC_ADDU = OPC_SPECIAL | 0x21,
    OPC_SUBU = OPC_SPECIAL | 0x23,
    OPC_AND = OPC_SPECIAL | 0x24,
    OPC_OR = OPC_SPECIAL | 0x25,
    OPC_XOR = OPC_SPECIAL | 0x26,
    OPC_NOR = OPC_SPECIAL | 0x27,
    OPC_SLT = OPC_SPECIAL | 0x2A,
    OPC_SLTU = OPC_SPECIAL | 0x2B,

    OPC_REGIMM = 0x01 << 26,
    OPC_BLTZ = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS = OPC_SPECIAL3 | 0x004,
    OPC_WSBH = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB = OPC_SPECIAL3 | 0x420,
    OPC_SEH = OPC_SPECIAL3 | 0x620,
};

/*
 * Type reg
 */
static inline void tcg_out_opc_reg(TCGContext *s, int opc,
                                   TCGReg rd, TCGReg rs, TCGReg rt)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    tcg_out32(s, inst);
}

/*
 * Type immediate
 */
static inline void tcg_out_opc_imm(TCGContext *s, int opc,
                                   TCGReg rt, TCGReg rs, TCGArg imm)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (imm & 0xffff);
    tcg_out32(s, inst);
}

/*
 * Type branch
 */
static inline void tcg_out_opc_br(TCGContext *s, int opc,
                                  TCGReg rt, TCGReg rs)
{
    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);

    tcg_out_opc_imm(s, opc, rt, rs, offset);
}

/*
 * Type sa
 */
static inline void tcg_out_opc_sa(TCGContext *s, int opc,
                                  TCGReg rd, TCGReg rt, TCGArg sa)
{
    int32_t inst;

    inst = opc;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    inst |= (sa & 0x1F) << 6;
    tcg_out32(s, inst);
}

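/* Field layout used by the emitters above (MIPS32 encodings):
     R-type:  opcode[31:26] rs[25:21] rt[20:16] rd[15:11] sa[10:6] funct[5:0]
     I-type:  opcode[31:26] rs[25:21] rt[20:16] immediate[15:0]
   tcg_out_opc_reg fills rs/rt/rd, tcg_out_opc_imm fills rs/rt/imm,
   tcg_out_opc_sa fills rt/rd/sa, and tcg_out_opc_br reuses the I-type
   emitter while preserving the displacement already in the buffer.  */
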
static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, 0);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    /* Simple reg-reg move, optimising out the 'do nothing' case */
    if (ret != arg) {
        tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
    }
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    if (arg == (int16_t)arg) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
    } else if (arg == (uint16_t)arg) {
        tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
    } else {
        tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
        tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
    }
}

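/* Example: tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T1, 0x12345678) emits
       lui  t1, 0x1234
       ori  t1, t1, 0x5678
   whereas constants fitting a signed or unsigned 16-bit immediate are
   materialised with a single addiu or ori from $zero.  */
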
static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
#else
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
#else
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
#else
    /* ret and arg must be different and can't be register at */
    if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
    tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

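/* On MIPS32R2 and later the byte-swap helpers above use wsbh (swap the
   bytes within each halfword), followed by seh or a 16-bit rotate for
   the 16-bit sign-extended and 32-bit cases.  On older cores the 32-bit
   swap is open coded: the low byte is shifted to the top, the top byte
   to the bottom, and the two middle bytes are masked and moved by 8
   bits each, all accumulated into ret via the AT scratch register.  */
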
static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
#endif
}

static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
#endif
}

static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
                                TCGReg arg1, TCGArg arg2)
{
    if (arg2 == (int16_t) arg2) {
        tcg_out_opc_imm(s, opc, arg, arg1, arg2);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
        tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
}

static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
{
    if (val == (int16_t)val) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
        tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
    }
}

/* Helper routines for marshalling helper function arguments into
 * the correct registers and stack.
 * arg_num is where we want to put this argument, and is updated to be ready
 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
 * real registers, 4+ on stack.
 *
 * We provide routines for arguments which are: immediate, 32 bit
 * value in register, 16 and 8 bit values in register (which must be zero
 * extended before use) and 64 bit value in a lo:hi register pair.
 */
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM)                               \
    static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM)             \
    {                                                                          \
    if (*arg_num < 4) {                                                        \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
    } else {                                                                   \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT);                          \
        tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num));   \
    }                                                                          \
    (*arg_num)++;                                                              \
}
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG

/* We don't use the macro for this one to avoid an unnecessary reg-reg
   move when storing to the stack. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
                                           TCGReg arg)
{
    if (*arg_num < 4) {
        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
    } else {
        tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}

static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
                                           TCGReg arg_low, TCGReg arg_high)
{
    (*arg_num) = (*arg_num + 1) & ~1;

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}

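/* The marshalling above follows the o32 calling convention the softmmu
   helpers expect: the first four argument words go in a0-a3, further
   words are stored in the caller's outgoing argument area at
   sp + 4 * arg_num, and 64-bit arguments are aligned to an even
   register/slot pair (hence the rounding of arg_num in
   tcg_out_call_iarg_reg64).  */
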
static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
                           TCGArg arg2, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
        break;
    case TCG_COND_NE:
        tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
        break;
    case TCG_COND_LT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    default:
        tcg_abort();
        break;
    }
    if (l->has_value) {
        reloc_pc16(s->code_ptr - 4, l->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
    }
    tcg_out_nop(s);
}

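/* Each case above ends by emitting the conditional branch itself, so the
   fixup at s->code_ptr - 4 patches exactly that branch: immediately when
   the label is already resolved, otherwise through a queued R_MIPS_PC16
   relocation.  The trailing nop fills the branch delay slot.  */
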
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
                            TCGArg arg2, TCGArg arg3, TCGArg arg4,
                            int label_index)
{
    void *label_ptr;

    switch(cond) {
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
        tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
        return;
    case TCG_COND_EQ:
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
        break;
    default:
        tcg_abort();
    }

    label_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
    tcg_out_nop(s);

    switch(cond) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
        break;
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
        break;
    default:
        tcg_abort();
    }

    reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
}

static void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg c1, TCGArg c2, TCGArg v)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_NE:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    default:
        tcg_abort();
        break;
    }
}

static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg arg1, TCGArg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (arg1 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
        } else if (arg2 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        }
        break;
    case TCG_COND_NE:
        if (arg1 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
        } else if (arg2 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        tcg_abort();
        break;
    }
}

/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_NE:
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
        break;
    default:
        tcg_abort();
        break;
    }

    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);

    switch(cond) {
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
        break;
    default:
        tcg_abort();
    }

    tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}

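/* For the ordered conditions, tcg_out_setcond2 combines two 32-bit
   setconds:
       AT  = strict compare of the high words (arg2 vs arg4)
       T0  = high words equal
       ret = unsigned inclusive compare of the low words (arg1 vs arg3)
       ret = (ret & T0) | AT
   so the high words decide unless they are equal, in which case the
   unsigned comparison of the low words breaks the tie.  */
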
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};
#endif

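/* With CONFIG_SOFTMMU, the qemu_ld/qemu_st routines below emit an inline
   TLB lookup as the fast path: the TLB index is derived from the guest
   address bits above TARGET_PAGE_BITS, the addr_read/addr_write tag of
   the entry is compared against the masked guest address, and on a hit
   the host address is formed by adding the entry's addend.  On a miss,
   control falls through to a call to one of the helpers above, with the
   arguments marshalled by the tcg_out_call_iarg_* routines.  */
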
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    void *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
# if TARGET_LONG_BITS == 64
    uint8_t *label3_ptr;
    TCGReg addr_regh;
    int addr_memh;
# endif
#endif
    data_regl = *args++;
    if (opc == 3)
        data_regh = *args++;
    else
        data_regh = 0;
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc & 3;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }
#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
        break;
    case 1:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
        break;
    case 2:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    case 3:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    default:
        tcg_abort();
    }

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
    }
#endif

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
        break;
    case 0 | 4:
        tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 1 | 4:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
            tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
    TCGReg addr_regh;
    int addr_memh;
# endif
#endif
    data_regl = *args++;
    if (opc == 3) {
        data_regh = *args++;
    } else {
        data_regh = 0;
    }
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }

#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    switch(opc) {
    case 0:
        tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
        break;
    case 1:
        tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
        break;
    case 2:
        tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
        break;
    case 3:
        tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
        break;
    default:
        tcg_abort();
    }
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
    }

#endif

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
            tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
            tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
        tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_abort();
        } else {
            /* indirect jump method */
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
            tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_br:
        tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
        break;

    case INDEX_op_mov_i32:
        tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_sub_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_mul_i32:
        tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
        break;
    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_nor_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
        break;
    case INDEX_op_not_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
        }
        break;
    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
        }
        break;

    /* The bswap routines do not work on non-R2 CPUs. In that case
       we let TCG generate the corresponding code. */
    case INDEX_op_bswap16_i32:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0], args[1]);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
        tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
                        ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
        break;

    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}

static const TCGTargetOpDef mips_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "C" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },

    { INDEX_op_and_i32, { "r", "rZ", "rI" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_not_i32, { "r", "rZ" } },
    { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },

    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },

    { INDEX_op_deposit_i32, { "r", "0", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rZ", "0" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "L", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
#else
    { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_FP,
    TCG_REG_RA,       /* should be last for ABI compliance */
};

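/* Stack frame laid out by the prologue below, from sp upwards:
   the TCG_STATIC_CALL_ARGS_SIZE outgoing helper argument area, then the
   saved callee-saved registers listed above (at sp +
   TCG_STATIC_CALL_ARGS_SIZE + 4*i), then the TCG temporary buffer of
   CPU_TEMP_BUF_NLONGS longs registered with tcg_set_frame, the whole
   size rounded up to TCG_TARGET_STACK_ALIGN.  */
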
/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size;

    /* reserve some stack space, also for TCG temps. */
    frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                 + TCG_STATIC_CALL_ARGS_SIZE
                 + CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                  + TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* TB prologue */
    tcg_out_addi(s, TCG_REG_SP, -frame_size);
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* Call generated code */
    tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    tb_ret_addr = s->code_ptr;

    /* TB epilogue */
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
    tcg_out_addi(s, TCG_REG_SP, frame_size);
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
    tcg_regset_set(tcg_target_call_clobber_regs,
                   (1 << TCG_REG_V0) |
                   (1 << TCG_REG_V1) |
                   (1 << TCG_REG_A0) |
                   (1 << TCG_REG_A1) |
                   (1 << TCG_REG_A2) |
                   (1 << TCG_REG_A3) |
                   (1 << TCG_REG_T1) |
                   (1 << TCG_REG_T2) |
                   (1 << TCG_REG_T3) |
                   (1 << TCG_REG_T4) |
                   (1 << TCG_REG_T5) |
                   (1 << TCG_REG_T6) |
                   (1 << TCG_REG_T7) |
                   (1 << TCG_REG_T8) |
                   (1 << TCG_REG_T9));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA);   /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);   /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);   /* global pointer */

    tcg_add_target_add_op_defs(mips_op_defs);
}