tcg-mips: Split out tcg_out_call
[qemu.git] / tcg / mips / tcg-target.c
blob 65acc8677bd83996875bd1ecdd3de0755d6ec7c7
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 * THE SOFTWARE.
25 */
27 #include "tcg-be-null.h"
29 #if defined(HOST_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
30 # define TCG_NEED_BSWAP 0
31 #else
32 # define TCG_NEED_BSWAP 1
33 #endif
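/* When host and guest byte order differ, the qemu_ld/qemu_st paths below
   must byte-swap the data using the tcg_out_bswap* helpers.  */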
35 #ifndef NDEBUG
36 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
37 "zero",
38 "at",
39 "v0",
40 "v1",
41 "a0",
42 "a1",
43 "a2",
44 "a3",
45 "t0",
46 "t1",
47 "t2",
48 "t3",
49 "t4",
50 "t5",
51 "t6",
52 "t7",
53 "s0",
54 "s1",
55 "s2",
56 "s3",
57 "s4",
58 "s5",
59 "s6",
60 "s7",
61 "t8",
62 "t9",
63 "k0",
64 "k1",
65 "gp",
66 "sp",
67 "fp",
68 "ra",
70 #endif
72 /* check if we really need so many registers :P */
73 static const TCGReg tcg_target_reg_alloc_order[] = {
74 TCG_REG_S0,
75 TCG_REG_S1,
76 TCG_REG_S2,
77 TCG_REG_S3,
78 TCG_REG_S4,
79 TCG_REG_S5,
80 TCG_REG_S6,
81 TCG_REG_S7,
82 TCG_REG_T1,
83 TCG_REG_T2,
84 TCG_REG_T3,
85 TCG_REG_T4,
86 TCG_REG_T5,
87 TCG_REG_T6,
88 TCG_REG_T7,
89 TCG_REG_T8,
90 TCG_REG_T9,
91 TCG_REG_A0,
92 TCG_REG_A1,
93 TCG_REG_A2,
94 TCG_REG_A3,
95 TCG_REG_V0,
96 TCG_REG_V1
99 static const TCGReg tcg_target_call_iarg_regs[4] = {
100 TCG_REG_A0,
101 TCG_REG_A1,
102 TCG_REG_A2,
103 TCG_REG_A3
106 static const TCGReg tcg_target_call_oarg_regs[2] = {
107 TCG_REG_V0,
108 TCG_REG_V1
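/* Start of the epilogue emitted by tcg_target_qemu_prologue; the code
   generated for INDEX_op_exit_tb jumps back here.  */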
111 static tcg_insn_unit *tb_ret_addr;
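/* Branch (PC16) relocations are counted in 32-bit instruction words,
   relative to the instruction following the branch (the delay slot),
   hence the "target - (pc + 1)" below, and must fit in a signed 16-bit
   field.  The 26-bit (J-type) relocation is region-relative: the target
   must share the top four address bits with the jump instruction.  */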
113 static inline uint32_t reloc_pc16_val(tcg_insn_unit *pc, tcg_insn_unit *target)
115 /* Let the compiler perform the right-shift as part of the arithmetic. */
116 ptrdiff_t disp = target - (pc + 1);
117 assert(disp == (int16_t)disp);
118 return disp & 0xffff;
121 static inline void reloc_pc16(tcg_insn_unit *pc, tcg_insn_unit *target)
123 *pc = deposit32(*pc, 0, 16, reloc_pc16_val(pc, target));
126 static inline uint32_t reloc_26_val(tcg_insn_unit *pc, tcg_insn_unit *target)
128 assert((((uintptr_t)pc ^ (uintptr_t)target) & 0xf0000000) == 0);
129 return ((uintptr_t)target >> 2) & 0x3ffffff;
132 static inline void reloc_26(tcg_insn_unit *pc, tcg_insn_unit *target)
134 *pc = deposit32(*pc, 0, 26, reloc_26_val(pc, target));
137 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
138 intptr_t value, intptr_t addend)
140 assert(type == R_MIPS_PC16);
141 assert(addend == 0);
142 reloc_pc16(code_ptr, (tcg_insn_unit *)value);
145 /* parse target specific constraints */
146 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
148 const char *ct_str;
150 ct_str = *pct_str;
151 switch(ct_str[0]) {
152 case 'r':
153 ct->ct |= TCG_CT_REG;
154 tcg_regset_set(ct->u.regs, 0xffffffff);
155 break;
156 case 'L': /* qemu_ld output arg constraint */
157 ct->ct |= TCG_CT_REG;
158 tcg_regset_set(ct->u.regs, 0xffffffff);
159 tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
160 break;
161 case 'l': /* qemu_ld input arg constraint */
162 ct->ct |= TCG_CT_REG;
163 tcg_regset_set(ct->u.regs, 0xffffffff);
164 #if defined(CONFIG_SOFTMMU)
165 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
166 # if (TARGET_LONG_BITS == 64)
167 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
168 # endif
169 #endif
170 break;
171 case 'S': /* qemu_st constraint */
172 ct->ct |= TCG_CT_REG;
173 tcg_regset_set(ct->u.regs, 0xffffffff);
174 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
175 #if defined(CONFIG_SOFTMMU)
176 # if (TARGET_LONG_BITS == 32)
177 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
178 # endif
179 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
180 # if TARGET_LONG_BITS == 64
181 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
182 # endif
183 #endif
184 break;
185 case 'I':
186 ct->ct |= TCG_CT_CONST_U16;
187 break;
188 case 'J':
189 ct->ct |= TCG_CT_CONST_S16;
190 break;
191 case 'Z':
192 /* We are cheating a bit here, using the fact that the register
193 ZERO is also the register number 0. Hence there is no need
194 to check for const_args in each instruction. */
195 ct->ct |= TCG_CT_CONST_ZERO;
196 break;
197 default:
198 return -1;
200 ct_str++;
201 *pct_str = ct_str;
202 return 0;
205 /* test if a constant matches the constraint */
206 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
207 const TCGArgConstraint *arg_ct)
209 int ct;
210 ct = arg_ct->ct;
211 if (ct & TCG_CT_CONST)
212 return 1;
213 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
214 return 1;
215 else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
216 return 1;
217 else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
218 return 1;
219 else
220 return 0;
223 /* instruction opcodes */
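/* The major opcode occupies bits 31:26.  The SPECIAL, SPECIAL2 and
   SPECIAL3 groups select the operation through the low function field
   (plus, for some SPECIAL3 forms, the sa subfield), while REGIMM selects
   it through the rt field (bits 20:16).  */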
224 enum {
225 OPC_BEQ = 0x04 << 26,
226 OPC_BNE = 0x05 << 26,
227 OPC_BLEZ = 0x06 << 26,
228 OPC_BGTZ = 0x07 << 26,
229 OPC_ADDIU = 0x09 << 26,
230 OPC_SLTI = 0x0A << 26,
231 OPC_SLTIU = 0x0B << 26,
232 OPC_ANDI = 0x0C << 26,
233 OPC_ORI = 0x0D << 26,
234 OPC_XORI = 0x0E << 26,
235 OPC_LUI = 0x0F << 26,
236 OPC_LB = 0x20 << 26,
237 OPC_LH = 0x21 << 26,
238 OPC_LW = 0x23 << 26,
239 OPC_LBU = 0x24 << 26,
240 OPC_LHU = 0x25 << 26,
241 OPC_LWU = 0x27 << 26,
242 OPC_SB = 0x28 << 26,
243 OPC_SH = 0x29 << 26,
244 OPC_SW = 0x2B << 26,
246 OPC_SPECIAL = 0x00 << 26,
247 OPC_SLL = OPC_SPECIAL | 0x00,
248 OPC_SRL = OPC_SPECIAL | 0x02,
249 OPC_ROTR = OPC_SPECIAL | (0x01 << 21) | 0x02,
250 OPC_SRA = OPC_SPECIAL | 0x03,
251 OPC_SLLV = OPC_SPECIAL | 0x04,
252 OPC_SRLV = OPC_SPECIAL | 0x06,
253 OPC_ROTRV = OPC_SPECIAL | (0x01 << 6) | 0x06,
254 OPC_SRAV = OPC_SPECIAL | 0x07,
255 OPC_JR = OPC_SPECIAL | 0x08,
256 OPC_JALR = OPC_SPECIAL | 0x09,
257 OPC_MOVZ = OPC_SPECIAL | 0x0A,
258 OPC_MOVN = OPC_SPECIAL | 0x0B,
259 OPC_MFHI = OPC_SPECIAL | 0x10,
260 OPC_MFLO = OPC_SPECIAL | 0x12,
261 OPC_MULT = OPC_SPECIAL | 0x18,
262 OPC_MULTU = OPC_SPECIAL | 0x19,
263 OPC_DIV = OPC_SPECIAL | 0x1A,
264 OPC_DIVU = OPC_SPECIAL | 0x1B,
265 OPC_ADDU = OPC_SPECIAL | 0x21,
266 OPC_SUBU = OPC_SPECIAL | 0x23,
267 OPC_AND = OPC_SPECIAL | 0x24,
268 OPC_OR = OPC_SPECIAL | 0x25,
269 OPC_XOR = OPC_SPECIAL | 0x26,
270 OPC_NOR = OPC_SPECIAL | 0x27,
271 OPC_SLT = OPC_SPECIAL | 0x2A,
272 OPC_SLTU = OPC_SPECIAL | 0x2B,
274 OPC_REGIMM = 0x01 << 26,
275 OPC_BLTZ = OPC_REGIMM | (0x00 << 16),
276 OPC_BGEZ = OPC_REGIMM | (0x01 << 16),
278 OPC_SPECIAL2 = 0x1c << 26,
279 OPC_MUL = OPC_SPECIAL2 | 0x002,
281 OPC_SPECIAL3 = 0x1f << 26,
282 OPC_INS = OPC_SPECIAL3 | 0x004,
283 OPC_WSBH = OPC_SPECIAL3 | 0x0a0,
284 OPC_SEB = OPC_SPECIAL3 | 0x420,
285 OPC_SEH = OPC_SPECIAL3 | 0x620,
288 /*
289 * Type reg
290 */
291 static inline void tcg_out_opc_reg(TCGContext *s, int opc,
292 TCGReg rd, TCGReg rs, TCGReg rt)
294 int32_t inst;
296 inst = opc;
297 inst |= (rs & 0x1F) << 21;
298 inst |= (rt & 0x1F) << 16;
299 inst |= (rd & 0x1F) << 11;
300 tcg_out32(s, inst);
303 /*
304 * Type immediate
305 */
306 static inline void tcg_out_opc_imm(TCGContext *s, int opc,
307 TCGReg rt, TCGReg rs, TCGArg imm)
309 int32_t inst;
311 inst = opc;
312 inst |= (rs & 0x1F) << 21;
313 inst |= (rt & 0x1F) << 16;
314 inst |= (imm & 0xffff);
315 tcg_out32(s, inst);
318 /*
319 * Type branch
320 */
321 static inline void tcg_out_opc_br(TCGContext *s, int opc,
322 TCGReg rt, TCGReg rs)
324 /* We take care here not to modify the branch target by reading
325 the existing value and using it again. This ensures that caches and
326 memory are kept coherent during retranslation. */
327 uint16_t offset = (uint16_t)*s->code_ptr;
329 tcg_out_opc_imm(s, opc, rt, rs, offset);
332 /*
333 * Type sa
334 */
335 static inline void tcg_out_opc_sa(TCGContext *s, int opc,
336 TCGReg rd, TCGReg rt, TCGArg sa)
338 int32_t inst;
340 inst = opc;
341 inst |= (rt & 0x1F) << 16;
342 inst |= (rd & 0x1F) << 11;
343 inst |= (sa & 0x1F) << 6;
344 tcg_out32(s, inst);
348 static inline void tcg_out_nop(TCGContext *s)
350 tcg_out32(s, 0);
353 static inline void tcg_out_mov(TCGContext *s, TCGType type,
354 TCGReg ret, TCGReg arg)
356 /* Simple reg-reg move, optimising out the 'do nothing' case */
357 if (ret != arg) {
358 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
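/* Load a 32-bit constant: a single ADDIU or ORI when the value fits in
   signed or unsigned 16 bits respectively, otherwise a LUI/ORI pair.  */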
362 static inline void tcg_out_movi(TCGContext *s, TCGType type,
363 TCGReg reg, tcg_target_long arg)
365 if (arg == (int16_t)arg) {
366 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
367 } else if (arg == (uint16_t)arg) {
368 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
369 } else {
370 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
371 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
375 static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
377 if (use_mips32r2_instructions) {
378 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
379 } else {
380 /* ret and arg can't be register at */
381 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
382 tcg_abort();
385 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
386 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
387 tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
388 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
392 static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
394 if (use_mips32r2_instructions) {
395 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
396 tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
397 } else {
398 /* ret and arg can't be register at */
399 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
400 tcg_abort();
403 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
404 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
405 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
406 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
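/* Byte-swap a 32-bit word.  With MIPS32R2 this is WSBH plus a 16-bit
   rotate; otherwise the result is assembled with shifts, masks and ORs
   through $at, which is why ret, arg and $at must all be distinct.  */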
410 static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
412 if (use_mips32r2_instructions) {
413 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
414 tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
415 } else {
416 /* ret and arg must be different and can't be register at */
417 if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
418 tcg_abort();
421 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
423 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
424 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
426 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
427 tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
428 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
430 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
431 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
432 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
436 static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
438 if (use_mips32r2_instructions) {
439 tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
440 } else {
441 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
442 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
446 static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
448 if (use_mips32r2_instructions) {
449 tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
450 } else {
451 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
452 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
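/* Loads and stores only take a signed 16-bit displacement; larger
   offsets are materialized in $at and added to the base register first.  */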
456 static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
457 TCGReg arg1, TCGArg arg2)
459 if (arg2 == (int16_t) arg2) {
460 tcg_out_opc_imm(s, opc, arg, arg1, arg2);
461 } else {
462 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
463 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
464 tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
468 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
469 TCGReg arg1, intptr_t arg2)
471 tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
474 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
475 TCGReg arg1, intptr_t arg2)
477 tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
480 static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
482 if (val == (int16_t)val) {
483 tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
484 } else {
485 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
486 tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
490 /* Helper routines for marshalling helper function arguments into
491 * the correct registers and stack.
492 * arg_num is where we want to put this argument, and is updated to be ready
493 * for the next call. arg is the argument itself. Note that arg_num 0..3 are
494 * real registers, 4+ go on the stack.
496 * We provide routines for arguments which are: an immediate, a 32-bit
497 * value in a register, 16- and 8-bit values in a register (which must be
498 * zero-extended before use) and a 64-bit value in a lo:hi register pair.
499 */
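/* This matches the o32 calling convention: the first four argument words
   go in $a0-$a3 and further words on the stack (the caller always reserves
   the four register-argument slots, so the first stack word sits at
   SP + 16), and 64-bit arguments start on an even argument index, which is
   why tcg_out_call_iarg_reg64 rounds arg_num up.  */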
500 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
501 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
503 if (*arg_num < 4) { \
504 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
505 } else { \
506 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
507 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
509 (*arg_num)++; \
511 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
512 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
513 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
514 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
515 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
516 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
517 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
518 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
519 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
520 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
521 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
522 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
524 /* We don't use the macro for this one to avoid an unnecessary reg-reg
525 move when storing to the stack. */
526 static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
527 TCGReg arg)
529 if (*arg_num < 4) {
530 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
531 } else {
532 tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
534 (*arg_num)++;
537 static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
538 TCGReg arg_low, TCGReg arg_high)
540 (*arg_num) = (*arg_num + 1) & ~1;
542 #if defined(HOST_WORDS_BIGENDIAN)
543 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
544 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
545 #else
546 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
547 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
548 #endif
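/* MIPS only has equal/not-equal branches on register pairs and
   compare-with-zero branches, so the remaining conditions are computed
   into $at with SLT/SLTU and then tested with BNE/BEQ against $zero.
   A nop is emitted for the branch delay slot.  */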
551 static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
552 TCGArg arg2, int label_index)
554 TCGLabel *l = &s->labels[label_index];
556 switch (cond) {
557 case TCG_COND_EQ:
558 tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
559 break;
560 case TCG_COND_NE:
561 tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
562 break;
563 case TCG_COND_LT:
564 if (arg2 == 0) {
565 tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
566 } else {
567 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
568 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
570 break;
571 case TCG_COND_LTU:
572 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
573 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
574 break;
575 case TCG_COND_GE:
576 if (arg2 == 0) {
577 tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
578 } else {
579 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
580 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
582 break;
583 case TCG_COND_GEU:
584 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
585 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
586 break;
587 case TCG_COND_LE:
588 if (arg2 == 0) {
589 tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
590 } else {
591 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
592 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
594 break;
595 case TCG_COND_LEU:
596 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
597 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
598 break;
599 case TCG_COND_GT:
600 if (arg2 == 0) {
601 tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
602 } else {
603 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
604 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
606 break;
607 case TCG_COND_GTU:
608 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
609 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
610 break;
611 default:
612 tcg_abort();
613 break;
615 if (l->has_value) {
616 reloc_pc16(s->code_ptr - 1, l->u.value_ptr);
617 } else {
618 tcg_out_reloc(s, s->code_ptr - 1, R_MIPS_PC16, label_index, 0);
620 tcg_out_nop(s);
623 /* XXX: we implement it at the target level to avoid having to
624 handle cross-basic-block temporaries */
625 static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
626 TCGArg arg2, TCGArg arg3, TCGArg arg4,
627 int label_index)
629 tcg_insn_unit *label_ptr;
631 switch(cond) {
632 case TCG_COND_NE:
633 tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
634 tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
635 return;
636 case TCG_COND_EQ:
637 break;
638 case TCG_COND_LT:
639 case TCG_COND_LE:
640 tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
641 break;
642 case TCG_COND_GT:
643 case TCG_COND_GE:
644 tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
645 break;
646 case TCG_COND_LTU:
647 case TCG_COND_LEU:
648 tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
649 break;
650 case TCG_COND_GTU:
651 case TCG_COND_GEU:
652 tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
653 break;
654 default:
655 tcg_abort();
658 label_ptr = s->code_ptr;
659 tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
660 tcg_out_nop(s);
662 switch(cond) {
663 case TCG_COND_EQ:
664 tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
665 break;
666 case TCG_COND_LT:
667 case TCG_COND_LTU:
668 tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
669 break;
670 case TCG_COND_LE:
671 case TCG_COND_LEU:
672 tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
673 break;
674 case TCG_COND_GT:
675 case TCG_COND_GTU:
676 tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
677 break;
678 case TCG_COND_GE:
679 case TCG_COND_GEU:
680 tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
681 break;
682 default:
683 tcg_abort();
686 reloc_pc16(label_ptr, s->code_ptr);
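/* movcond relies on the MOVZ/MOVN conditional moves; as noted by the
   probe in tcg_target_detect_isa, use_movnz_instructions is what makes
   this op available.  */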
689 static void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGReg ret,
690 TCGArg c1, TCGArg c2, TCGArg v)
692 switch (cond) {
693 case TCG_COND_EQ:
694 if (c1 == 0) {
695 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c2);
696 } else if (c2 == 0) {
697 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c1);
698 } else {
699 tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
700 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
702 break;
703 case TCG_COND_NE:
704 if (c1 == 0) {
705 tcg_out_opc_reg(s, OPC_MOVN, ret, v, c2);
706 } else if (c2 == 0) {
707 tcg_out_opc_reg(s, OPC_MOVN, ret, v, c1);
708 } else {
709 tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
710 tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
712 break;
713 case TCG_COND_LT:
714 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
715 tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
716 break;
717 case TCG_COND_LTU:
718 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
719 tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
720 break;
721 case TCG_COND_GE:
722 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
723 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
724 break;
725 case TCG_COND_GEU:
726 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
727 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
728 break;
729 case TCG_COND_LE:
730 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
731 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
732 break;
733 case TCG_COND_LEU:
734 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
735 tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
736 break;
737 case TCG_COND_GT:
738 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
739 tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
740 break;
741 case TCG_COND_GTU:
742 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
743 tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
744 break;
745 default:
746 tcg_abort();
747 break;
751 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
752 TCGArg arg1, TCGArg arg2)
754 switch (cond) {
755 case TCG_COND_EQ:
756 if (arg1 == 0) {
757 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
758 } else if (arg2 == 0) {
759 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
760 } else {
761 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
762 tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
764 break;
765 case TCG_COND_NE:
766 if (arg1 == 0) {
767 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
768 } else if (arg2 == 0) {
769 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
770 } else {
771 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
772 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
774 break;
775 case TCG_COND_LT:
776 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
777 break;
778 case TCG_COND_LTU:
779 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
780 break;
781 case TCG_COND_GE:
782 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
783 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
784 break;
785 case TCG_COND_GEU:
786 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
787 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
788 break;
789 case TCG_COND_LE:
790 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
791 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
792 break;
793 case TCG_COND_LEU:
794 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
795 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
796 break;
797 case TCG_COND_GT:
798 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
799 break;
800 case TCG_COND_GTU:
801 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
802 break;
803 default:
804 tcg_abort();
805 break;
809 /* XXX: we implement it at the target level to avoid having to
810 handle cross-basic-block temporaries */
811 static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
812 TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
814 switch (cond) {
815 case TCG_COND_EQ:
816 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
817 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
818 tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
819 return;
820 case TCG_COND_NE:
821 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
822 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
823 tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
824 return;
825 case TCG_COND_LT:
826 case TCG_COND_LE:
827 tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
828 break;
829 case TCG_COND_GT:
830 case TCG_COND_GE:
831 tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
832 break;
833 case TCG_COND_LTU:
834 case TCG_COND_LEU:
835 tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
836 break;
837 case TCG_COND_GTU:
838 case TCG_COND_GEU:
839 tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
840 break;
841 default:
842 tcg_abort();
843 break;
846 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
848 switch(cond) {
849 case TCG_COND_LT:
850 case TCG_COND_LTU:
851 tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
852 break;
853 case TCG_COND_LE:
854 case TCG_COND_LEU:
855 tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
856 break;
857 case TCG_COND_GT:
858 case TCG_COND_GTU:
859 tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
860 break;
861 case TCG_COND_GE:
862 case TCG_COND_GEU:
863 tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
864 break;
865 default:
866 tcg_abort();
869 tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
870 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
873 #if defined(CONFIG_SOFTMMU)
874 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
875 int mmu_idx) */
876 static const void * const qemu_ld_helpers[4] = {
877 helper_ldb_mmu,
878 helper_ldw_mmu,
879 helper_ldl_mmu,
880 helper_ldq_mmu,
883 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
884 uintxx_t val, int mmu_idx) */
885 static const void * const qemu_st_helpers[4] = {
886 helper_stb_mmu,
887 helper_stw_mmu,
888 helper_stl_mmu,
889 helper_stq_mmu,
891 #endif
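/* Emit a guest load.  With the softmmu, the TLB entry for the page is
   looked up inline and the tag compared against addr_read; on a hit the
   code branches to the fast path, which adds the TLB addend and performs
   the access, while on a miss it falls through to a call of the matching
   helper_ld*_mmu (the slow path) and then skips over the fast path.  */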
893 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
894 int opc)
896 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
897 #if defined(CONFIG_SOFTMMU)
898 tcg_insn_unit *label1_ptr, *label2_ptr;
899 int arg_num;
900 int mem_index, s_bits;
901 int addr_meml;
902 # if TARGET_LONG_BITS == 64
903 tcg_insn_unit *label3_ptr;
904 TCGReg addr_regh;
905 int addr_memh;
906 # endif
907 #endif
908 data_regl = *args++;
909 if (opc == 3)
910 data_regh = *args++;
911 else
912 data_regh = 0;
913 addr_regl = *args++;
914 #if defined(CONFIG_SOFTMMU)
915 # if TARGET_LONG_BITS == 64
916 addr_regh = *args++;
917 # if defined(HOST_WORDS_BIGENDIAN)
918 addr_memh = 0;
919 addr_meml = 4;
920 # else
921 addr_memh = 4;
922 addr_meml = 0;
923 # endif
924 # else
925 addr_meml = 0;
926 # endif
927 mem_index = *args;
928 s_bits = opc & 3;
929 #endif
931 if (opc == 3) {
932 #if defined(HOST_WORDS_BIGENDIAN)
933 data_reg1 = data_regh;
934 data_reg2 = data_regl;
935 #else
936 data_reg1 = data_regl;
937 data_reg2 = data_regh;
938 #endif
939 } else {
940 data_reg1 = data_regl;
941 data_reg2 = 0;
943 #if defined(CONFIG_SOFTMMU)
944 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
945 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
946 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
947 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
948 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
949 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
950 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
952 # if TARGET_LONG_BITS == 64
953 label3_ptr = s->code_ptr;
954 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
955 tcg_out_nop(s);
957 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
958 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);
960 label1_ptr = s->code_ptr;
961 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
962 tcg_out_nop(s);
964 reloc_pc16(label3_ptr, s->code_ptr);
965 # else
966 label1_ptr = s->code_ptr;
967 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
968 tcg_out_nop(s);
969 # endif
971 /* slow path */
972 arg_num = 0;
973 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
974 # if TARGET_LONG_BITS == 64
975 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
976 # else
977 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
978 # endif
979 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
980 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
981 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
982 tcg_out_nop(s);
984 switch(opc) {
985 case 0:
986 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
987 break;
988 case 0 | 4:
989 tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
990 break;
991 case 1:
992 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
993 break;
994 case 1 | 4:
995 tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
996 break;
997 case 2:
998 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
999 break;
1000 case 3:
1001 tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
1002 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
1003 break;
1004 default:
1005 tcg_abort();
1008 label2_ptr = s->code_ptr;
1009 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1010 tcg_out_nop(s);
1012 /* label1: fast path */
1013 reloc_pc16(label1_ptr, s->code_ptr);
1015 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1016 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1017 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
1018 #else
1019 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1020 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
1021 } else {
1022 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
1023 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
1025 #endif
1027 switch(opc) {
1028 case 0:
1029 tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
1030 break;
1031 case 0 | 4:
1032 tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
1033 break;
1034 case 1:
1035 if (TCG_NEED_BSWAP) {
1036 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1037 tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
1038 } else {
1039 tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
1041 break;
1042 case 1 | 4:
1043 if (TCG_NEED_BSWAP) {
1044 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1045 tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
1046 } else {
1047 tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
1049 break;
1050 case 2:
1051 if (TCG_NEED_BSWAP) {
1052 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1053 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1054 } else {
1055 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1057 break;
1058 case 3:
1059 if (TCG_NEED_BSWAP) {
1060 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
1061 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1062 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1063 tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
1064 } else {
1065 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1066 tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
1068 break;
1069 default:
1070 tcg_abort();
1073 #if defined(CONFIG_SOFTMMU)
1074 reloc_pc16(label2_ptr, s->code_ptr);
1075 #endif
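/* Emit a guest store.  This mirrors tcg_out_qemu_ld above, except that
   the TLB comparison uses addr_write and the value to be stored is
   marshalled as an extra argument to the helper_st*_mmu slow-path call.  */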
1078 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
1079 int opc)
1081 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
1082 #if defined(CONFIG_SOFTMMU)
1083 tcg_insn_unit *label1_ptr, *label2_ptr;
1084 int arg_num;
1085 int mem_index, s_bits;
1086 int addr_meml;
1087 #endif
1088 #if TARGET_LONG_BITS == 64
1089 # if defined(CONFIG_SOFTMMU)
1090 tcg_insn_unit *label3_ptr;
1091 TCGReg addr_regh;
1092 int addr_memh;
1093 # endif
1094 #endif
1095 data_regl = *args++;
1096 if (opc == 3) {
1097 data_regh = *args++;
1098 } else {
1099 data_regh = 0;
1101 addr_regl = *args++;
1102 #if defined(CONFIG_SOFTMMU)
1103 # if TARGET_LONG_BITS == 64
1104 addr_regh = *args++;
1105 # if defined(HOST_WORDS_BIGENDIAN)
1106 addr_memh = 0;
1107 addr_meml = 4;
1108 # else
1109 addr_memh = 4;
1110 addr_meml = 0;
1111 # endif
1112 # else
1113 addr_meml = 0;
1114 # endif
1115 mem_index = *args;
1116 s_bits = opc;
1117 #endif
1119 if (opc == 3) {
1120 #if defined(HOST_WORDS_BIGENDIAN)
1121 data_reg1 = data_regh;
1122 data_reg2 = data_regl;
1123 #else
1124 data_reg1 = data_regl;
1125 data_reg2 = data_regh;
1126 #endif
1127 } else {
1128 data_reg1 = data_regl;
1129 data_reg2 = 0;
1132 #if defined(CONFIG_SOFTMMU)
1133 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1134 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
1135 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
1136 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1137 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
1138 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
1139 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
1141 # if TARGET_LONG_BITS == 64
1142 label3_ptr = s->code_ptr;
1143 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
1144 tcg_out_nop(s);
1146 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1147 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);
1149 label1_ptr = s->code_ptr;
1150 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
1151 tcg_out_nop(s);
1153 reloc_pc16(label3_ptr, s->code_ptr);
1154 # else
1155 label1_ptr = s->code_ptr;
1156 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
1157 tcg_out_nop(s);
1158 # endif
1160 /* slow path */
1161 arg_num = 0;
1162 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
1163 # if TARGET_LONG_BITS == 64
1164 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
1165 # else
1166 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
1167 # endif
1168 switch(opc) {
1169 case 0:
1170 tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
1171 break;
1172 case 1:
1173 tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
1174 break;
1175 case 2:
1176 tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
1177 break;
1178 case 3:
1179 tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
1180 break;
1181 default:
1182 tcg_abort();
1184 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
1185 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
1186 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
1187 tcg_out_nop(s);
1189 label2_ptr = s->code_ptr;
1190 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1191 tcg_out_nop(s);
1193 /* label1: fast path */
1194 reloc_pc16(label1_ptr, s->code_ptr);
1196 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1197 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1198 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1199 #else
1200 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1201 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
1202 } else {
1203 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
1204 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1207 #endif
1209 switch(opc) {
1210 case 0:
1211 tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
1212 break;
1213 case 1:
1214 if (TCG_NEED_BSWAP) {
1215 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
1216 tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
1217 tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
1218 } else {
1219 tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
1221 break;
1222 case 2:
1223 if (TCG_NEED_BSWAP) {
1224 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1225 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1226 } else {
1227 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1229 break;
1230 case 3:
1231 if (TCG_NEED_BSWAP) {
1232 tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
1233 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1234 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1235 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
1236 } else {
1237 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1238 tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
1240 break;
1241 default:
1242 tcg_abort();
1245 #if defined(CONFIG_SOFTMMU)
1246 reloc_pc16(label2_ptr, s->code_ptr);
1247 #endif
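/* Call a C helper: the target address is loaded into $t9 and reached
   with JALR, with a nop for the delay slot.  Using $t9 also matches the
   MIPS convention that position-independent callees expect their own
   address in that register.  */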
1250 static void tcg_out_call(TCGContext *s, tcg_insn_unit *target)
1252 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_T9, (intptr_t)target);
1253 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
1254 tcg_out_nop(s);
1257 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1258 const TCGArg *args, const int *const_args)
1260 switch(opc) {
1261 case INDEX_op_exit_tb:
1262 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
1263 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (uintptr_t)tb_ret_addr);
1264 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1265 tcg_out_nop(s);
1266 break;
1267 case INDEX_op_goto_tb:
1268 if (s->tb_jmp_offset) {
1269 /* direct jump method */
1270 tcg_abort();
1271 } else {
1272 /* indirect jump method */
1273 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT,
1274 (uintptr_t)(s->tb_next + args[0]));
1275 tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
1276 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1278 tcg_out_nop(s);
1279 s->tb_next_offset[args[0]] = tcg_current_code_size(s);
1280 break;
1281 case INDEX_op_call:
1282 assert(const_args[0]);
1283 tcg_out_call(s, (tcg_insn_unit *)(intptr_t)args[0]);
1284 break;
1285 case INDEX_op_br:
1286 tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
1287 break;
1289 case INDEX_op_mov_i32:
1290 tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
1291 break;
1292 case INDEX_op_movi_i32:
1293 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1294 break;
1296 case INDEX_op_ld8u_i32:
1297 tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
1298 break;
1299 case INDEX_op_ld8s_i32:
1300 tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
1301 break;
1302 case INDEX_op_ld16u_i32:
1303 tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
1304 break;
1305 case INDEX_op_ld16s_i32:
1306 tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
1307 break;
1308 case INDEX_op_ld_i32:
1309 tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
1310 break;
1311 case INDEX_op_st8_i32:
1312 tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
1313 break;
1314 case INDEX_op_st16_i32:
1315 tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
1316 break;
1317 case INDEX_op_st_i32:
1318 tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
1319 break;
1321 case INDEX_op_add_i32:
1322 if (const_args[2]) {
1323 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
1324 } else {
1325 tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
1327 break;
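/* For add2/sub2 the low-part carry/borrow is recovered with SLTU on the
   unsigned low result and then folded into the high part; $at and $t0
   serve as scratch so the outputs may alias the inputs.  */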
1328 case INDEX_op_add2_i32:
1329 if (const_args[4]) {
1330 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
1331 } else {
1332 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
1334 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
1335 if (const_args[5]) {
1336 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
1337 } else {
1338 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
1340 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
1341 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1342 break;
1343 case INDEX_op_sub_i32:
1344 if (const_args[2]) {
1345 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
1346 } else {
1347 tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
1349 break;
1350 case INDEX_op_sub2_i32:
1351 if (const_args[4]) {
1352 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
1353 } else {
1354 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
1356 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
1357 if (const_args[5]) {
1358 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
1359 } else {
1360 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
1362 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
1363 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1364 break;
1365 case INDEX_op_mul_i32:
1366 if (use_mips32_instructions) {
1367 tcg_out_opc_reg(s, OPC_MUL, args[0], args[1], args[2]);
1368 } else {
1369 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1370 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1372 break;
1373 case INDEX_op_muls2_i32:
1374 tcg_out_opc_reg(s, OPC_MULT, 0, args[2], args[3]);
1375 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1376 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
1377 break;
1378 case INDEX_op_mulu2_i32:
1379 tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
1380 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1381 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
1382 break;
1383 case INDEX_op_mulsh_i32:
1384 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1385 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1386 break;
1387 case INDEX_op_muluh_i32:
1388 tcg_out_opc_reg(s, OPC_MULTU, 0, args[1], args[2]);
1389 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1390 break;
1391 case INDEX_op_div_i32:
1392 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1393 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1394 break;
1395 case INDEX_op_divu_i32:
1396 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1397 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1398 break;
1399 case INDEX_op_rem_i32:
1400 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1401 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1402 break;
1403 case INDEX_op_remu_i32:
1404 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1405 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1406 break;
1408 case INDEX_op_and_i32:
1409 if (const_args[2]) {
1410 tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
1411 } else {
1412 tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
1414 break;
1415 case INDEX_op_or_i32:
1416 if (const_args[2]) {
1417 tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
1418 } else {
1419 tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
1421 break;
1422 case INDEX_op_nor_i32:
1423 tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
1424 break;
1425 case INDEX_op_not_i32:
1426 tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
1427 break;
1428 case INDEX_op_xor_i32:
1429 if (const_args[2]) {
1430 tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
1431 } else {
1432 tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
1434 break;
1436 case INDEX_op_sar_i32:
1437 if (const_args[2]) {
1438 tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
1439 } else {
1440 tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
1442 break;
1443 case INDEX_op_shl_i32:
1444 if (const_args[2]) {
1445 tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
1446 } else {
1447 tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
1449 break;
1450 case INDEX_op_shr_i32:
1451 if (const_args[2]) {
1452 tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
1453 } else {
1454 tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
1456 break;
1457 case INDEX_op_rotl_i32:
1458 if (const_args[2]) {
1459 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
1460 } else {
1461 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
1462 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
1463 tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
1465 break;
1466 case INDEX_op_rotr_i32:
1467 if (const_args[2]) {
1468 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
1469 } else {
1470 tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
1472 break;
1474 case INDEX_op_bswap16_i32:
1475 tcg_out_opc_reg(s, OPC_WSBH, args[0], 0, args[1]);
1476 break;
1477 case INDEX_op_bswap32_i32:
1478 tcg_out_opc_reg(s, OPC_WSBH, args[0], 0, args[1]);
1479 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[0], 16);
1480 break;
1482 case INDEX_op_ext8s_i32:
1483 tcg_out_opc_reg(s, OPC_SEB, args[0], 0, args[1]);
1484 break;
1485 case INDEX_op_ext16s_i32:
1486 tcg_out_opc_reg(s, OPC_SEH, args[0], 0, args[1]);
1487 break;
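/* INS encodes the field as msb (ofs + len - 1) in bits 15:11 and lsb
   (ofs) in bits 10:6, which is what the immediate constructed below
   provides.  */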
1489 case INDEX_op_deposit_i32:
1490 tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
1491 ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
1492 break;
1494 case INDEX_op_brcond_i32:
1495 tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
1496 break;
1497 case INDEX_op_brcond2_i32:
1498 tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
1499 break;
1501 case INDEX_op_movcond_i32:
1502 tcg_out_movcond(s, args[5], args[0], args[1], args[2], args[3]);
1503 break;
1505 case INDEX_op_setcond_i32:
1506 tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
1507 break;
1508 case INDEX_op_setcond2_i32:
1509 tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
1510 break;
1512 case INDEX_op_qemu_ld8u:
1513 tcg_out_qemu_ld(s, args, 0);
1514 break;
1515 case INDEX_op_qemu_ld8s:
1516 tcg_out_qemu_ld(s, args, 0 | 4);
1517 break;
1518 case INDEX_op_qemu_ld16u:
1519 tcg_out_qemu_ld(s, args, 1);
1520 break;
1521 case INDEX_op_qemu_ld16s:
1522 tcg_out_qemu_ld(s, args, 1 | 4);
1523 break;
1524 case INDEX_op_qemu_ld32:
1525 tcg_out_qemu_ld(s, args, 2);
1526 break;
1527 case INDEX_op_qemu_ld64:
1528 tcg_out_qemu_ld(s, args, 3);
1529 break;
1530 case INDEX_op_qemu_st8:
1531 tcg_out_qemu_st(s, args, 0);
1532 break;
1533 case INDEX_op_qemu_st16:
1534 tcg_out_qemu_st(s, args, 1);
1535 break;
1536 case INDEX_op_qemu_st32:
1537 tcg_out_qemu_st(s, args, 2);
1538 break;
1539 case INDEX_op_qemu_st64:
1540 tcg_out_qemu_st(s, args, 3);
1541 break;
1543 default:
1544 tcg_abort();
1548 static const TCGTargetOpDef mips_op_defs[] = {
1549 { INDEX_op_exit_tb, { } },
1550 { INDEX_op_goto_tb, { } },
1551 { INDEX_op_call, { "i" } },
1552 { INDEX_op_br, { } },
1554 { INDEX_op_mov_i32, { "r", "r" } },
1555 { INDEX_op_movi_i32, { "r" } },
1556 { INDEX_op_ld8u_i32, { "r", "r" } },
1557 { INDEX_op_ld8s_i32, { "r", "r" } },
1558 { INDEX_op_ld16u_i32, { "r", "r" } },
1559 { INDEX_op_ld16s_i32, { "r", "r" } },
1560 { INDEX_op_ld_i32, { "r", "r" } },
1561 { INDEX_op_st8_i32, { "rZ", "r" } },
1562 { INDEX_op_st16_i32, { "rZ", "r" } },
1563 { INDEX_op_st_i32, { "rZ", "r" } },
1565 { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
1566 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
1567 { INDEX_op_muls2_i32, { "r", "r", "rZ", "rZ" } },
1568 { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
1569 { INDEX_op_mulsh_i32, { "r", "rZ", "rZ" } },
1570 { INDEX_op_muluh_i32, { "r", "rZ", "rZ" } },
1571 { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
1572 { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
1573 { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
1574 { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
1575 { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },
1577 { INDEX_op_and_i32, { "r", "rZ", "rI" } },
1578 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
1579 { INDEX_op_not_i32, { "r", "rZ" } },
1580 { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
1581 { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },
1583 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
1584 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
1585 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
1586 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
1587 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
1589 { INDEX_op_bswap16_i32, { "r", "r" } },
1590 { INDEX_op_bswap32_i32, { "r", "r" } },
1592 { INDEX_op_ext8s_i32, { "r", "rZ" } },
1593 { INDEX_op_ext16s_i32, { "r", "rZ" } },
1595 { INDEX_op_deposit_i32, { "r", "0", "rZ" } },
1597 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
1598 { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rZ", "0" } },
1599 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
1600 { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },
1602 { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1603 { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1604 { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },
1606 #if TARGET_LONG_BITS == 32
1607 { INDEX_op_qemu_ld8u, { "L", "lZ" } },
1608 { INDEX_op_qemu_ld8s, { "L", "lZ" } },
1609 { INDEX_op_qemu_ld16u, { "L", "lZ" } },
1610 { INDEX_op_qemu_ld16s, { "L", "lZ" } },
1611 { INDEX_op_qemu_ld32, { "L", "lZ" } },
1612 { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
1614 { INDEX_op_qemu_st8, { "SZ", "SZ" } },
1615 { INDEX_op_qemu_st16, { "SZ", "SZ" } },
1616 { INDEX_op_qemu_st32, { "SZ", "SZ" } },
1617 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
1618 #else
1619 { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
1620 { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
1621 { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
1622 { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
1623 { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
1624 { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
1626 { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
1627 { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
1628 { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
1629 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
1630 #endif
1631 { -1 },
1634 static int tcg_target_callee_save_regs[] = {
1635 TCG_REG_S0, /* used for the global env (TCG_AREG0) */
1636 TCG_REG_S1,
1637 TCG_REG_S2,
1638 TCG_REG_S3,
1639 TCG_REG_S4,
1640 TCG_REG_S5,
1641 TCG_REG_S6,
1642 TCG_REG_S7,
1643 TCG_REG_FP,
1644 TCG_REG_RA, /* should be last for ABI compliance */
1647 /* The Linux kernel doesn't provide any information about the available
1648 instruction set. Probe it using a signal handler. */
1650 #include <signal.h>
1652 #ifndef use_movnz_instructions
1653 bool use_movnz_instructions = false;
1654 #endif
1656 #ifndef use_mips32_instructions
1657 bool use_mips32_instructions = false;
1658 #endif
1660 #ifndef use_mips32r2_instructions
1661 bool use_mips32r2_instructions = false;
1662 #endif
1664 static volatile sig_atomic_t got_sigill;
1666 static void sigill_handler(int signo, siginfo_t *si, void *data)
1668 /* Skip the faulty instruction */
1669 ucontext_t *uc = (ucontext_t *)data;
1670 uc->uc_mcontext.pc += 4;
1672 got_sigill = 1;
1675 static void tcg_target_detect_isa(void)
1677 struct sigaction sa_old, sa_new;
1679 memset(&sa_new, 0, sizeof(sa_new));
1680 sa_new.sa_flags = SA_SIGINFO;
1681 sa_new.sa_sigaction = sigill_handler;
1682 sigaction(SIGILL, &sa_new, &sa_old);
1684 /* Probe for movn/movz, necessary to implement movcond. */
1685 #ifndef use_movnz_instructions
1686 got_sigill = 0;
1687 asm volatile(".set push\n"
1688 ".set mips32\n"
1689 "movn $zero, $zero, $zero\n"
1690 "movz $zero, $zero, $zero\n"
1691 ".set pop\n"
1692 : : : );
1693 use_movnz_instructions = !got_sigill;
1694 #endif
1696 /* Probe for MIPS32 instructions. As no subsetting is allowed
1697 by the specification, it is only necessary to probe for one
1698 of the instructions. */
1699 #ifndef use_mips32_instructions
1700 got_sigill = 0;
1701 asm volatile(".set push\n"
1702 ".set mips32\n"
1703 "mul $zero, $zero\n"
1704 ".set pop\n"
1705 : : : );
1706 use_mips32_instructions = !got_sigill;
1707 #endif
1709 /* Probe for MIPS32r2 instructions if MIPS32 instructions are
1710 available. As no subsetting is allowed by the specification,
1711 it is only necessary to probe for one of the instructions. */
1712 #ifndef use_mips32r2_instructions
1713 if (use_mips32_instructions) {
1714 got_sigill = 0;
1715 asm volatile(".set push\n"
1716 ".set mips32r2\n"
1717 "seb $zero, $zero\n"
1718 ".set pop\n"
1719 : : : );
1720 use_mips32r2_instructions = !got_sigill;
1722 #endif
1724 sigaction(SIGILL, &sa_old, NULL);
1727 /* Generate global QEMU prologue and epilogue code */
1728 static void tcg_target_qemu_prologue(TCGContext *s)
1730 int i, frame_size;
1732 /* reserve some stack space, also for TCG temps. */
1733 frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1734 + TCG_STATIC_CALL_ARGS_SIZE
1735 + CPU_TEMP_BUF_NLONGS * sizeof(long);
1736 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1737 ~(TCG_TARGET_STACK_ALIGN - 1);
1738 tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1739 + TCG_STATIC_CALL_ARGS_SIZE,
1740 CPU_TEMP_BUF_NLONGS * sizeof(long));
1742 /* TB prologue */
1743 tcg_out_addi(s, TCG_REG_SP, -frame_size);
1744 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1745 tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1746 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1749 /* Call generated code */
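/* The move below executes in the JR delay slot: env (the first C
   argument) is copied into TCG_AREG0 while jumping to the TB whose
   address was passed in the second argument register.  tb_ret_addr then
   marks the start of the epilogue.  */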
1750 tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
1751 tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
1752 tb_ret_addr = s->code_ptr;
1754 /* TB epilogue */
1755 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1756 tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1757 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1760 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
1761 tcg_out_addi(s, TCG_REG_SP, frame_size);
1764 static void tcg_target_init(TCGContext *s)
1766 tcg_target_detect_isa();
1767 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
1768 tcg_regset_set(tcg_target_call_clobber_regs,
1769 (1 << TCG_REG_V0) |
1770 (1 << TCG_REG_V1) |
1771 (1 << TCG_REG_A0) |
1772 (1 << TCG_REG_A1) |
1773 (1 << TCG_REG_A2) |
1774 (1 << TCG_REG_A3) |
1775 (1 << TCG_REG_T1) |
1776 (1 << TCG_REG_T2) |
1777 (1 << TCG_REG_T3) |
1778 (1 << TCG_REG_T4) |
1779 (1 << TCG_REG_T5) |
1780 (1 << TCG_REG_T6) |
1781 (1 << TCG_REG_T7) |
1782 (1 << TCG_REG_T8) |
1783 (1 << TCG_REG_T9));
1785 tcg_regset_clear(s->reserved_regs);
1786 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
1787 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0); /* kernel use only */
1788 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1); /* kernel use only */
1789 tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT); /* internal use */
1790 tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0); /* internal use */
1791 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA); /* return address */
1792 tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP); /* stack pointer */
1793 tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP); /* global pointer */
1795 tcg_add_target_add_op_defs(mips_op_defs);