/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu-common.h"

//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_JALS     = OPC_JAL | 0x5,
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    OPC_JALXS    = OPC_JALX | 0x5,
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};
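/* Illustrative example: for the instruction word 0x8C820004 (lw v0, 4(a0)),
   MASK_OP_MAJOR(0x8C820004) keeps bits 31..26 and yields (0x23 << 26), i.e.
   OPC_LW; the remaining fields (rs = 4, rt = 2, offset = 4) are decoded
   separately from the lower bits. */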
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_JALRC    = OPC_JALR | (0x5 << 6),
    OPC_JALRS    = 0x10 | OPC_SPECIAL | (0x5 << 6),

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALS  = OPC_BLTZAL | 0x5, /* microMIPS */
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALS  = OPC_BGEZAL | 0x5, /* microMIPS */
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};

#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,          /* single fp */
    FMT_D = 17,          /* double fp */
    FMT_E = 18,          /* extended fp */
    FMT_Q = 19,          /* quad fp */
    FMT_W = 20,          /* 32-bit fixed */
    FMT_L = 21,          /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
};

#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];

#include "gen-icount.h"
#define gen_helper_0i(name, arg) do {                         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                 \
    gen_helper_##name(helper_tmp);                            \
    tcg_temp_free_i32(helper_tmp);                            \
    } while(0)

#define gen_helper_1i(name, arg1, arg2) do {                  \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                \
    gen_helper_##name(arg1, helper_tmp);                      \
    tcg_temp_free_i32(helper_tmp);                            \
    } while(0)

#define gen_helper_2i(name, arg1, arg2, arg3) do {            \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                \
    gen_helper_##name(arg1, arg2, helper_tmp);                \
    tcg_temp_free_i32(helper_tmp);                            \
    } while(0)

#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do {      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                \
    gen_helper_##name(arg1, arg2, arg3, helper_tmp);          \
    tcg_temp_free_i32(helper_tmp);                            \
    } while(0)
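/* These wrappers pass an immediate last argument to a helper as a TCGv_i32
   temporary.  For example, gen_helper_0i(raise_exception, EXCP_SC) expands to
   code that loads EXCP_SC into a temporary, calls
   gen_helper_raise_exception() with it, and frees the temporary again. */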
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition     */
    BS_EXCP     = 3, /* We reached an exception condition */
};

static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };

#ifdef MIPS_DEBUG_DISAS
#define MIPS_DEBUG(fmt, ...)                                      \
        qemu_log_mask(CPU_LOG_TB_IN_ASM,                          \
                      TARGET_FMT_lx ": %08x " fmt "\n",           \
                      ctx->pc, ctx->opcode , ## __VA_ARGS__)
#define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#define MIPS_DEBUG(fmt, ...) do { } while(0)
#define LOG_DISAS(...) do { } while (0)
#endif

#define MIPS_INVAL(op)                                                    \
do {                                                                      \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,        \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));         \
} while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}

/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        gen_load_fpr32(t0, reg & ~1);
        gen_load_fpr32(t1, reg | 1);
        tcg_gen_concat_i32_i64(t, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, t);
        gen_store_fpr32(t1, reg & ~1);
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_trunc_i64_i32(t1, t0);
        gen_store_fpr32(t1, reg | 1);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i64(t0);
    }
}
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0i(raise_exception, excp);
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, data reference in user mode
       with Status_UX = 0 should be casted to 32-bit and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_cp1_64bitmode(ctx);                                             \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break; \
    case  1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break; \
    case  2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break; \
    case  3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break; \
    case  4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break; \
    case  5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break; \
    case  6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break; \
    case  7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break; \
    case  8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break; \
    case  9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break; \
    case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break; \
    case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break; \
    case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break; \
    case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break; \
    case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break; \
    case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break; \
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#define OP_LD(insn,fname)                                                 \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)  \
{                                                                         \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                        \
}
OP_LD(lb,ld8s);
OP_LD(lbu,ld8u);
OP_LD(lh,ld16s);
OP_LD(lhu,ld16u);
OP_LD(lw,ld32s);
#if defined(TARGET_MIPS64)
OP_LD(lwu,ld32u);
OP_LD(ld,ld64);
#endif
#undef OP_LD

#define OP_ST(insn,fname)                                                  \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx)  \
{                                                                          \
    tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx);                        \
}
OP_ST(sb,st8);
OP_ST(sh,st16);
OP_ST(sw,st32);
#if defined(TARGET_MIPS64)
OP_ST(sd,st64);
#endif
#undef OP_ST

#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, lladdr));                \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUState, llval));                \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    gen_helper_2i(insn, ret, arg1, ctx->mem_idx);                          \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                    \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                                 \
    TCGv t0 = tcg_temp_new();                                                     \
    int l1 = gen_new_label();                                                     \
    int l2 = gen_new_label();                                                     \
                                                                                  \
    tcg_gen_andi_tl(t0, arg2, almask);                                            \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                                   \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUState, CP0_BadVAddr));               \
    generate_exception(ctx, EXCP_AdES);                                           \
    gen_set_label(l1);                                                            \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, lladdr));                       \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                                 \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                             \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, llreg));                        \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUState, llnewval));                   \
    gen_helper_0i(raise_exception, EXCP_SC);                                      \
    gen_set_label(l2);                                                            \
    tcg_gen_movi_tl(t0, 0);                                                       \
    gen_store_gpr(t0, rt);                                                        \
    tcg_temp_free(t0);                                                            \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                    \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                                 \
    TCGv t0 = tcg_temp_new();                                                     \
    gen_helper_3i(insn, t0, arg1, arg2, ctx->mem_idx);                            \
    gen_store_gpr(t0, rt);                                                        \
    tcg_temp_free(t0);                                                            \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
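/* In the CONFIG_USER_ONLY variant of OP_ST_ATOMIC there is no other CPU to
   race with, so SC is emulated by comparing the store address with the lladdr
   recorded at LL time: on a mismatch, 0 (failure) is written straight into
   rt, otherwise the new value and target register are stashed in the CPU
   state and EXCP_SC is raised so the conditional store can be completed
   outside generated code. */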
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}

static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
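/* pc_relative_pc() yields the base address used by LWPC/LDPC: if the load
   sits in a branch delay slot, the size of the branch (2 bytes for a 16-bit
   microMIPS branch, 4 otherwise) is subtracted so that the branch's own
   address is used, and the result is then word-aligned. */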
static void gen_ld (CPUState *env, DisasContext *ctx, uint32_t opc,
                    int rt, int base, int16_t offset)
{
    const char *opn = "ld";
    TCGv t0, t1;

    if (rt == 0 && env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPU we must perform the
           actual memory access. */
        MIPS_DEBUG("NOP");
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_LWU:
        save_cpu_state(ctx, 0);
        op_ld_lwu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LD:
        save_cpu_state(ctx, 0);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LLD:
        save_cpu_state(ctx, 1);
        op_ld_lld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LDL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LDR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LDPC:
        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
#endif
    case OPC_LWPC:
        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LW:
        save_cpu_state(ctx, 0);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LH:
        save_cpu_state(ctx, 0);
        op_ld_lh(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LHU:
        save_cpu_state(ctx, 0);
        op_ld_lhu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LB:
        save_cpu_state(ctx, 0);
        op_ld_lb(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LBU:
        save_cpu_state(ctx, 0);
        op_ld_lbu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LWL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LWR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LL:
        save_cpu_state(ctx, 1);
        op_ld_ll(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    const char *opn = "st";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SD:
        save_cpu_state(ctx, 0);
        op_st_sd(t1, t0, ctx);
        break;
    case OPC_SDL:
        save_cpu_state(ctx, 1);
        gen_helper_2i(sdl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SDR:
        save_cpu_state(ctx, 1);
        gen_helper_2i(sdr, t1, t0, ctx->mem_idx);
        break;
#endif
    case OPC_SW:
        save_cpu_state(ctx, 0);
        op_st_sw(t1, t0, ctx);
        break;
    case OPC_SH:
        save_cpu_state(ctx, 0);
        op_st_sh(t1, t0, ctx);
        break;
    case OPC_SB:
        save_cpu_state(ctx, 0);
        op_st_sb(t1, t0, ctx);
        break;
    case OPC_SWL:
        save_cpu_state(ctx, 1);
        gen_helper_2i(swl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SWR:
        save_cpu_state(ctx, 1);
        gen_helper_2i(swr, t1, t0, ctx->mem_idx);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    const char *opn = "st_cond";
    TCGv t0, t1;

    t0 = tcg_temp_local_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

    t1 = tcg_temp_local_new();
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SCD:
        save_cpu_state(ctx, 1);
        op_st_scd(t1, t0, rt, ctx);
        break;
#endif
    case OPC_SC:
        save_cpu_state(ctx, 1);
        op_st_sc(t1, t0, rt, ctx);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
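/* t0 and t1 are allocated as local temps because op_st_sc/op_st_scd may emit
   branches (see the user-only OP_ST_ATOMIC above), and ordinary TCG temps do
   not keep their value across a branch. */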
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    const char *opn = "flt_ldst";
    TCGv t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(fp0, ft);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv t1 = tcg_temp_new();

            gen_load_fpr32(fp0, ft);
            tcg_gen_extu_i32_tl(t1, fp0);
            tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
            tcg_temp_free(t1);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
            gen_store_fpr64(ctx, fp0, ft);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, ft);
            tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
            tcg_temp_free_i64(fp0);
        }
        break;
    default:
        generate_exception(ctx, EXCP_RI);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
    tcg_temp_free(t0);
}
static void gen_cop1_ldst(CPUState *env, DisasContext *ctx,
                          uint32_t op, int rt, int rs, int16_t imm)
{
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        gen_flt_ldst(ctx, op, rt, rs, imm);
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
/* Arithmetic with immediate operand */
static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
                           int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        return;
    }
    switch (opc) {
    case OPC_ADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);
            tcg_gen_ext32s_tl(t0, t0);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_gen_ext32s_tl(t0, t0);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        break;
    case OPC_ADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
}
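/* The overflow test above uses the identity: for t0 = t1 + imm, signed
   overflow occurred iff t1 and imm have the same sign while t0 has the
   opposite one, i.e. the sign bit of (t1 ^ ~imm) & (t0 ^ imm) is set.
   E.g. 0x7fffffff + 1 gives 0x80000000: both inputs are non-negative, the
   result is negative, so EXCP_OVERFLOW is raised and the destination
   register is not written. */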
/* Logic with immediate operand */
static void gen_logic_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
{
    target_ulong uimm;
    const char *opn = "imm logic";

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }
    uimm = (uint16_t)imm;
    switch (opc) {
    case OPC_ANDI:
        if (likely(rs != 0))
            tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], 0);
        break;
    case OPC_ORI:
        if (rs != 0)
            tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        break;
    case OPC_XORI:
        if (likely(rs != 0))
            tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        break;
    case OPC_LUI:
        tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
}
/* Set on less than with immediate operand */
static void gen_slt_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
    tcg_temp_free(t0);
}
/* Shifts with immediate operand */
static void gen_shift_imm(CPUState *env, DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = ((uint16_t)imm) & 0x1f;
    const char *opn = "imm shift";
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLL:
        tcg_gen_shli_tl(t0, t0, uimm);
        tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        break;
    case OPC_SRA:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SRL:
        if (uimm != 0) {
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
        } else {
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        }
        break;
    case OPC_ROTR:
        if (uimm != 0) {
            TCGv_i32 t1 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(t1, t0);
            tcg_gen_rotri_i32(t1, t1, uimm);
            tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
            tcg_temp_free_i32(t1);
        } else {
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLL:
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DSRA:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DSRL:
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DROTR:
        if (uimm != 0) {
            tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
        } else {
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
        break;
    case OPC_DSLL32:
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DSRA32:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DSRL32:
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DROTR32:
        tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
    tcg_temp_free(t0);
}
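/* The 32-bit forms above sign-extend their result (tcg_gen_ext32s_tl) so
   that, on a 64-bit target, the GPR always holds the canonical sign-extended
   image of a 32-bit value, as 32-bit MIPS operations require. */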
static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "arith";

    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
       && opc != OPC_DADD && opc != OPC_DSUB) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        return;
    }

    switch (opc) {
    case OPC_ADD:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_ADDU:
        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_SUB:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_sub_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t2, t1, t2);
            tcg_gen_xor_tl(t1, t0, t1);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_SUBU:
        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADD:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DADDU:
        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_DSUB:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_sub_tl(t0, t1, t2);
            tcg_gen_xor_tl(t2, t1, t2);
            tcg_gen_xor_tl(t1, t0, t1);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DSUBU:
        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
#endif
    case OPC_MUL:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
/* Conditional move */
static void gen_cond_move (CPUState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "cond move";
    int l1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        return;
    }

    l1 = gen_new_label();
    switch (opc) {
    case OPC_MOVN:
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
        break;
    case OPC_MOVZ:
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
        break;
    }
    if (rs != 0)
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    else
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    gen_set_label(l1);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
static void gen_logic (CPUState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "logic";

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }
    switch (opc) {
    case OPC_AND:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_NOR:
        if (rs != 0 && rt != 0) {
            tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
        }
        break;
    case OPC_OR:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_XOR:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
/* Set on lower than */
static void gen_slt (CPUState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "slt";
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_shift (CPUState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "shifts";
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLLV:
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shl_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SRAV:
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_SRLV:
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shr_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_ROTRV:
        {
            TCGv_i32 t2 = tcg_temp_new_i32();
            TCGv_i32 t3 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(t2, t0);
            tcg_gen_trunc_tl_i32(t3, t1);
            tcg_gen_andi_i32(t2, t2, 0x1f);
            tcg_gen_rotr_i32(t2, t3, t2);
            tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
            tcg_temp_free_i32(t2);
            tcg_temp_free_i32(t3);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLLV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DSRAV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DSRLV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DROTRV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Arithmetic on HI/LO registers */
static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
{
    const char *opn = "hilo";

    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        MIPS_DEBUG("NOP");
        return;
    }
    switch (opc) {
    case OPC_MFHI:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
        break;
    case OPC_MFLO:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
        break;
    case OPC_MTHI:
        if (reg != 0)
            tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
        else
            tcg_gen_movi_tl(cpu_HI[0], 0);
        break;
    case OPC_MTLO:
        if (reg != 0)
            tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
        else
            tcg_gen_movi_tl(cpu_LO[0], 0);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s", opn, regnames[reg]);
}
static void gen_muldiv (DisasContext *ctx, uint32_t opc,
                        int rs, int rt)
{
    const char *opn = "mul/div";
    TCGv t0, t1;

    switch (opc) {
    case OPC_DIV:
    case OPC_DIVU:
#if defined(TARGET_MIPS64)
    case OPC_DDIV:
    case OPC_DDIVU:
#endif
        t0 = tcg_temp_local_new();
        t1 = tcg_temp_local_new();
        break;
    default:
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        break;
    }

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_DIV:
        {
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_mov_tl(cpu_LO[0], t0);
            tcg_gen_movi_tl(cpu_HI[0], 0);
            tcg_gen_br(l1);
            gen_set_label(l2);
            tcg_gen_div_tl(cpu_LO[0], t0, t1);
            tcg_gen_rem_tl(cpu_HI[0], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
            tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
            gen_set_label(l1);
        }
        break;
    case OPC_DIVU:
        {
            int l1 = gen_new_label();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_divu_tl(cpu_LO[0], t0, t1);
            tcg_gen_remu_tl(cpu_HI[0], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
            tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
            gen_set_label(l1);
        }
        break;
    case OPC_MULT:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(t2, t0);
            tcg_gen_ext_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
    case OPC_MULTU:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(t2, t0);
            tcg_gen_extu_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DDIV:
        {
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_mov_tl(cpu_LO[0], t0);
            tcg_gen_movi_tl(cpu_HI[0], 0);
            tcg_gen_br(l1);
            gen_set_label(l2);
            tcg_gen_div_i64(cpu_LO[0], t0, t1);
            tcg_gen_rem_i64(cpu_HI[0], t0, t1);
            gen_set_label(l1);
        }
        break;
    case OPC_DDIVU:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_divu_i64(cpu_LO[0], t0, t1);
            tcg_gen_remu_i64(cpu_HI[0], t0, t1);
            gen_set_label(l1);
        }
        break;
    case OPC_DMULT:
        gen_helper_dmult(t0, t1);
        break;
    case OPC_DMULTU:
        gen_helper_dmultu(t0, t1);
        break;
#endif
    case OPC_MADD:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(t2, t0);
            tcg_gen_ext_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
            tcg_gen_add_i64(t2, t2, t3);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
    case OPC_MADDU:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(t2, t0);
            tcg_gen_extu_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
            tcg_gen_add_i64(t2, t2, t3);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
    case OPC_MSUB:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(t2, t0);
            tcg_gen_ext_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
            tcg_gen_sub_i64(t2, t3, t2);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
    case OPC_MSUBU:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(t2, t0);
            tcg_gen_extu_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
            tcg_gen_sub_i64(t2, t3, t2);
            tcg_temp_free_i64(t3);
            tcg_gen_trunc_i64_tl(t0, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_tl(t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        break;
    default:
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
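/* The divisions above are guarded so that the host never executes a divide by
   zero or an INT_MIN / -1 division (which could trap): division by zero
   leaves HI/LO untouched here, and the INT_MIN / -1 case is handled
   explicitly as LO = INT_MIN, HI = 0 instead of going through the generic
   div/rem path. */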
2252 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2253 int rd
, int rs
, int rt
)
2255 const char *opn
= "mul vr54xx";
2256 TCGv t0
= tcg_temp_new();
2257 TCGv t1
= tcg_temp_new();
2259 gen_load_gpr(t0
, rs
);
2260 gen_load_gpr(t1
, rt
);
2263 case OPC_VR54XX_MULS
:
2264 gen_helper_muls(t0
, t0
, t1
);
2267 case OPC_VR54XX_MULSU
:
2268 gen_helper_mulsu(t0
, t0
, t1
);
2271 case OPC_VR54XX_MACC
:
2272 gen_helper_macc(t0
, t0
, t1
);
2275 case OPC_VR54XX_MACCU
:
2276 gen_helper_maccu(t0
, t0
, t1
);
2279 case OPC_VR54XX_MSAC
:
2280 gen_helper_msac(t0
, t0
, t1
);
2283 case OPC_VR54XX_MSACU
:
2284 gen_helper_msacu(t0
, t0
, t1
);
2287 case OPC_VR54XX_MULHI
:
2288 gen_helper_mulhi(t0
, t0
, t1
);
2291 case OPC_VR54XX_MULHIU
:
2292 gen_helper_mulhiu(t0
, t0
, t1
);
2295 case OPC_VR54XX_MULSHI
:
2296 gen_helper_mulshi(t0
, t0
, t1
);
2299 case OPC_VR54XX_MULSHIU
:
2300 gen_helper_mulshiu(t0
, t0
, t1
);
2303 case OPC_VR54XX_MACCHI
:
2304 gen_helper_macchi(t0
, t0
, t1
);
2307 case OPC_VR54XX_MACCHIU
:
2308 gen_helper_macchiu(t0
, t0
, t1
);
2311 case OPC_VR54XX_MSACHI
:
2312 gen_helper_msachi(t0
, t0
, t1
);
2315 case OPC_VR54XX_MSACHIU
:
2316 gen_helper_msachiu(t0
, t0
, t1
);
2320 MIPS_INVAL("mul vr54xx");
2321 generate_exception(ctx
, EXCP_RI
);
2324 gen_store_gpr(t0
, rd
);
2325 (void)opn
; /* avoid a compiler warning */
2326 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
static void gen_cl (DisasContext *ctx, uint32_t opc,
                    int rd, int rs)
{
    const char *opn = "CLx";
    TCGv t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_CLO:
        gen_helper_clo(cpu_gpr[rd], t0);
        break;
    case OPC_CLZ:
        gen_helper_clz(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
        gen_helper_dclo(cpu_gpr[rd], t0);
        break;
    case OPC_DCLZ:
        gen_helper_dclz(cpu_gpr[rd], t0);
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
    tcg_temp_free(t0);
}
2371 /* Godson integer instructions */
2372 static void gen_loongson_integer (DisasContext
*ctx
, uint32_t opc
,
2373 int rd
, int rs
, int rt
)
2375 const char *opn
= "loongson";
2387 case OPC_MULTU_G_2E
:
2388 case OPC_MULTU_G_2F
:
2389 #if defined(TARGET_MIPS64)
2390 case OPC_DMULT_G_2E
:
2391 case OPC_DMULT_G_2F
:
2392 case OPC_DMULTU_G_2E
:
2393 case OPC_DMULTU_G_2F
:
2395 t0
= tcg_temp_new();
2396 t1
= tcg_temp_new();
2399 t0
= tcg_temp_local_new();
2400 t1
= tcg_temp_local_new();
2404 gen_load_gpr(t0
, rs
);
2405 gen_load_gpr(t1
, rt
);
2410 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2411 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2414 case OPC_MULTU_G_2E
:
2415 case OPC_MULTU_G_2F
:
2416 tcg_gen_ext32u_tl(t0
, t0
);
2417 tcg_gen_ext32u_tl(t1
, t1
);
2418 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2419 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2425 int l1
= gen_new_label();
2426 int l2
= gen_new_label();
2427 int l3
= gen_new_label();
2428 tcg_gen_ext32s_tl(t0
, t0
);
2429 tcg_gen_ext32s_tl(t1
, t1
);
2430 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2431 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2434 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2435 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2436 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2439 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2440 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2448 int l1
= gen_new_label();
2449 int l2
= gen_new_label();
2450 tcg_gen_ext32u_tl(t0
, t0
);
2451 tcg_gen_ext32u_tl(t1
, t1
);
2452 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2453 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2456 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2457 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2465 int l1
= gen_new_label();
2466 int l2
= gen_new_label();
2467 int l3
= gen_new_label();
2468 tcg_gen_ext32u_tl(t0
, t0
);
2469 tcg_gen_ext32u_tl(t1
, t1
);
2470 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2471 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2472 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2474 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2477 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2478 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2486 int l1
= gen_new_label();
2487 int l2
= gen_new_label();
2488 tcg_gen_ext32u_tl(t0
, t0
);
2489 tcg_gen_ext32u_tl(t1
, t1
);
2490 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2491 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2494 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2495 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2500 #if defined(TARGET_MIPS64)
2501 case OPC_DMULT_G_2E
:
2502 case OPC_DMULT_G_2F
:
2503 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2506 case OPC_DMULTU_G_2E
:
2507 case OPC_DMULTU_G_2F
:
2508 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2514 int l1
= gen_new_label();
2515 int l2
= gen_new_label();
2516 int l3
= gen_new_label();
2517 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2518 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2521 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2522 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2523 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2526 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2531 case OPC_DDIVU_G_2E
:
2532 case OPC_DDIVU_G_2F
:
2534 int l1
= gen_new_label();
2535 int l2
= gen_new_label();
2536 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2537 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2540 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2548 int l1
= gen_new_label();
2549 int l2
= gen_new_label();
2550 int l3
= gen_new_label();
2551 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2552 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2553 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2555 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2558 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2563 case OPC_DMODU_G_2E
:
2564 case OPC_DMODU_G_2F
:
2566 int l1
= gen_new_label();
2567 int l2
= gen_new_label();
2568 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2569 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2572 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2580 (void)opn
; /* avoid a compiler warning */
2581 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    int cond;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    cond = 0;
    /* Load needed operands */
    switch (opc) {
    case OPC_TEQ:
    case OPC_TGE:
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TNE:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            cond = 1;
        }
        break;
    case OPC_TEQI:
    case OPC_TGEI:
    case OPC_TGEIU:
    case OPC_TLTI:
    case OPC_TLTIU:
    case OPC_TNEI:
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
            cond = 1;
        }
        break;
    }
    if (cond == 0) {
        switch (opc) {
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            /* Always trap */
            generate_exception(ctx, EXCP_TRAP);
            break;
        case OPC_TLT:   /* rs < rs  */
        case OPC_TLTI:  /* r0 < 0   */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0  unsigned */
        case OPC_TNE:   /* rs != rs */
        case OPC_TNEI:  /* r0 != 0  */
            /* Never trap: treat as NOP. */
            break;
        }
    } else {
        int l1 = gen_new_label();

        /* Branch over the trap when the trap condition does not hold. */
        switch (opc) {
        case OPC_TEQ:
        case OPC_TEQI:
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            break;
        case OPC_TGE:
        case OPC_TGEI:
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            break;
        case OPC_TGEU:
        case OPC_TGEIU:
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            break;
        case OPC_TLT:
        case OPC_TLTI:
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            break;
        case OPC_TLTU:
        case OPC_TLTIU:
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            break;
        case OPC_TNE:
        case OPC_TNEI:
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
            break;
        }
        generate_exception(ctx, EXCP_TRAP);
        gen_set_label(l1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;

    tb = ctx->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        gen_save_pc(dest);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_0i(raise_exception, EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
    }
}
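
/*
 * Background on gen_goto_tb(): when the branch target lies on the same
 * guest page as the current TB and single-stepping is off, the TB is
 * chained directly (tcg_gen_exit_tb() with the TB pointer plus the jump
 * slot index n), so execution continues without returning to the main
 * loop.  Chaining is restricted to the same page because the mapping of
 * other pages may change.  Otherwise the code exits with 0, and under
 * single-stepping it raises EXCP_DEBUG first.  This is a summary of the
 * code above, not additional behaviour.
 */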
2699 /* Branches (before delay slot) */
2700 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
2702 int rs
, int rt
, int32_t offset
)
2704 target_ulong btgt
= -1;
2706 int bcond_compute
= 0;
2707 TCGv t0
= tcg_temp_new();
2708 TCGv t1
= tcg_temp_new();
2710 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2711 #ifdef MIPS_DEBUG_DISAS
2712 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
2714 generate_exception(ctx
, EXCP_RI
);
2718 /* Load needed operands */
2724 /* Compare two registers */
2726 gen_load_gpr(t0
, rs
);
2727 gen_load_gpr(t1
, rt
);
2730 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
2746 /* Compare to zero */
2748 gen_load_gpr(t0
, rs
);
2751 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
2758 /* Jump to immediate */
2759 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
2765 /* Jump to register */
2766 if (offset
!= 0 && offset
!= 16) {
2767 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2768 others are reserved. */
2769 MIPS_INVAL("jump hint");
2770 generate_exception(ctx
, EXCP_RI
);
2773 gen_load_gpr(btarget
, rs
);
2776 MIPS_INVAL("branch/jump");
2777 generate_exception(ctx
, EXCP_RI
);
2780 if (bcond_compute
== 0) {
2781 /* No condition to be computed */
2783 case OPC_BEQ
: /* rx == rx */
2784 case OPC_BEQL
: /* rx == rx likely */
2785 case OPC_BGEZ
: /* 0 >= 0 */
2786 case OPC_BGEZL
: /* 0 >= 0 likely */
2787 case OPC_BLEZ
: /* 0 <= 0 */
2788 case OPC_BLEZL
: /* 0 <= 0 likely */
2790 ctx
->hflags
|= MIPS_HFLAG_B
;
2791 MIPS_DEBUG("balways");
2794 case OPC_BGEZAL
: /* 0 >= 0 */
2795 case OPC_BGEZALL
: /* 0 >= 0 likely */
2796 ctx
->hflags
|= (opc
== OPC_BGEZALS
2798 : MIPS_HFLAG_BDS32
);
2799 /* Always take and link */
2801 ctx
->hflags
|= MIPS_HFLAG_B
;
2802 MIPS_DEBUG("balways and link");
2804 case OPC_BNE
: /* rx != rx */
2805 case OPC_BGTZ
: /* 0 > 0 */
2806 case OPC_BLTZ
: /* 0 < 0 */
2808 MIPS_DEBUG("bnever (NOP)");
2811 case OPC_BLTZAL
: /* 0 < 0 */
2812 ctx
->hflags
|= (opc
== OPC_BLTZALS
2814 : MIPS_HFLAG_BDS32
);
2815 /* Handle as an unconditional branch to get correct delay
2818 btgt
= ctx
->pc
+ (opc
== OPC_BLTZALS
? 6 : 8);
2819 ctx
->hflags
|= MIPS_HFLAG_B
;
2820 MIPS_DEBUG("bnever and link");
2822 case OPC_BLTZALL
: /* 0 < 0 likely */
2823 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
2824 /* Skip the instruction in the delay slot */
2825 MIPS_DEBUG("bnever, link and skip");
2828 case OPC_BNEL
: /* rx != rx likely */
2829 case OPC_BGTZL
: /* 0 > 0 likely */
2830 case OPC_BLTZL
: /* 0 < 0 likely */
2831 /* Skip the instruction in the delay slot */
2832 MIPS_DEBUG("bnever and skip");
2836 ctx
->hflags
|= MIPS_HFLAG_B
;
2837 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
2841 ctx
->hflags
|= MIPS_HFLAG_BX
;
2846 ctx
->hflags
|= MIPS_HFLAG_B
;
2847 ctx
->hflags
|= ((opc
== OPC_JALS
|| opc
== OPC_JALXS
)
2849 : MIPS_HFLAG_BDS32
);
2850 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
2853 ctx
->hflags
|= MIPS_HFLAG_BR
;
2854 if (insn_bytes
== 4)
2855 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
2856 MIPS_DEBUG("jr %s", regnames
[rs
]);
2862 ctx
->hflags
|= MIPS_HFLAG_BR
;
2863 ctx
->hflags
|= (opc
== OPC_JALRS
2865 : MIPS_HFLAG_BDS32
);
2866 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
2869 MIPS_INVAL("branch/jump");
2870 generate_exception(ctx
, EXCP_RI
);
2876 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
2877 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
2878 regnames
[rs
], regnames
[rt
], btgt
);
2881 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
2882 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
2883 regnames
[rs
], regnames
[rt
], btgt
);
2886 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
2887 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
2888 regnames
[rs
], regnames
[rt
], btgt
);
2891 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
2892 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
2893 regnames
[rs
], regnames
[rt
], btgt
);
2896 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
2897 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2900 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
2901 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2905 ctx
->hflags
|= (opc
== OPC_BGEZALS
2907 : MIPS_HFLAG_BDS32
);
2908 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
2909 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2913 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
2915 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2918 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
2919 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2922 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
2923 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2926 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
2927 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2930 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
2931 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2934 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
2935 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2938 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
2939 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2943 ctx
->hflags
|= (opc
== OPC_BLTZALS
2945 : MIPS_HFLAG_BDS32
);
2946 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
2948 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2950 ctx
->hflags
|= MIPS_HFLAG_BC
;
2953 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
2955 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2957 ctx
->hflags
|= MIPS_HFLAG_BL
;
2960 MIPS_INVAL("conditional branch/jump");
2961 generate_exception(ctx
, EXCP_RI
);
2965 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
2966 blink
, ctx
->hflags
, btgt
);
2968 ctx
->btarget
= btgt
;
2970 int post_delay
= insn_bytes
;
2971 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
2973 if (opc
!= OPC_JALRC
)
2974 post_delay
+= ((ctx
->hflags
& MIPS_HFLAG_BDS16
) ? 2 : 4);
2976 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
2980 if (insn_bytes
== 2)
2981 ctx
->hflags
|= MIPS_HFLAG_B16
;
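
/*
 * A note on the branch bookkeeping in gen_compute_branch() above: the
 * branch itself is not emitted here.  The function only records the
 * branch kind in ctx->hflags (MIPS_HFLAG_B for unconditional branches,
 * MIPS_HFLAG_BC for conditional ones, MIPS_HFLAG_BL for branch-likely,
 * MIPS_HFLAG_BR for jumps through a register), stores the target in
 * ctx->btarget or the bcond condition global, writes the link register
 * when needed, and tags the delay-slot size (MIPS_HFLAG_BDS16 /
 * MIPS_HFLAG_BDS32).  The delay-slot instruction is translated next and
 * the branch is resolved afterwards from these flags.
 */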
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    target_ulong mask;

    gen_load_gpr(t1, rs);
    switch (opc) {
    case OPC_EXT:
        tcg_gen_shri_tl(t0, t1, lsb);
        if (msb != 31) {
            tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
        } else {
            tcg_gen_ext32s_tl(t0, t0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM:
        tcg_gen_shri_tl(t0, t1, lsb);
        if (msb != 31) {
            tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
        }
        break;
    case OPC_DEXTU:
        tcg_gen_shri_tl(t0, t1, lsb + 32);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        break;
    case OPC_DEXT:
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        break;
#endif
    case OPC_INS:
        mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DINSM:
        mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
    case OPC_DINSU:
        mask = ((1ULL << (msb - lsb + 1)) - 1) << (lsb + 32);
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb + 32);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
    case OPC_DINS:
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
#endif
    default:
        MIPS_INVAL("bitops");
        generate_exception(ctx, EXCP_RI);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
        return;
    }
    gen_store_gpr(t0, rt);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
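
/*
 * Worked example for the INS path above (values chosen for illustration):
 * with lsb = 4 and msb = 11 the field is msb - lsb + 1 = 8 bits wide, so
 *     mask = ((1 << 8) - 1) << 4 = 0x00000FF0
 * rt is first cleared with ~mask, rs is shifted left by lsb and masked,
 * and the two are OR-ed together, i.e. bits 11..4 of rt are replaced by
 * bits 7..0 of rs while every other bit of rt is preserved.
 */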
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bshfl");
        generate_exception(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
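
/*
 * Worked example for the WSBH case above: the byte swap within each
 * halfword is done with two masked shifts rather than per-byte extracts.
 * For t0 = 0x11223344:
 *     t1 = (t0 >> 8) & 0x00FF00FF = 0x00110033
 *     t0 = (t0 << 8) & 0xFF00FF00 = 0x22004400
 *     t0 | t1                     = 0x22114433
 * which swaps the bytes of each 16-bit half independently.
 */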
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}

static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
{
    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_st_tl(arg, cpu_env, off);
}
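
/*
 * The four helpers above exist because many CP0 registers are kept as
 * 32-bit fields inside CPUState while TCG operates on target_ulong
 * values: gen_mfc0_load32()/gen_mtc0_store32() go through a temporary
 * TCGv_i32 and sign-extend on the way out, and the *64 variants load or
 * store a full target_ulong but still sign-extend, matching the
 * architectural behaviour of MFC0 on a 64-bit core.
 */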
3181 static void gen_mfc0 (CPUState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3183 const char *rn
= "invalid";
3186 check_insn(env
, ctx
, ISA_MIPS32
);
3192 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Index
));
3196 check_insn(env
, ctx
, ASE_MT
);
3197 gen_helper_mfc0_mvpcontrol(arg
);
3201 check_insn(env
, ctx
, ASE_MT
);
3202 gen_helper_mfc0_mvpconf0(arg
);
3206 check_insn(env
, ctx
, ASE_MT
);
3207 gen_helper_mfc0_mvpconf1(arg
);
3217 gen_helper_mfc0_random(arg
);
3221 check_insn(env
, ctx
, ASE_MT
);
3222 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEControl
));
3226 check_insn(env
, ctx
, ASE_MT
);
3227 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEConf0
));
3231 check_insn(env
, ctx
, ASE_MT
);
3232 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEConf1
));
3236 check_insn(env
, ctx
, ASE_MT
);
3237 gen_mfc0_load64(arg
, offsetof(CPUState
, CP0_YQMask
));
3241 check_insn(env
, ctx
, ASE_MT
);
3242 gen_mfc0_load64(arg
, offsetof(CPUState
, CP0_VPESchedule
));
3246 check_insn(env
, ctx
, ASE_MT
);
3247 gen_mfc0_load64(arg
, offsetof(CPUState
, CP0_VPEScheFBack
));
3248 rn
= "VPEScheFBack";
3251 check_insn(env
, ctx
, ASE_MT
);
3252 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEOpt
));
3262 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryLo0
));
3263 tcg_gen_ext32s_tl(arg
, arg
);
3267 check_insn(env
, ctx
, ASE_MT
);
3268 gen_helper_mfc0_tcstatus(arg
);
3272 check_insn(env
, ctx
, ASE_MT
);
3273 gen_helper_mfc0_tcbind(arg
);
3277 check_insn(env
, ctx
, ASE_MT
);
3278 gen_helper_mfc0_tcrestart(arg
);
3282 check_insn(env
, ctx
, ASE_MT
);
3283 gen_helper_mfc0_tchalt(arg
);
3287 check_insn(env
, ctx
, ASE_MT
);
3288 gen_helper_mfc0_tccontext(arg
);
3292 check_insn(env
, ctx
, ASE_MT
);
3293 gen_helper_mfc0_tcschedule(arg
);
3297 check_insn(env
, ctx
, ASE_MT
);
3298 gen_helper_mfc0_tcschefback(arg
);
3308 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryLo1
));
3309 tcg_gen_ext32s_tl(arg
, arg
);
3319 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_Context
));
3320 tcg_gen_ext32s_tl(arg
, arg
);
3324 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3325 rn
= "ContextConfig";
3334 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PageMask
));
3338 check_insn(env
, ctx
, ISA_MIPS32R2
);
3339 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PageGrain
));
3349 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Wired
));
3353 check_insn(env
, ctx
, ISA_MIPS32R2
);
3354 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf0
));
3358 check_insn(env
, ctx
, ISA_MIPS32R2
);
3359 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf1
));
3363 check_insn(env
, ctx
, ISA_MIPS32R2
);
3364 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf2
));
3368 check_insn(env
, ctx
, ISA_MIPS32R2
);
3369 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf3
));
3373 check_insn(env
, ctx
, ISA_MIPS32R2
);
3374 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf4
));
3384 check_insn(env
, ctx
, ISA_MIPS32R2
);
3385 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_HWREna
));
3395 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_BadVAddr
));
3396 tcg_gen_ext32s_tl(arg
, arg
);
3406 /* Mark as an IO operation because we read the time. */
3409 gen_helper_mfc0_count(arg
);
3413 /* Break the TB to be able to take timer interrupts immediately
3414 after reading count. */
3415 ctx
->bstate
= BS_STOP
;
3418 /* 6,7 are implementation dependent */
3426 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryHi
));
3427 tcg_gen_ext32s_tl(arg
, arg
);
3437 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Compare
));
3440 /* 6,7 are implementation dependent */
3448 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Status
));
3452 check_insn(env
, ctx
, ISA_MIPS32R2
);
3453 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_IntCtl
));
3457 check_insn(env
, ctx
, ISA_MIPS32R2
);
3458 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSCtl
));
3462 check_insn(env
, ctx
, ISA_MIPS32R2
);
3463 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSMap
));
3473 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Cause
));
3483 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
3484 tcg_gen_ext32s_tl(arg
, arg
);
3494 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PRid
));
3498 check_insn(env
, ctx
, ISA_MIPS32R2
);
3499 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_EBase
));
3509 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config0
));
3513 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config1
));
3517 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config2
));
3521 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config3
));
3524 /* 4,5 are reserved */
3525 /* 6,7 are implementation dependent */
3527 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config6
));
3531 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config7
));
3541 gen_helper_mfc0_lladdr(arg
);
3551 gen_helper_1i(mfc0_watchlo
, arg
, sel
);
3561 gen_helper_1i(mfc0_watchhi
, arg
, sel
);
3571 #if defined(TARGET_MIPS64)
3572 check_insn(env
, ctx
, ISA_MIPS3
);
3573 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_XContext
));
3574 tcg_gen_ext32s_tl(arg
, arg
);
3583 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3586 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Framemask
));
3594 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
3600 gen_helper_mfc0_debug(arg
); /* EJTAG support */
3604 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3605 rn
= "TraceControl";
3608 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3609 rn
= "TraceControl2";
3612 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
3613 rn
= "UserTraceData";
3616 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
3627 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
3628 tcg_gen_ext32s_tl(arg
, arg
);
3638 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Performance0
));
3639 rn
= "Performance0";
3642 // gen_helper_mfc0_performance1(arg);
3643 rn
= "Performance1";
3646 // gen_helper_mfc0_performance2(arg);
3647 rn
= "Performance2";
3650 // gen_helper_mfc0_performance3(arg);
3651 rn
= "Performance3";
3654 // gen_helper_mfc0_performance4(arg);
3655 rn
= "Performance4";
3658 // gen_helper_mfc0_performance5(arg);
3659 rn
= "Performance5";
3662 // gen_helper_mfc0_performance6(arg);
3663 rn
= "Performance6";
3666 // gen_helper_mfc0_performance7(arg);
3667 rn
= "Performance7";
3674 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3680 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3693 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_TagLo
));
3700 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DataLo
));
3713 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_TagHi
));
3720 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DataHi
));
3730 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
3731 tcg_gen_ext32s_tl(arg
, arg
);
3742 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DESAVE
));
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    return;

die:
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
}
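
/*
 * Shape of gen_mfc0() above: the outer switch selects the CP0 register
 * number and the inner switches select the "sel" field.  Registers with
 * no read side effects are fetched straight from CPUState with
 * gen_mfc0_load32()/tcg_gen_ld_tl(), while registers that need extra
 * work (Random, Count, the MT-ASE TC* registers, LLAddr, Debug, ...)
 * go through gen_helper_mfc0_* calls.  Reading Count also ends the TB
 * (BS_STOP) so a pending timer interrupt can be taken right after.
 */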
3761 static void gen_mtc0 (CPUState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3763 const char *rn
= "invalid";
3766 check_insn(env
, ctx
, ISA_MIPS32
);
3775 gen_helper_mtc0_index(arg
);
3779 check_insn(env
, ctx
, ASE_MT
);
3780 gen_helper_mtc0_mvpcontrol(arg
);
3784 check_insn(env
, ctx
, ASE_MT
);
3789 check_insn(env
, ctx
, ASE_MT
);
3804 check_insn(env
, ctx
, ASE_MT
);
3805 gen_helper_mtc0_vpecontrol(arg
);
3809 check_insn(env
, ctx
, ASE_MT
);
3810 gen_helper_mtc0_vpeconf0(arg
);
3814 check_insn(env
, ctx
, ASE_MT
);
3815 gen_helper_mtc0_vpeconf1(arg
);
3819 check_insn(env
, ctx
, ASE_MT
);
3820 gen_helper_mtc0_yqmask(arg
);
3824 check_insn(env
, ctx
, ASE_MT
);
3825 gen_mtc0_store64(arg
, offsetof(CPUState
, CP0_VPESchedule
));
3829 check_insn(env
, ctx
, ASE_MT
);
3830 gen_mtc0_store64(arg
, offsetof(CPUState
, CP0_VPEScheFBack
));
3831 rn
= "VPEScheFBack";
3834 check_insn(env
, ctx
, ASE_MT
);
3835 gen_helper_mtc0_vpeopt(arg
);
3845 gen_helper_mtc0_entrylo0(arg
);
3849 check_insn(env
, ctx
, ASE_MT
);
3850 gen_helper_mtc0_tcstatus(arg
);
3854 check_insn(env
, ctx
, ASE_MT
);
3855 gen_helper_mtc0_tcbind(arg
);
3859 check_insn(env
, ctx
, ASE_MT
);
3860 gen_helper_mtc0_tcrestart(arg
);
3864 check_insn(env
, ctx
, ASE_MT
);
3865 gen_helper_mtc0_tchalt(arg
);
3869 check_insn(env
, ctx
, ASE_MT
);
3870 gen_helper_mtc0_tccontext(arg
);
3874 check_insn(env
, ctx
, ASE_MT
);
3875 gen_helper_mtc0_tcschedule(arg
);
3879 check_insn(env
, ctx
, ASE_MT
);
3880 gen_helper_mtc0_tcschefback(arg
);
3890 gen_helper_mtc0_entrylo1(arg
);
3900 gen_helper_mtc0_context(arg
);
3904 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
3905 rn
= "ContextConfig";
3914 gen_helper_mtc0_pagemask(arg
);
3918 check_insn(env
, ctx
, ISA_MIPS32R2
);
3919 gen_helper_mtc0_pagegrain(arg
);
3929 gen_helper_mtc0_wired(arg
);
3933 check_insn(env
, ctx
, ISA_MIPS32R2
);
3934 gen_helper_mtc0_srsconf0(arg
);
3938 check_insn(env
, ctx
, ISA_MIPS32R2
);
3939 gen_helper_mtc0_srsconf1(arg
);
3943 check_insn(env
, ctx
, ISA_MIPS32R2
);
3944 gen_helper_mtc0_srsconf2(arg
);
3948 check_insn(env
, ctx
, ISA_MIPS32R2
);
3949 gen_helper_mtc0_srsconf3(arg
);
3953 check_insn(env
, ctx
, ISA_MIPS32R2
);
3954 gen_helper_mtc0_srsconf4(arg
);
3964 check_insn(env
, ctx
, ISA_MIPS32R2
);
3965 gen_helper_mtc0_hwrena(arg
);
3979 gen_helper_mtc0_count(arg
);
3982 /* 6,7 are implementation dependent */
3990 gen_helper_mtc0_entryhi(arg
);
4000 gen_helper_mtc0_compare(arg
);
4003 /* 6,7 are implementation dependent */
4011 save_cpu_state(ctx
, 1);
4012 gen_helper_mtc0_status(arg
);
4013 /* BS_STOP isn't good enough here, hflags may have changed. */
4014 gen_save_pc(ctx
->pc
+ 4);
4015 ctx
->bstate
= BS_EXCP
;
4019 check_insn(env
, ctx
, ISA_MIPS32R2
);
4020 gen_helper_mtc0_intctl(arg
);
4021 /* Stop translation as we may have switched the execution mode */
4022 ctx
->bstate
= BS_STOP
;
4026 check_insn(env
, ctx
, ISA_MIPS32R2
);
4027 gen_helper_mtc0_srsctl(arg
);
4028 /* Stop translation as we may have switched the execution mode */
4029 ctx
->bstate
= BS_STOP
;
4033 check_insn(env
, ctx
, ISA_MIPS32R2
);
4034 gen_mtc0_store32(arg
, offsetof(CPUState
, CP0_SRSMap
));
4035 /* Stop translation as we may have switched the execution mode */
4036 ctx
->bstate
= BS_STOP
;
4046 save_cpu_state(ctx
, 1);
4047 gen_helper_mtc0_cause(arg
);
4057 gen_mtc0_store64(arg
, offsetof(CPUState
, CP0_EPC
));
4071 check_insn(env
, ctx
, ISA_MIPS32R2
);
4072 gen_helper_mtc0_ebase(arg
);
4082 gen_helper_mtc0_config0(arg
);
4084 /* Stop translation as we may have switched the execution mode */
4085 ctx
->bstate
= BS_STOP
;
4088 /* ignored, read only */
4092 gen_helper_mtc0_config2(arg
);
4094 /* Stop translation as we may have switched the execution mode */
4095 ctx
->bstate
= BS_STOP
;
4098 /* ignored, read only */
4101 /* 4,5 are reserved */
4102 /* 6,7 are implementation dependent */
4112 rn
= "Invalid config selector";
4119 gen_helper_mtc0_lladdr(arg
);
4129 gen_helper_1i(mtc0_watchlo
, arg
, sel
);
4139 gen_helper_1i(mtc0_watchhi
, arg
, sel
);
4149 #if defined(TARGET_MIPS64)
4150 check_insn(env
, ctx
, ISA_MIPS3
);
4151 gen_helper_mtc0_xcontext(arg
);
4160 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4163 gen_helper_mtc0_framemask(arg
);
4172 rn
= "Diagnostic"; /* implementation dependent */
4177 gen_helper_mtc0_debug(arg
); /* EJTAG support */
4178 /* BS_STOP isn't good enough here, hflags may have changed. */
4179 gen_save_pc(ctx
->pc
+ 4);
4180 ctx
->bstate
= BS_EXCP
;
4184 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
4185 rn
= "TraceControl";
4186 /* Stop translation as we may have switched the execution mode */
4187 ctx
->bstate
= BS_STOP
;
4190 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
4191 rn
= "TraceControl2";
4192 /* Stop translation as we may have switched the execution mode */
4193 ctx
->bstate
= BS_STOP
;
4196 /* Stop translation as we may have switched the execution mode */
4197 ctx
->bstate
= BS_STOP
;
4198 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
4199 rn
= "UserTraceData";
4200 /* Stop translation as we may have switched the execution mode */
4201 ctx
->bstate
= BS_STOP
;
4204 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
4205 /* Stop translation as we may have switched the execution mode */
4206 ctx
->bstate
= BS_STOP
;
4217 gen_mtc0_store64(arg
, offsetof(CPUState
, CP0_DEPC
));
4227 gen_helper_mtc0_performance0(arg
);
4228 rn
= "Performance0";
4231 // gen_helper_mtc0_performance1(arg);
4232 rn
= "Performance1";
4235 // gen_helper_mtc0_performance2(arg);
4236 rn
= "Performance2";
4239 // gen_helper_mtc0_performance3(arg);
4240 rn
= "Performance3";
4243 // gen_helper_mtc0_performance4(arg);
4244 rn
= "Performance4";
4247 // gen_helper_mtc0_performance5(arg);
4248 rn
= "Performance5";
4251 // gen_helper_mtc0_performance6(arg);
4252 rn
= "Performance6";
4255 // gen_helper_mtc0_performance7(arg);
4256 rn
= "Performance7";
4282 gen_helper_mtc0_taglo(arg
);
4289 gen_helper_mtc0_datalo(arg
);
4302 gen_helper_mtc0_taghi(arg
);
4309 gen_helper_mtc0_datahi(arg
);
4320 gen_mtc0_store64(arg
, offsetof(CPUState
, CP0_ErrorEPC
));
4331 gen_mtc0_store32(arg
, offsetof(CPUState
, CP0_DESAVE
));
4337 /* Stop translation as we may have switched the execution mode */
4338 ctx
->bstate
= BS_STOP
;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts. */
    ctx->bstate = BS_STOP;
    return;

die:
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
}
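
/*
 * gen_mtc0() above mirrors gen_mfc0() but for writes.  Because a CP0
 * write can unmask interrupts, switch register sets or change the
 * execution mode, the common exit conservatively sets ctx->bstate to
 * BS_STOP, and writes to Status/Debug even use BS_EXCP after saving the
 * PC, since the hflags derived from those registers may no longer match
 * the ones this TB was translated with.
 */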
4357 #if defined(TARGET_MIPS64)
4358 static void gen_dmfc0 (CPUState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4360 const char *rn
= "invalid";
4363 check_insn(env
, ctx
, ISA_MIPS64
);
4369 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Index
));
4373 check_insn(env
, ctx
, ASE_MT
);
4374 gen_helper_mfc0_mvpcontrol(arg
);
4378 check_insn(env
, ctx
, ASE_MT
);
4379 gen_helper_mfc0_mvpconf0(arg
);
4383 check_insn(env
, ctx
, ASE_MT
);
4384 gen_helper_mfc0_mvpconf1(arg
);
4394 gen_helper_mfc0_random(arg
);
4398 check_insn(env
, ctx
, ASE_MT
);
4399 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEControl
));
4403 check_insn(env
, ctx
, ASE_MT
);
4404 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEConf0
));
4408 check_insn(env
, ctx
, ASE_MT
);
4409 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEConf1
));
4413 check_insn(env
, ctx
, ASE_MT
);
4414 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_YQMask
));
4418 check_insn(env
, ctx
, ASE_MT
);
4419 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
4423 check_insn(env
, ctx
, ASE_MT
);
4424 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
4425 rn
= "VPEScheFBack";
4428 check_insn(env
, ctx
, ASE_MT
);
4429 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_VPEOpt
));
4439 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryLo0
));
4443 check_insn(env
, ctx
, ASE_MT
);
4444 gen_helper_mfc0_tcstatus(arg
);
4448 check_insn(env
, ctx
, ASE_MT
);
4449 gen_helper_mfc0_tcbind(arg
);
4453 check_insn(env
, ctx
, ASE_MT
);
4454 gen_helper_dmfc0_tcrestart(arg
);
4458 check_insn(env
, ctx
, ASE_MT
);
4459 gen_helper_dmfc0_tchalt(arg
);
4463 check_insn(env
, ctx
, ASE_MT
);
4464 gen_helper_dmfc0_tccontext(arg
);
4468 check_insn(env
, ctx
, ASE_MT
);
4469 gen_helper_dmfc0_tcschedule(arg
);
4473 check_insn(env
, ctx
, ASE_MT
);
4474 gen_helper_dmfc0_tcschefback(arg
);
4484 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryLo1
));
4494 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_Context
));
4498 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4499 rn
= "ContextConfig";
4508 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PageMask
));
4512 check_insn(env
, ctx
, ISA_MIPS32R2
);
4513 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PageGrain
));
4523 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Wired
));
4527 check_insn(env
, ctx
, ISA_MIPS32R2
);
4528 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf0
));
4532 check_insn(env
, ctx
, ISA_MIPS32R2
);
4533 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf1
));
4537 check_insn(env
, ctx
, ISA_MIPS32R2
);
4538 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf2
));
4542 check_insn(env
, ctx
, ISA_MIPS32R2
);
4543 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf3
));
4547 check_insn(env
, ctx
, ISA_MIPS32R2
);
4548 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSConf4
));
4558 check_insn(env
, ctx
, ISA_MIPS32R2
);
4559 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_HWREna
));
4569 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_BadVAddr
));
4579 /* Mark as an IO operation because we read the time. */
4582 gen_helper_mfc0_count(arg
);
4586 /* Break the TB to be able to take timer interrupts immediately
4587 after reading count. */
4588 ctx
->bstate
= BS_STOP
;
4591 /* 6,7 are implementation dependent */
4599 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EntryHi
));
4609 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Compare
));
4612 /* 6,7 are implementation dependent */
4620 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Status
));
4624 check_insn(env
, ctx
, ISA_MIPS32R2
);
4625 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_IntCtl
));
4629 check_insn(env
, ctx
, ISA_MIPS32R2
);
4630 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSCtl
));
4634 check_insn(env
, ctx
, ISA_MIPS32R2
);
4635 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_SRSMap
));
4645 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Cause
));
4655 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
4665 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_PRid
));
4669 check_insn(env
, ctx
, ISA_MIPS32R2
);
4670 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_EBase
));
4680 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config0
));
4684 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config1
));
4688 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config2
));
4692 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config3
));
4695 /* 6,7 are implementation dependent */
4697 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config6
));
4701 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Config7
));
4711 gen_helper_dmfc0_lladdr(arg
);
4721 gen_helper_1i(dmfc0_watchlo
, arg
, sel
);
4731 gen_helper_1i(mfc0_watchhi
, arg
, sel
);
4741 check_insn(env
, ctx
, ISA_MIPS3
);
4742 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_XContext
));
4750 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4753 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Framemask
));
4761 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
4767 gen_helper_mfc0_debug(arg
); /* EJTAG support */
4771 // gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
4772 rn
= "TraceControl";
4775 // gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
4776 rn
= "TraceControl2";
4779 // gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
4780 rn
= "UserTraceData";
4783 // gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
4794 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
4804 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_Performance0
));
4805 rn
= "Performance0";
4808 // gen_helper_dmfc0_performance1(arg);
4809 rn
= "Performance1";
4812 // gen_helper_dmfc0_performance2(arg);
4813 rn
= "Performance2";
4816 // gen_helper_dmfc0_performance3(arg);
4817 rn
= "Performance3";
4820 // gen_helper_dmfc0_performance4(arg);
4821 rn
= "Performance4";
4824 // gen_helper_dmfc0_performance5(arg);
4825 rn
= "Performance5";
4828 // gen_helper_dmfc0_performance6(arg);
4829 rn
= "Performance6";
4832 // gen_helper_dmfc0_performance7(arg);
4833 rn
= "Performance7";
4840 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4847 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4860 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_TagLo
));
4867 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DataLo
));
4880 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_TagHi
));
4887 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DataHi
));
4897 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
4908 gen_mfc0_load32(arg
, offsetof(CPUState
, CP0_DESAVE
));
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    return;

die:
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
}
4927 static void gen_dmtc0 (CPUState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4929 const char *rn
= "invalid";
4932 check_insn(env
, ctx
, ISA_MIPS64
);
4941 gen_helper_mtc0_index(arg
);
4945 check_insn(env
, ctx
, ASE_MT
);
4946 gen_helper_mtc0_mvpcontrol(arg
);
4950 check_insn(env
, ctx
, ASE_MT
);
4955 check_insn(env
, ctx
, ASE_MT
);
4970 check_insn(env
, ctx
, ASE_MT
);
4971 gen_helper_mtc0_vpecontrol(arg
);
4975 check_insn(env
, ctx
, ASE_MT
);
4976 gen_helper_mtc0_vpeconf0(arg
);
4980 check_insn(env
, ctx
, ASE_MT
);
4981 gen_helper_mtc0_vpeconf1(arg
);
4985 check_insn(env
, ctx
, ASE_MT
);
4986 gen_helper_mtc0_yqmask(arg
);
4990 check_insn(env
, ctx
, ASE_MT
);
4991 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
4995 check_insn(env
, ctx
, ASE_MT
);
4996 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
4997 rn
= "VPEScheFBack";
5000 check_insn(env
, ctx
, ASE_MT
);
5001 gen_helper_mtc0_vpeopt(arg
);
5011 gen_helper_mtc0_entrylo0(arg
);
5015 check_insn(env
, ctx
, ASE_MT
);
5016 gen_helper_mtc0_tcstatus(arg
);
5020 check_insn(env
, ctx
, ASE_MT
);
5021 gen_helper_mtc0_tcbind(arg
);
5025 check_insn(env
, ctx
, ASE_MT
);
5026 gen_helper_mtc0_tcrestart(arg
);
5030 check_insn(env
, ctx
, ASE_MT
);
5031 gen_helper_mtc0_tchalt(arg
);
5035 check_insn(env
, ctx
, ASE_MT
);
5036 gen_helper_mtc0_tccontext(arg
);
5040 check_insn(env
, ctx
, ASE_MT
);
5041 gen_helper_mtc0_tcschedule(arg
);
5045 check_insn(env
, ctx
, ASE_MT
);
5046 gen_helper_mtc0_tcschefback(arg
);
5056 gen_helper_mtc0_entrylo1(arg
);
5066 gen_helper_mtc0_context(arg
);
5070 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
5071 rn
= "ContextConfig";
5080 gen_helper_mtc0_pagemask(arg
);
5084 check_insn(env
, ctx
, ISA_MIPS32R2
);
5085 gen_helper_mtc0_pagegrain(arg
);
5095 gen_helper_mtc0_wired(arg
);
5099 check_insn(env
, ctx
, ISA_MIPS32R2
);
5100 gen_helper_mtc0_srsconf0(arg
);
5104 check_insn(env
, ctx
, ISA_MIPS32R2
);
5105 gen_helper_mtc0_srsconf1(arg
);
5109 check_insn(env
, ctx
, ISA_MIPS32R2
);
5110 gen_helper_mtc0_srsconf2(arg
);
5114 check_insn(env
, ctx
, ISA_MIPS32R2
);
5115 gen_helper_mtc0_srsconf3(arg
);
5119 check_insn(env
, ctx
, ISA_MIPS32R2
);
5120 gen_helper_mtc0_srsconf4(arg
);
5130 check_insn(env
, ctx
, ISA_MIPS32R2
);
5131 gen_helper_mtc0_hwrena(arg
);
5145 gen_helper_mtc0_count(arg
);
5148 /* 6,7 are implementation dependent */
5152 /* Stop translation as we may have switched the execution mode */
5153 ctx
->bstate
= BS_STOP
;
5158 gen_helper_mtc0_entryhi(arg
);
5168 gen_helper_mtc0_compare(arg
);
5171 /* 6,7 are implementation dependent */
5175 /* Stop translation as we may have switched the execution mode */
5176 ctx
->bstate
= BS_STOP
;
5181 save_cpu_state(ctx
, 1);
5182 gen_helper_mtc0_status(arg
);
5183 /* BS_STOP isn't good enough here, hflags may have changed. */
5184 gen_save_pc(ctx
->pc
+ 4);
5185 ctx
->bstate
= BS_EXCP
;
5189 check_insn(env
, ctx
, ISA_MIPS32R2
);
5190 gen_helper_mtc0_intctl(arg
);
5191 /* Stop translation as we may have switched the execution mode */
5192 ctx
->bstate
= BS_STOP
;
5196 check_insn(env
, ctx
, ISA_MIPS32R2
);
5197 gen_helper_mtc0_srsctl(arg
);
5198 /* Stop translation as we may have switched the execution mode */
5199 ctx
->bstate
= BS_STOP
;
5203 check_insn(env
, ctx
, ISA_MIPS32R2
);
5204 gen_mtc0_store32(arg
, offsetof(CPUState
, CP0_SRSMap
));
5205 /* Stop translation as we may have switched the execution mode */
5206 ctx
->bstate
= BS_STOP
;
5216 save_cpu_state(ctx
, 1);
        /* Mark as an IO operation because we may trigger a software
           interrupt. */
        gen_helper_mtc0_cause(arg);
        /* Stop translation as we may have triggered an interrupt */
5227 ctx
->bstate
= BS_STOP
;
5237 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
5251 check_insn(env
, ctx
, ISA_MIPS32R2
);
5252 gen_helper_mtc0_ebase(arg
);
5262 gen_helper_mtc0_config0(arg
);
5264 /* Stop translation as we may have switched the execution mode */
5265 ctx
->bstate
= BS_STOP
;
5268 /* ignored, read only */
5272 gen_helper_mtc0_config2(arg
);
5274 /* Stop translation as we may have switched the execution mode */
5275 ctx
->bstate
= BS_STOP
;
5281 /* 6,7 are implementation dependent */
5283 rn
= "Invalid config selector";
5290 gen_helper_mtc0_lladdr(arg
);
5300 gen_helper_1i(mtc0_watchlo
, arg
, sel
);
5310 gen_helper_1i(mtc0_watchhi
, arg
, sel
);
5320 check_insn(env
, ctx
, ISA_MIPS3
);
5321 gen_helper_mtc0_xcontext(arg
);
5329 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5332 gen_helper_mtc0_framemask(arg
);
5341 rn
= "Diagnostic"; /* implementation dependent */
5346 gen_helper_mtc0_debug(arg
); /* EJTAG support */
5347 /* BS_STOP isn't good enough here, hflags may have changed. */
5348 gen_save_pc(ctx
->pc
+ 4);
5349 ctx
->bstate
= BS_EXCP
;
5353 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
5354 /* Stop translation as we may have switched the execution mode */
5355 ctx
->bstate
= BS_STOP
;
5356 rn
= "TraceControl";
5359 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
5360 /* Stop translation as we may have switched the execution mode */
5361 ctx
->bstate
= BS_STOP
;
5362 rn
= "TraceControl2";
5365 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
5366 /* Stop translation as we may have switched the execution mode */
5367 ctx
->bstate
= BS_STOP
;
5368 rn
= "UserTraceData";
5371 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
5372 /* Stop translation as we may have switched the execution mode */
5373 ctx
->bstate
= BS_STOP
;
5384 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
5394 gen_helper_mtc0_performance0(arg
);
5395 rn
= "Performance0";
5398 // gen_helper_mtc0_performance1(arg);
5399 rn
= "Performance1";
5402 // gen_helper_mtc0_performance2(arg);
5403 rn
= "Performance2";
5406 // gen_helper_mtc0_performance3(arg);
5407 rn
= "Performance3";
5410 // gen_helper_mtc0_performance4(arg);
5411 rn
= "Performance4";
5414 // gen_helper_mtc0_performance5(arg);
5415 rn
= "Performance5";
5418 // gen_helper_mtc0_performance6(arg);
5419 rn
= "Performance6";
5422 // gen_helper_mtc0_performance7(arg);
5423 rn
= "Performance7";
5449 gen_helper_mtc0_taglo(arg
);
5456 gen_helper_mtc0_datalo(arg
);
5469 gen_helper_mtc0_taghi(arg
);
5476 gen_helper_mtc0_datahi(arg
);
5487 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
5498 gen_mtc0_store32(arg
, offsetof(CPUState
, CP0_DESAVE
));
5504 /* Stop translation as we may have switched the execution mode */
5505 ctx
->bstate
= BS_STOP
;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts. */
    ctx->bstate = BS_STOP;
    return;

die:
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
}
#endif /* TARGET_MIPS64 */
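
/*
 * gen_mftr()/gen_mttr() below implement the MT-ASE cross-TC moves: the
 * target thread context is taken from CP0_VPEControl.TargTC, and if
 * that TC is out of range or belongs to another VPE without MVP
 * privileges the read simply returns -1 (the write is silently
 * ignored).  Depending on the "u", "sel" and "h" fields the access is
 * routed either to the other TC's CP0 registers, to its GPRs, LO/HI/ACX
 * and DSPControl, or to the shared FPU registers.
 */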
5525 static void gen_mftr(CPUState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5526 int u
, int sel
, int h
)
5528 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5529 TCGv t0
= tcg_temp_local_new();
5531 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5532 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5533 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5534 tcg_gen_movi_tl(t0
, -1);
5535 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5536 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5537 tcg_gen_movi_tl(t0
, -1);
5543 gen_helper_mftc0_vpecontrol(t0
);
5546 gen_helper_mftc0_vpeconf0(t0
);
5556 gen_helper_mftc0_tcstatus(t0
);
5559 gen_helper_mftc0_tcbind(t0
);
5562 gen_helper_mftc0_tcrestart(t0
);
5565 gen_helper_mftc0_tchalt(t0
);
5568 gen_helper_mftc0_tccontext(t0
);
5571 gen_helper_mftc0_tcschedule(t0
);
5574 gen_helper_mftc0_tcschefback(t0
);
5577 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5584 gen_helper_mftc0_entryhi(t0
);
5587 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5593 gen_helper_mftc0_status(t0
);
5596 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5602 gen_helper_mftc0_cause(t0
);
5612 gen_helper_mftc0_epc(t0
);
5622 gen_helper_mftc0_ebase(t0
);
5632 gen_helper_mftc0_configx(t0
, tcg_const_tl(sel
));
5642 gen_helper_mftc0_debug(t0
);
5645 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5650 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5652 } else switch (sel
) {
5653 /* GPR registers. */
5655 gen_helper_1i(mftgpr
, t0
, rt
);
5657 /* Auxiliary CPU registers */
5661 gen_helper_1i(mftlo
, t0
, 0);
5664 gen_helper_1i(mfthi
, t0
, 0);
5667 gen_helper_1i(mftacx
, t0
, 0);
5670 gen_helper_1i(mftlo
, t0
, 1);
5673 gen_helper_1i(mfthi
, t0
, 1);
5676 gen_helper_1i(mftacx
, t0
, 1);
5679 gen_helper_1i(mftlo
, t0
, 2);
5682 gen_helper_1i(mfthi
, t0
, 2);
5685 gen_helper_1i(mftacx
, t0
, 2);
5688 gen_helper_1i(mftlo
, t0
, 3);
5691 gen_helper_1i(mfthi
, t0
, 3);
5694 gen_helper_1i(mftacx
, t0
, 3);
5697 gen_helper_mftdsp(t0
);
5703 /* Floating point (COP1). */
5705 /* XXX: For now we support only a single FPU context. */
5707 TCGv_i32 fp0
= tcg_temp_new_i32();
5709 gen_load_fpr32(fp0
, rt
);
5710 tcg_gen_ext_i32_tl(t0
, fp0
);
5711 tcg_temp_free_i32(fp0
);
5713 TCGv_i32 fp0
= tcg_temp_new_i32();
5715 gen_load_fpr32h(fp0
, rt
);
5716 tcg_gen_ext_i32_tl(t0
, fp0
);
5717 tcg_temp_free_i32(fp0
);
5721 /* XXX: For now we support only a single FPU context. */
5722 gen_helper_1i(cfc1
, t0
, rt
);
5724 /* COP2: Not implemented. */
5731 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
5732 gen_store_gpr(t0
, rd
);
5738 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
5739 generate_exception(ctx
, EXCP_RI
);
5742 static void gen_mttr(CPUState
*env
, DisasContext
*ctx
, int rd
, int rt
,
5743 int u
, int sel
, int h
)
5745 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5746 TCGv t0
= tcg_temp_local_new();
5748 gen_load_gpr(t0
, rt
);
5749 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5750 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5751 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5753 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5754 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5761 gen_helper_mttc0_vpecontrol(t0
);
5764 gen_helper_mttc0_vpeconf0(t0
);
5774 gen_helper_mttc0_tcstatus(t0
);
5777 gen_helper_mttc0_tcbind(t0
);
5780 gen_helper_mttc0_tcrestart(t0
);
5783 gen_helper_mttc0_tchalt(t0
);
5786 gen_helper_mttc0_tccontext(t0
);
5789 gen_helper_mttc0_tcschedule(t0
);
5792 gen_helper_mttc0_tcschefback(t0
);
5795 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5802 gen_helper_mttc0_entryhi(t0
);
5805 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5811 gen_helper_mttc0_status(t0
);
5814 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5820 gen_helper_mttc0_cause(t0
);
5830 gen_helper_mttc0_ebase(t0
);
5840 gen_helper_mttc0_debug(t0
);
5843 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5848 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5850 } else switch (sel
) {
5851 /* GPR registers. */
5853 gen_helper_1i(mttgpr
, t0
, rd
);
5855 /* Auxiliary CPU registers */
5859 gen_helper_1i(mttlo
, t0
, 0);
5862 gen_helper_1i(mtthi
, t0
, 0);
5865 gen_helper_1i(mttacx
, t0
, 0);
5868 gen_helper_1i(mttlo
, t0
, 1);
5871 gen_helper_1i(mtthi
, t0
, 1);
5874 gen_helper_1i(mttacx
, t0
, 1);
5877 gen_helper_1i(mttlo
, t0
, 2);
5880 gen_helper_1i(mtthi
, t0
, 2);
5883 gen_helper_1i(mttacx
, t0
, 2);
5886 gen_helper_1i(mttlo
, t0
, 3);
5889 gen_helper_1i(mtthi
, t0
, 3);
5892 gen_helper_1i(mttacx
, t0
, 3);
5895 gen_helper_mttdsp(t0
);
5901 /* Floating point (COP1). */
5903 /* XXX: For now we support only a single FPU context. */
5905 TCGv_i32 fp0
= tcg_temp_new_i32();
5907 tcg_gen_trunc_tl_i32(fp0
, t0
);
5908 gen_store_fpr32(fp0
, rd
);
5909 tcg_temp_free_i32(fp0
);
5911 TCGv_i32 fp0
= tcg_temp_new_i32();
5913 tcg_gen_trunc_tl_i32(fp0
, t0
);
5914 gen_store_fpr32h(fp0
, rd
);
5915 tcg_temp_free_i32(fp0
);
5919 /* XXX: For now we support only a single FPU context. */
5920 gen_helper_1i(ctc1
, t0
, rd
);
5922 /* COP2: Not implemented. */
5929 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
5935 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
5936 generate_exception(ctx
, EXCP_RI
);
5939 static void gen_cp0 (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
5941 const char *opn
= "ldst";
5949 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
5954 TCGv t0
= tcg_temp_new();
5956 gen_load_gpr(t0
, rt
);
5957 gen_mtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5962 #if defined(TARGET_MIPS64)
5964 check_insn(env
, ctx
, ISA_MIPS3
);
5969 gen_dmfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
5973 check_insn(env
, ctx
, ISA_MIPS3
);
5975 TCGv t0
= tcg_temp_new();
5977 gen_load_gpr(t0
, rt
);
5978 gen_dmtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5985 check_insn(env
, ctx
, ASE_MT
);
5990 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
5991 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
5995 check_insn(env
, ctx
, ASE_MT
);
5996 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
5997 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
6002 if (!env
->tlb
->helper_tlbwi
)
6008 if (!env
->tlb
->helper_tlbwr
)
6014 if (!env
->tlb
->helper_tlbp
)
6020 if (!env
->tlb
->helper_tlbr
)
6026 check_insn(env
, ctx
, ISA_MIPS2
);
6028 ctx
->bstate
= BS_EXCP
;
6032 check_insn(env
, ctx
, ISA_MIPS32
);
6033 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
6035 generate_exception(ctx
, EXCP_RI
);
6038 ctx
->bstate
= BS_EXCP
;
6043 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
6044 /* If we get an exception, we want to restart at next instruction */
6046 save_cpu_state(ctx
, 1);
6049 ctx
->bstate
= BS_EXCP
;
6054 generate_exception(ctx
, EXCP_RI
);
6057 (void)opn
; /* avoid a compiler warning */
6058 MIPS_DEBUG("%s %s %d", opn
, regnames
[rt
], rd
);
6060 #endif /* !CONFIG_USER_ONLY */
6062 /* CP1 Branches (before delay slot) */
6063 static void gen_compute_branch1 (CPUState
*env
, DisasContext
*ctx
, uint32_t op
,
6064 int32_t cc
, int32_t offset
)
6066 target_ulong btarget
;
6067 const char *opn
= "cp1 cond branch";
6068 TCGv_i32 t0
= tcg_temp_new_i32();
6071 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
6073 btarget
= ctx
->pc
+ 4 + offset
;
6077 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6078 tcg_gen_not_i32(t0
, t0
);
6079 tcg_gen_andi_i32(t0
, t0
, 1);
6080 tcg_gen_extu_i32_tl(bcond
, t0
);
6084 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6085 tcg_gen_not_i32(t0
, t0
);
6086 tcg_gen_andi_i32(t0
, t0
, 1);
6087 tcg_gen_extu_i32_tl(bcond
, t0
);
6091 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6092 tcg_gen_andi_i32(t0
, t0
, 1);
6093 tcg_gen_extu_i32_tl(bcond
, t0
);
6097 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6098 tcg_gen_andi_i32(t0
, t0
, 1);
6099 tcg_gen_extu_i32_tl(bcond
, t0
);
6102 ctx
->hflags
|= MIPS_HFLAG_BL
;
6106 TCGv_i32 t1
= tcg_temp_new_i32();
6107 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6108 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6109 tcg_gen_nor_i32(t0
, t0
, t1
);
6110 tcg_temp_free_i32(t1
);
6111 tcg_gen_andi_i32(t0
, t0
, 1);
6112 tcg_gen_extu_i32_tl(bcond
, t0
);
6118 TCGv_i32 t1
= tcg_temp_new_i32();
6119 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6120 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6121 tcg_gen_or_i32(t0
, t0
, t1
);
6122 tcg_temp_free_i32(t1
);
6123 tcg_gen_andi_i32(t0
, t0
, 1);
6124 tcg_gen_extu_i32_tl(bcond
, t0
);
6130 TCGv_i32 t1
= tcg_temp_new_i32();
6131 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6132 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6133 tcg_gen_or_i32(t0
, t0
, t1
);
6134 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6135 tcg_gen_or_i32(t0
, t0
, t1
);
6136 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6137 tcg_gen_nor_i32(t0
, t0
, t1
);
6138 tcg_temp_free_i32(t1
);
6139 tcg_gen_andi_i32(t0
, t0
, 1);
6140 tcg_gen_extu_i32_tl(bcond
, t0
);
6146 TCGv_i32 t1
= tcg_temp_new_i32();
6147 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6148 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6149 tcg_gen_or_i32(t0
, t0
, t1
);
6150 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6151 tcg_gen_or_i32(t0
, t0
, t1
);
6152 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6153 tcg_gen_or_i32(t0
, t0
, t1
);
6154 tcg_temp_free_i32(t1
);
6155 tcg_gen_andi_i32(t0
, t0
, 1);
6156 tcg_gen_extu_i32_tl(bcond
, t0
);
6160 ctx
->hflags
|= MIPS_HFLAG_BC
;
6164 generate_exception (ctx
, EXCP_RI
);
6167 (void)opn
; /* avoid a compiler warning */
6168 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx
, opn
,
6169 ctx
->hflags
, btarget
);
6170 ctx
->btarget
= btarget
;
6173 tcg_temp_free_i32(t0
);
6176 /* Coprocessor 1 (FPU) */
6178 #define FOP(func, fmt) (((fmt) << 21) | (func))
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),
    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),
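/* Note: function codes 48..63 of each format are the c.cond.fmt compares.
 * In gen_farith() below, bit 6 of the opcode additionally selects the
 * MIPS-3D "abs" compare variants (gen_cmpabs_*) over the plain compares
 * (gen_cmp_*); see also CABS_COND_FMT further down. */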
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    const char *opn = "cp1 move";
    TCGv t0 = tcg_temp_new();

    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(fp0, fs);
    tcg_temp_free_i32(fp0);

    gen_helper_1i(cfc1, t0, fs);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    gen_helper_1i(ctc1, t0, fs);

#if defined(TARGET_MIPS64)
    gen_load_fpr64(ctx, t0, fs);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    gen_store_fpr64(ctx, t0, fs);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32h(fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32h(fp0, fs);
    tcg_temp_free_i32(fp0);

    generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
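/* Note: the case labels of the switch in gen_cp1() are not shown in this
 * excerpt; the blocks above correspond, in order, to the GPR<->FPR move
 * instructions (MFC1, MTC1, CFC1, CTC1, the 64-bit DMFC1/DMTC1 under
 * TARGET_MIPS64, and MFHC1/MTHC1 for the high half of a 64-bit FPR). */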
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
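/* Note: in the elided lines of gen_movci() and the gen_movcf_* helpers
 * below, `cond` is presumably derived from the tf (true/false) bit of the
 * instruction, so the brcondi skips the move when FP condition code `cc`
 * (bit get_fp_bit(cc) of FCR31) does not match, and a gen_set_label(l1)
 * closes each conditional-move sequence. */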
static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(t0, fs);
    gen_store_fpr32h(t0, fd);
    tcg_temp_free_i32(t0);
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    const char *opn = "farith";
    const char *condnames[] = {
    const char *condnames_abs[] = {
    enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
    uint32_t func = ctx->opcode & 0x3f;
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_add_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);
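/* Note: the block above is the OPC_ADD_S case of gen_farith(); the cases
 * that follow (SUB.S, MUL.S, DIV.S, SQRT.S, the rounding and conversion
 * ops, and their FMT_D and FMT_PS counterparts) all repeat the same shape:
 * load the operand FPRs into TCG temporaries, call the matching
 * gen_helper_float_* softfloat helper, store the destination register,
 * and free the temporaries. */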
6562 TCGv_i32 fp0
= tcg_temp_new_i32();
6563 TCGv_i32 fp1
= tcg_temp_new_i32();
6565 gen_load_fpr32(fp0
, fs
);
6566 gen_load_fpr32(fp1
, ft
);
6567 gen_helper_float_sub_s(fp0
, fp0
, fp1
);
6568 tcg_temp_free_i32(fp1
);
6569 gen_store_fpr32(fp0
, fd
);
6570 tcg_temp_free_i32(fp0
);
6577 TCGv_i32 fp0
= tcg_temp_new_i32();
6578 TCGv_i32 fp1
= tcg_temp_new_i32();
6580 gen_load_fpr32(fp0
, fs
);
6581 gen_load_fpr32(fp1
, ft
);
6582 gen_helper_float_mul_s(fp0
, fp0
, fp1
);
6583 tcg_temp_free_i32(fp1
);
6584 gen_store_fpr32(fp0
, fd
);
6585 tcg_temp_free_i32(fp0
);
6592 TCGv_i32 fp0
= tcg_temp_new_i32();
6593 TCGv_i32 fp1
= tcg_temp_new_i32();
6595 gen_load_fpr32(fp0
, fs
);
6596 gen_load_fpr32(fp1
, ft
);
6597 gen_helper_float_div_s(fp0
, fp0
, fp1
);
6598 tcg_temp_free_i32(fp1
);
6599 gen_store_fpr32(fp0
, fd
);
6600 tcg_temp_free_i32(fp0
);
6607 TCGv_i32 fp0
= tcg_temp_new_i32();
6609 gen_load_fpr32(fp0
, fs
);
6610 gen_helper_float_sqrt_s(fp0
, fp0
);
6611 gen_store_fpr32(fp0
, fd
);
6612 tcg_temp_free_i32(fp0
);
6618 TCGv_i32 fp0
= tcg_temp_new_i32();
6620 gen_load_fpr32(fp0
, fs
);
6621 gen_helper_float_abs_s(fp0
, fp0
);
6622 gen_store_fpr32(fp0
, fd
);
6623 tcg_temp_free_i32(fp0
);
6629 TCGv_i32 fp0
= tcg_temp_new_i32();
6631 gen_load_fpr32(fp0
, fs
);
6632 gen_store_fpr32(fp0
, fd
);
6633 tcg_temp_free_i32(fp0
);
6639 TCGv_i32 fp0
= tcg_temp_new_i32();
6641 gen_load_fpr32(fp0
, fs
);
6642 gen_helper_float_chs_s(fp0
, fp0
);
6643 gen_store_fpr32(fp0
, fd
);
6644 tcg_temp_free_i32(fp0
);
6649 check_cp1_64bitmode(ctx
);
6651 TCGv_i32 fp32
= tcg_temp_new_i32();
6652 TCGv_i64 fp64
= tcg_temp_new_i64();
6654 gen_load_fpr32(fp32
, fs
);
6655 gen_helper_float_roundl_s(fp64
, fp32
);
6656 tcg_temp_free_i32(fp32
);
6657 gen_store_fpr64(ctx
, fp64
, fd
);
6658 tcg_temp_free_i64(fp64
);
6663 check_cp1_64bitmode(ctx
);
6665 TCGv_i32 fp32
= tcg_temp_new_i32();
6666 TCGv_i64 fp64
= tcg_temp_new_i64();
6668 gen_load_fpr32(fp32
, fs
);
6669 gen_helper_float_truncl_s(fp64
, fp32
);
6670 tcg_temp_free_i32(fp32
);
6671 gen_store_fpr64(ctx
, fp64
, fd
);
6672 tcg_temp_free_i64(fp64
);
6677 check_cp1_64bitmode(ctx
);
6679 TCGv_i32 fp32
= tcg_temp_new_i32();
6680 TCGv_i64 fp64
= tcg_temp_new_i64();
6682 gen_load_fpr32(fp32
, fs
);
6683 gen_helper_float_ceill_s(fp64
, fp32
);
6684 tcg_temp_free_i32(fp32
);
6685 gen_store_fpr64(ctx
, fp64
, fd
);
6686 tcg_temp_free_i64(fp64
);
6691 check_cp1_64bitmode(ctx
);
6693 TCGv_i32 fp32
= tcg_temp_new_i32();
6694 TCGv_i64 fp64
= tcg_temp_new_i64();
6696 gen_load_fpr32(fp32
, fs
);
6697 gen_helper_float_floorl_s(fp64
, fp32
);
6698 tcg_temp_free_i32(fp32
);
6699 gen_store_fpr64(ctx
, fp64
, fd
);
6700 tcg_temp_free_i64(fp64
);
6706 TCGv_i32 fp0
= tcg_temp_new_i32();
6708 gen_load_fpr32(fp0
, fs
);
6709 gen_helper_float_roundw_s(fp0
, fp0
);
6710 gen_store_fpr32(fp0
, fd
);
6711 tcg_temp_free_i32(fp0
);
6717 TCGv_i32 fp0
= tcg_temp_new_i32();
6719 gen_load_fpr32(fp0
, fs
);
6720 gen_helper_float_truncw_s(fp0
, fp0
);
6721 gen_store_fpr32(fp0
, fd
);
6722 tcg_temp_free_i32(fp0
);
6728 TCGv_i32 fp0
= tcg_temp_new_i32();
6730 gen_load_fpr32(fp0
, fs
);
6731 gen_helper_float_ceilw_s(fp0
, fp0
);
6732 gen_store_fpr32(fp0
, fd
);
6733 tcg_temp_free_i32(fp0
);
6739 TCGv_i32 fp0
= tcg_temp_new_i32();
6741 gen_load_fpr32(fp0
, fs
);
6742 gen_helper_float_floorw_s(fp0
, fp0
);
6743 gen_store_fpr32(fp0
, fd
);
6744 tcg_temp_free_i32(fp0
);
6749 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6754 int l1
= gen_new_label();
6758 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
6760 fp0
= tcg_temp_new_i32();
6761 gen_load_fpr32(fp0
, fs
);
6762 gen_store_fpr32(fp0
, fd
);
6763 tcg_temp_free_i32(fp0
);
6770 int l1
= gen_new_label();
6774 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
6775 fp0
= tcg_temp_new_i32();
6776 gen_load_fpr32(fp0
, fs
);
6777 gen_store_fpr32(fp0
, fd
);
6778 tcg_temp_free_i32(fp0
);
6787 TCGv_i32 fp0
= tcg_temp_new_i32();
6789 gen_load_fpr32(fp0
, fs
);
6790 gen_helper_float_recip_s(fp0
, fp0
);
6791 gen_store_fpr32(fp0
, fd
);
6792 tcg_temp_free_i32(fp0
);
6799 TCGv_i32 fp0
= tcg_temp_new_i32();
6801 gen_load_fpr32(fp0
, fs
);
6802 gen_helper_float_rsqrt_s(fp0
, fp0
);
6803 gen_store_fpr32(fp0
, fd
);
6804 tcg_temp_free_i32(fp0
);
6809 check_cp1_64bitmode(ctx
);
6811 TCGv_i32 fp0
= tcg_temp_new_i32();
6812 TCGv_i32 fp1
= tcg_temp_new_i32();
6814 gen_load_fpr32(fp0
, fs
);
6815 gen_load_fpr32(fp1
, fd
);
6816 gen_helper_float_recip2_s(fp0
, fp0
, fp1
);
6817 tcg_temp_free_i32(fp1
);
6818 gen_store_fpr32(fp0
, fd
);
6819 tcg_temp_free_i32(fp0
);
6824 check_cp1_64bitmode(ctx
);
6826 TCGv_i32 fp0
= tcg_temp_new_i32();
6828 gen_load_fpr32(fp0
, fs
);
6829 gen_helper_float_recip1_s(fp0
, fp0
);
6830 gen_store_fpr32(fp0
, fd
);
6831 tcg_temp_free_i32(fp0
);
6836 check_cp1_64bitmode(ctx
);
6838 TCGv_i32 fp0
= tcg_temp_new_i32();
6840 gen_load_fpr32(fp0
, fs
);
6841 gen_helper_float_rsqrt1_s(fp0
, fp0
);
6842 gen_store_fpr32(fp0
, fd
);
6843 tcg_temp_free_i32(fp0
);
6848 check_cp1_64bitmode(ctx
);
6850 TCGv_i32 fp0
= tcg_temp_new_i32();
6851 TCGv_i32 fp1
= tcg_temp_new_i32();
6853 gen_load_fpr32(fp0
, fs
);
6854 gen_load_fpr32(fp1
, ft
);
6855 gen_helper_float_rsqrt2_s(fp0
, fp0
, fp1
);
6856 tcg_temp_free_i32(fp1
);
6857 gen_store_fpr32(fp0
, fd
);
6858 tcg_temp_free_i32(fp0
);
6863 check_cp1_registers(ctx
, fd
);
6865 TCGv_i32 fp32
= tcg_temp_new_i32();
6866 TCGv_i64 fp64
= tcg_temp_new_i64();
6868 gen_load_fpr32(fp32
, fs
);
6869 gen_helper_float_cvtd_s(fp64
, fp32
);
6870 tcg_temp_free_i32(fp32
);
6871 gen_store_fpr64(ctx
, fp64
, fd
);
6872 tcg_temp_free_i64(fp64
);
6878 TCGv_i32 fp0
= tcg_temp_new_i32();
6880 gen_load_fpr32(fp0
, fs
);
6881 gen_helper_float_cvtw_s(fp0
, fp0
);
6882 gen_store_fpr32(fp0
, fd
);
6883 tcg_temp_free_i32(fp0
);
6888 check_cp1_64bitmode(ctx
);
6890 TCGv_i32 fp32
= tcg_temp_new_i32();
6891 TCGv_i64 fp64
= tcg_temp_new_i64();
6893 gen_load_fpr32(fp32
, fs
);
6894 gen_helper_float_cvtl_s(fp64
, fp32
);
6895 tcg_temp_free_i32(fp32
);
6896 gen_store_fpr64(ctx
, fp64
, fd
);
6897 tcg_temp_free_i64(fp64
);
6902 check_cp1_64bitmode(ctx
);
6904 TCGv_i64 fp64
= tcg_temp_new_i64();
6905 TCGv_i32 fp32_0
= tcg_temp_new_i32();
6906 TCGv_i32 fp32_1
= tcg_temp_new_i32();
6908 gen_load_fpr32(fp32_0
, fs
);
6909 gen_load_fpr32(fp32_1
, ft
);
6910 tcg_gen_concat_i32_i64(fp64
, fp32_0
, fp32_1
);
6911 tcg_temp_free_i32(fp32_1
);
6912 tcg_temp_free_i32(fp32_0
);
6913 gen_store_fpr64(ctx
, fp64
, fd
);
6914 tcg_temp_free_i64(fp64
);
6927 case OPC_CMP_NGLE_S
:
6934 if (ctx
->opcode
& (1 << 6)) {
6935 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
6936 opn
= condnames_abs
[func
-48];
6938 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
6939 opn
= condnames
[func
-48];
6943 check_cp1_registers(ctx
, fs
| ft
| fd
);
6945 TCGv_i64 fp0
= tcg_temp_new_i64();
6946 TCGv_i64 fp1
= tcg_temp_new_i64();
6948 gen_load_fpr64(ctx
, fp0
, fs
);
6949 gen_load_fpr64(ctx
, fp1
, ft
);
6950 gen_helper_float_add_d(fp0
, fp0
, fp1
);
6951 tcg_temp_free_i64(fp1
);
6952 gen_store_fpr64(ctx
, fp0
, fd
);
6953 tcg_temp_free_i64(fp0
);
6959 check_cp1_registers(ctx
, fs
| ft
| fd
);
6961 TCGv_i64 fp0
= tcg_temp_new_i64();
6962 TCGv_i64 fp1
= tcg_temp_new_i64();
6964 gen_load_fpr64(ctx
, fp0
, fs
);
6965 gen_load_fpr64(ctx
, fp1
, ft
);
6966 gen_helper_float_sub_d(fp0
, fp0
, fp1
);
6967 tcg_temp_free_i64(fp1
);
6968 gen_store_fpr64(ctx
, fp0
, fd
);
6969 tcg_temp_free_i64(fp0
);
6975 check_cp1_registers(ctx
, fs
| ft
| fd
);
6977 TCGv_i64 fp0
= tcg_temp_new_i64();
6978 TCGv_i64 fp1
= tcg_temp_new_i64();
6980 gen_load_fpr64(ctx
, fp0
, fs
);
6981 gen_load_fpr64(ctx
, fp1
, ft
);
6982 gen_helper_float_mul_d(fp0
, fp0
, fp1
);
6983 tcg_temp_free_i64(fp1
);
6984 gen_store_fpr64(ctx
, fp0
, fd
);
6985 tcg_temp_free_i64(fp0
);
6991 check_cp1_registers(ctx
, fs
| ft
| fd
);
6993 TCGv_i64 fp0
= tcg_temp_new_i64();
6994 TCGv_i64 fp1
= tcg_temp_new_i64();
6996 gen_load_fpr64(ctx
, fp0
, fs
);
6997 gen_load_fpr64(ctx
, fp1
, ft
);
6998 gen_helper_float_div_d(fp0
, fp0
, fp1
);
6999 tcg_temp_free_i64(fp1
);
7000 gen_store_fpr64(ctx
, fp0
, fd
);
7001 tcg_temp_free_i64(fp0
);
7007 check_cp1_registers(ctx
, fs
| fd
);
7009 TCGv_i64 fp0
= tcg_temp_new_i64();
7011 gen_load_fpr64(ctx
, fp0
, fs
);
7012 gen_helper_float_sqrt_d(fp0
, fp0
);
7013 gen_store_fpr64(ctx
, fp0
, fd
);
7014 tcg_temp_free_i64(fp0
);
7019 check_cp1_registers(ctx
, fs
| fd
);
7021 TCGv_i64 fp0
= tcg_temp_new_i64();
7023 gen_load_fpr64(ctx
, fp0
, fs
);
7024 gen_helper_float_abs_d(fp0
, fp0
);
7025 gen_store_fpr64(ctx
, fp0
, fd
);
7026 tcg_temp_free_i64(fp0
);
7031 check_cp1_registers(ctx
, fs
| fd
);
7033 TCGv_i64 fp0
= tcg_temp_new_i64();
7035 gen_load_fpr64(ctx
, fp0
, fs
);
7036 gen_store_fpr64(ctx
, fp0
, fd
);
7037 tcg_temp_free_i64(fp0
);
7042 check_cp1_registers(ctx
, fs
| fd
);
7044 TCGv_i64 fp0
= tcg_temp_new_i64();
7046 gen_load_fpr64(ctx
, fp0
, fs
);
7047 gen_helper_float_chs_d(fp0
, fp0
);
7048 gen_store_fpr64(ctx
, fp0
, fd
);
7049 tcg_temp_free_i64(fp0
);
7054 check_cp1_64bitmode(ctx
);
7056 TCGv_i64 fp0
= tcg_temp_new_i64();
7058 gen_load_fpr64(ctx
, fp0
, fs
);
7059 gen_helper_float_roundl_d(fp0
, fp0
);
7060 gen_store_fpr64(ctx
, fp0
, fd
);
7061 tcg_temp_free_i64(fp0
);
7066 check_cp1_64bitmode(ctx
);
7068 TCGv_i64 fp0
= tcg_temp_new_i64();
7070 gen_load_fpr64(ctx
, fp0
, fs
);
7071 gen_helper_float_truncl_d(fp0
, fp0
);
7072 gen_store_fpr64(ctx
, fp0
, fd
);
7073 tcg_temp_free_i64(fp0
);
7078 check_cp1_64bitmode(ctx
);
7080 TCGv_i64 fp0
= tcg_temp_new_i64();
7082 gen_load_fpr64(ctx
, fp0
, fs
);
7083 gen_helper_float_ceill_d(fp0
, fp0
);
7084 gen_store_fpr64(ctx
, fp0
, fd
);
7085 tcg_temp_free_i64(fp0
);
7090 check_cp1_64bitmode(ctx
);
7092 TCGv_i64 fp0
= tcg_temp_new_i64();
7094 gen_load_fpr64(ctx
, fp0
, fs
);
7095 gen_helper_float_floorl_d(fp0
, fp0
);
7096 gen_store_fpr64(ctx
, fp0
, fd
);
7097 tcg_temp_free_i64(fp0
);
7102 check_cp1_registers(ctx
, fs
);
7104 TCGv_i32 fp32
= tcg_temp_new_i32();
7105 TCGv_i64 fp64
= tcg_temp_new_i64();
7107 gen_load_fpr64(ctx
, fp64
, fs
);
7108 gen_helper_float_roundw_d(fp32
, fp64
);
7109 tcg_temp_free_i64(fp64
);
7110 gen_store_fpr32(fp32
, fd
);
7111 tcg_temp_free_i32(fp32
);
7116 check_cp1_registers(ctx
, fs
);
7118 TCGv_i32 fp32
= tcg_temp_new_i32();
7119 TCGv_i64 fp64
= tcg_temp_new_i64();
7121 gen_load_fpr64(ctx
, fp64
, fs
);
7122 gen_helper_float_truncw_d(fp32
, fp64
);
7123 tcg_temp_free_i64(fp64
);
7124 gen_store_fpr32(fp32
, fd
);
7125 tcg_temp_free_i32(fp32
);
7130 check_cp1_registers(ctx
, fs
);
7132 TCGv_i32 fp32
= tcg_temp_new_i32();
7133 TCGv_i64 fp64
= tcg_temp_new_i64();
7135 gen_load_fpr64(ctx
, fp64
, fs
);
7136 gen_helper_float_ceilw_d(fp32
, fp64
);
7137 tcg_temp_free_i64(fp64
);
7138 gen_store_fpr32(fp32
, fd
);
7139 tcg_temp_free_i32(fp32
);
7144 check_cp1_registers(ctx
, fs
);
7146 TCGv_i32 fp32
= tcg_temp_new_i32();
7147 TCGv_i64 fp64
= tcg_temp_new_i64();
7149 gen_load_fpr64(ctx
, fp64
, fs
);
7150 gen_helper_float_floorw_d(fp32
, fp64
);
7151 tcg_temp_free_i64(fp64
);
7152 gen_store_fpr32(fp32
, fd
);
7153 tcg_temp_free_i32(fp32
);
7158 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7163 int l1
= gen_new_label();
7167 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7169 fp0
= tcg_temp_new_i64();
7170 gen_load_fpr64(ctx
, fp0
, fs
);
7171 gen_store_fpr64(ctx
, fp0
, fd
);
7172 tcg_temp_free_i64(fp0
);
7179 int l1
= gen_new_label();
7183 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7184 fp0
= tcg_temp_new_i64();
7185 gen_load_fpr64(ctx
, fp0
, fs
);
7186 gen_store_fpr64(ctx
, fp0
, fd
);
7187 tcg_temp_free_i64(fp0
);
7194 check_cp1_64bitmode(ctx
);
7196 TCGv_i64 fp0
= tcg_temp_new_i64();
7198 gen_load_fpr64(ctx
, fp0
, fs
);
7199 gen_helper_float_recip_d(fp0
, fp0
);
7200 gen_store_fpr64(ctx
, fp0
, fd
);
7201 tcg_temp_free_i64(fp0
);
7206 check_cp1_64bitmode(ctx
);
7208 TCGv_i64 fp0
= tcg_temp_new_i64();
7210 gen_load_fpr64(ctx
, fp0
, fs
);
7211 gen_helper_float_rsqrt_d(fp0
, fp0
);
7212 gen_store_fpr64(ctx
, fp0
, fd
);
7213 tcg_temp_free_i64(fp0
);
7218 check_cp1_64bitmode(ctx
);
7220 TCGv_i64 fp0
= tcg_temp_new_i64();
7221 TCGv_i64 fp1
= tcg_temp_new_i64();
7223 gen_load_fpr64(ctx
, fp0
, fs
);
7224 gen_load_fpr64(ctx
, fp1
, ft
);
7225 gen_helper_float_recip2_d(fp0
, fp0
, fp1
);
7226 tcg_temp_free_i64(fp1
);
7227 gen_store_fpr64(ctx
, fp0
, fd
);
7228 tcg_temp_free_i64(fp0
);
7233 check_cp1_64bitmode(ctx
);
7235 TCGv_i64 fp0
= tcg_temp_new_i64();
7237 gen_load_fpr64(ctx
, fp0
, fs
);
7238 gen_helper_float_recip1_d(fp0
, fp0
);
7239 gen_store_fpr64(ctx
, fp0
, fd
);
7240 tcg_temp_free_i64(fp0
);
7245 check_cp1_64bitmode(ctx
);
7247 TCGv_i64 fp0
= tcg_temp_new_i64();
7249 gen_load_fpr64(ctx
, fp0
, fs
);
7250 gen_helper_float_rsqrt1_d(fp0
, fp0
);
7251 gen_store_fpr64(ctx
, fp0
, fd
);
7252 tcg_temp_free_i64(fp0
);
7257 check_cp1_64bitmode(ctx
);
7259 TCGv_i64 fp0
= tcg_temp_new_i64();
7260 TCGv_i64 fp1
= tcg_temp_new_i64();
7262 gen_load_fpr64(ctx
, fp0
, fs
);
7263 gen_load_fpr64(ctx
, fp1
, ft
);
7264 gen_helper_float_rsqrt2_d(fp0
, fp0
, fp1
);
7265 tcg_temp_free_i64(fp1
);
7266 gen_store_fpr64(ctx
, fp0
, fd
);
7267 tcg_temp_free_i64(fp0
);
7280 case OPC_CMP_NGLE_D
:
7287 if (ctx
->opcode
& (1 << 6)) {
7288 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
7289 opn
= condnames_abs
[func
-48];
7291 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
7292 opn
= condnames
[func
-48];
7296 check_cp1_registers(ctx
, fs
);
7298 TCGv_i32 fp32
= tcg_temp_new_i32();
7299 TCGv_i64 fp64
= tcg_temp_new_i64();
7301 gen_load_fpr64(ctx
, fp64
, fs
);
7302 gen_helper_float_cvts_d(fp32
, fp64
);
7303 tcg_temp_free_i64(fp64
);
7304 gen_store_fpr32(fp32
, fd
);
7305 tcg_temp_free_i32(fp32
);
7310 check_cp1_registers(ctx
, fs
);
7312 TCGv_i32 fp32
= tcg_temp_new_i32();
7313 TCGv_i64 fp64
= tcg_temp_new_i64();
7315 gen_load_fpr64(ctx
, fp64
, fs
);
7316 gen_helper_float_cvtw_d(fp32
, fp64
);
7317 tcg_temp_free_i64(fp64
);
7318 gen_store_fpr32(fp32
, fd
);
7319 tcg_temp_free_i32(fp32
);
7324 check_cp1_64bitmode(ctx
);
7326 TCGv_i64 fp0
= tcg_temp_new_i64();
7328 gen_load_fpr64(ctx
, fp0
, fs
);
7329 gen_helper_float_cvtl_d(fp0
, fp0
);
7330 gen_store_fpr64(ctx
, fp0
, fd
);
7331 tcg_temp_free_i64(fp0
);
7337 TCGv_i32 fp0
= tcg_temp_new_i32();
7339 gen_load_fpr32(fp0
, fs
);
7340 gen_helper_float_cvts_w(fp0
, fp0
);
7341 gen_store_fpr32(fp0
, fd
);
7342 tcg_temp_free_i32(fp0
);
7347 check_cp1_registers(ctx
, fd
);
7349 TCGv_i32 fp32
= tcg_temp_new_i32();
7350 TCGv_i64 fp64
= tcg_temp_new_i64();
7352 gen_load_fpr32(fp32
, fs
);
7353 gen_helper_float_cvtd_w(fp64
, fp32
);
7354 tcg_temp_free_i32(fp32
);
7355 gen_store_fpr64(ctx
, fp64
, fd
);
7356 tcg_temp_free_i64(fp64
);
7361 check_cp1_64bitmode(ctx
);
7363 TCGv_i32 fp32
= tcg_temp_new_i32();
7364 TCGv_i64 fp64
= tcg_temp_new_i64();
7366 gen_load_fpr64(ctx
, fp64
, fs
);
7367 gen_helper_float_cvts_l(fp32
, fp64
);
7368 tcg_temp_free_i64(fp64
);
7369 gen_store_fpr32(fp32
, fd
);
7370 tcg_temp_free_i32(fp32
);
7375 check_cp1_64bitmode(ctx
);
7377 TCGv_i64 fp0
= tcg_temp_new_i64();
7379 gen_load_fpr64(ctx
, fp0
, fs
);
7380 gen_helper_float_cvtd_l(fp0
, fp0
);
7381 gen_store_fpr64(ctx
, fp0
, fd
);
7382 tcg_temp_free_i64(fp0
);
7387 check_cp1_64bitmode(ctx
);
7389 TCGv_i64 fp0
= tcg_temp_new_i64();
7391 gen_load_fpr64(ctx
, fp0
, fs
);
7392 gen_helper_float_cvtps_pw(fp0
, fp0
);
7393 gen_store_fpr64(ctx
, fp0
, fd
);
7394 tcg_temp_free_i64(fp0
);
7399 check_cp1_64bitmode(ctx
);
7401 TCGv_i64 fp0
= tcg_temp_new_i64();
7402 TCGv_i64 fp1
= tcg_temp_new_i64();
7404 gen_load_fpr64(ctx
, fp0
, fs
);
7405 gen_load_fpr64(ctx
, fp1
, ft
);
7406 gen_helper_float_add_ps(fp0
, fp0
, fp1
);
7407 tcg_temp_free_i64(fp1
);
7408 gen_store_fpr64(ctx
, fp0
, fd
);
7409 tcg_temp_free_i64(fp0
);
7414 check_cp1_64bitmode(ctx
);
7416 TCGv_i64 fp0
= tcg_temp_new_i64();
7417 TCGv_i64 fp1
= tcg_temp_new_i64();
7419 gen_load_fpr64(ctx
, fp0
, fs
);
7420 gen_load_fpr64(ctx
, fp1
, ft
);
7421 gen_helper_float_sub_ps(fp0
, fp0
, fp1
);
7422 tcg_temp_free_i64(fp1
);
7423 gen_store_fpr64(ctx
, fp0
, fd
);
7424 tcg_temp_free_i64(fp0
);
7429 check_cp1_64bitmode(ctx
);
7431 TCGv_i64 fp0
= tcg_temp_new_i64();
7432 TCGv_i64 fp1
= tcg_temp_new_i64();
7434 gen_load_fpr64(ctx
, fp0
, fs
);
7435 gen_load_fpr64(ctx
, fp1
, ft
);
7436 gen_helper_float_mul_ps(fp0
, fp0
, fp1
);
7437 tcg_temp_free_i64(fp1
);
7438 gen_store_fpr64(ctx
, fp0
, fd
);
7439 tcg_temp_free_i64(fp0
);
7444 check_cp1_64bitmode(ctx
);
7446 TCGv_i64 fp0
= tcg_temp_new_i64();
7448 gen_load_fpr64(ctx
, fp0
, fs
);
7449 gen_helper_float_abs_ps(fp0
, fp0
);
7450 gen_store_fpr64(ctx
, fp0
, fd
);
7451 tcg_temp_free_i64(fp0
);
7456 check_cp1_64bitmode(ctx
);
7458 TCGv_i64 fp0
= tcg_temp_new_i64();
7460 gen_load_fpr64(ctx
, fp0
, fs
);
7461 gen_store_fpr64(ctx
, fp0
, fd
);
7462 tcg_temp_free_i64(fp0
);
7467 check_cp1_64bitmode(ctx
);
7469 TCGv_i64 fp0
= tcg_temp_new_i64();
7471 gen_load_fpr64(ctx
, fp0
, fs
);
7472 gen_helper_float_chs_ps(fp0
, fp0
);
7473 gen_store_fpr64(ctx
, fp0
, fd
);
7474 tcg_temp_free_i64(fp0
);
7479 check_cp1_64bitmode(ctx
);
7480 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7484 check_cp1_64bitmode(ctx
);
7486 int l1
= gen_new_label();
7490 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7491 fp0
= tcg_temp_new_i64();
7492 gen_load_fpr64(ctx
, fp0
, fs
);
7493 gen_store_fpr64(ctx
, fp0
, fd
);
7494 tcg_temp_free_i64(fp0
);
7500 check_cp1_64bitmode(ctx
);
7502 int l1
= gen_new_label();
7506 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7507 fp0
= tcg_temp_new_i64();
7508 gen_load_fpr64(ctx
, fp0
, fs
);
7509 gen_store_fpr64(ctx
, fp0
, fd
);
7510 tcg_temp_free_i64(fp0
);
7517 check_cp1_64bitmode(ctx
);
7519 TCGv_i64 fp0
= tcg_temp_new_i64();
7520 TCGv_i64 fp1
= tcg_temp_new_i64();
7522 gen_load_fpr64(ctx
, fp0
, ft
);
7523 gen_load_fpr64(ctx
, fp1
, fs
);
7524 gen_helper_float_addr_ps(fp0
, fp0
, fp1
);
7525 tcg_temp_free_i64(fp1
);
7526 gen_store_fpr64(ctx
, fp0
, fd
);
7527 tcg_temp_free_i64(fp0
);
7532 check_cp1_64bitmode(ctx
);
7534 TCGv_i64 fp0
= tcg_temp_new_i64();
7535 TCGv_i64 fp1
= tcg_temp_new_i64();
7537 gen_load_fpr64(ctx
, fp0
, ft
);
7538 gen_load_fpr64(ctx
, fp1
, fs
);
7539 gen_helper_float_mulr_ps(fp0
, fp0
, fp1
);
7540 tcg_temp_free_i64(fp1
);
7541 gen_store_fpr64(ctx
, fp0
, fd
);
7542 tcg_temp_free_i64(fp0
);
7547 check_cp1_64bitmode(ctx
);
7549 TCGv_i64 fp0
= tcg_temp_new_i64();
7550 TCGv_i64 fp1
= tcg_temp_new_i64();
7552 gen_load_fpr64(ctx
, fp0
, fs
);
7553 gen_load_fpr64(ctx
, fp1
, fd
);
7554 gen_helper_float_recip2_ps(fp0
, fp0
, fp1
);
7555 tcg_temp_free_i64(fp1
);
7556 gen_store_fpr64(ctx
, fp0
, fd
);
7557 tcg_temp_free_i64(fp0
);
7562 check_cp1_64bitmode(ctx
);
7564 TCGv_i64 fp0
= tcg_temp_new_i64();
7566 gen_load_fpr64(ctx
, fp0
, fs
);
7567 gen_helper_float_recip1_ps(fp0
, fp0
);
7568 gen_store_fpr64(ctx
, fp0
, fd
);
7569 tcg_temp_free_i64(fp0
);
7574 check_cp1_64bitmode(ctx
);
7576 TCGv_i64 fp0
= tcg_temp_new_i64();
7578 gen_load_fpr64(ctx
, fp0
, fs
);
7579 gen_helper_float_rsqrt1_ps(fp0
, fp0
);
7580 gen_store_fpr64(ctx
, fp0
, fd
);
7581 tcg_temp_free_i64(fp0
);
7586 check_cp1_64bitmode(ctx
);
7588 TCGv_i64 fp0
= tcg_temp_new_i64();
7589 TCGv_i64 fp1
= tcg_temp_new_i64();
7591 gen_load_fpr64(ctx
, fp0
, fs
);
7592 gen_load_fpr64(ctx
, fp1
, ft
);
7593 gen_helper_float_rsqrt2_ps(fp0
, fp0
, fp1
);
7594 tcg_temp_free_i64(fp1
);
7595 gen_store_fpr64(ctx
, fp0
, fd
);
7596 tcg_temp_free_i64(fp0
);
7601 check_cp1_64bitmode(ctx
);
7603 TCGv_i32 fp0
= tcg_temp_new_i32();
7605 gen_load_fpr32h(fp0
, fs
);
7606 gen_helper_float_cvts_pu(fp0
, fp0
);
7607 gen_store_fpr32(fp0
, fd
);
7608 tcg_temp_free_i32(fp0
);
7613 check_cp1_64bitmode(ctx
);
7615 TCGv_i64 fp0
= tcg_temp_new_i64();
7617 gen_load_fpr64(ctx
, fp0
, fs
);
7618 gen_helper_float_cvtpw_ps(fp0
, fp0
);
7619 gen_store_fpr64(ctx
, fp0
, fd
);
7620 tcg_temp_free_i64(fp0
);
7625 check_cp1_64bitmode(ctx
);
7627 TCGv_i32 fp0
= tcg_temp_new_i32();
7629 gen_load_fpr32(fp0
, fs
);
7630 gen_helper_float_cvts_pl(fp0
, fp0
);
7631 gen_store_fpr32(fp0
, fd
);
7632 tcg_temp_free_i32(fp0
);
7637 check_cp1_64bitmode(ctx
);
7639 TCGv_i32 fp0
= tcg_temp_new_i32();
7640 TCGv_i32 fp1
= tcg_temp_new_i32();
7642 gen_load_fpr32(fp0
, fs
);
7643 gen_load_fpr32(fp1
, ft
);
7644 gen_store_fpr32h(fp0
, fd
);
7645 gen_store_fpr32(fp1
, fd
);
7646 tcg_temp_free_i32(fp0
);
7647 tcg_temp_free_i32(fp1
);
7652 check_cp1_64bitmode(ctx
);
7654 TCGv_i32 fp0
= tcg_temp_new_i32();
7655 TCGv_i32 fp1
= tcg_temp_new_i32();
7657 gen_load_fpr32(fp0
, fs
);
7658 gen_load_fpr32h(fp1
, ft
);
7659 gen_store_fpr32(fp1
, fd
);
7660 gen_store_fpr32h(fp0
, fd
);
7661 tcg_temp_free_i32(fp0
);
7662 tcg_temp_free_i32(fp1
);
7667 check_cp1_64bitmode(ctx
);
7669 TCGv_i32 fp0
= tcg_temp_new_i32();
7670 TCGv_i32 fp1
= tcg_temp_new_i32();
7672 gen_load_fpr32h(fp0
, fs
);
7673 gen_load_fpr32(fp1
, ft
);
7674 gen_store_fpr32(fp1
, fd
);
7675 gen_store_fpr32h(fp0
, fd
);
7676 tcg_temp_free_i32(fp0
);
7677 tcg_temp_free_i32(fp1
);
7682 check_cp1_64bitmode(ctx
);
7684 TCGv_i32 fp0
= tcg_temp_new_i32();
7685 TCGv_i32 fp1
= tcg_temp_new_i32();
7687 gen_load_fpr32h(fp0
, fs
);
7688 gen_load_fpr32h(fp1
, ft
);
7689 gen_store_fpr32(fp1
, fd
);
7690 gen_store_fpr32h(fp0
, fd
);
7691 tcg_temp_free_i32(fp0
);
7692 tcg_temp_free_i32(fp1
);
    case OPC_CMP_UEQ_PS:
    case OPC_CMP_OLT_PS:
    case OPC_CMP_ULT_PS:
    case OPC_CMP_OLE_PS:
    case OPC_CMP_ULE_PS:
    case OPC_CMP_NGLE_PS:
    case OPC_CMP_SEQ_PS:
    case OPC_CMP_NGL_PS:
    case OPC_CMP_NGE_PS:
    case OPC_CMP_NGT_PS:
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_ps(ctx, func-48, ft, fs, cc);
            opn = condnames_abs[func-48];
            gen_cmp_ps(ctx, func-48, ft, fs, cc);
            opn = condnames[func-48];
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
    MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
    MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    const char *opn = "extended float load/store";
    TCGv t0 = tcg_temp_new();

    gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_load_gpr(t0, index);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], t0);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    save_cpu_state(ctx, 0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv t1 = tcg_temp_new();
    gen_load_fpr32(fp0, fs);
    tcg_gen_extu_i32_tl(t1, fp0);
    tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fs);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
    tcg_temp_free_i64(fp0);

    (void)opn; (void)store; /* avoid compiler warnings */
    MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
               regnames[index], regnames[base]);
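/* Note: gen_flt3_ldst() handles the indexed FPU load/store group (in QEMU's
 * decoder these are LWXC1/LDXC1/LUXC1 and SWXC1/SDXC1/SUXC1; the case labels
 * are elided above).  The effective address is gpr[base] + gpr[index], with
 * the special cases where either register is 0, and the andi with ~0x7
 * before the 64-bit accesses is the forced doubleword alignment of the
 * "unaligned" LUXC1/SUXC1 forms. */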
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    const char *opn = "flt3_arith";

    check_cp1_64bitmode(ctx);
    TCGv t0 = tcg_temp_local_new();
    TCGv_i32 fp = tcg_temp_new_i32();
    TCGv_i32 fph = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    gen_load_gpr(t0, fr);
    tcg_gen_andi_tl(t0, t0, 0x7);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_load_fpr32(fp, fs);
    gen_load_fpr32h(fph, fs);
    gen_store_fpr32(fp, fd);
    gen_store_fpr32h(fph, fd);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
#ifdef TARGET_WORDS_BIGENDIAN
    gen_load_fpr32(fp, fs);
    gen_load_fpr32h(fph, ft);
    gen_store_fpr32h(fp, fd);
    gen_store_fpr32(fph, fd);
    gen_load_fpr32h(fph, fs);
    gen_load_fpr32(fp, ft);
    gen_store_fpr32(fph, fd);
    gen_store_fpr32h(fp, fd);
    tcg_temp_free_i32(fp);
    tcg_temp_free_i32(fph);
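/* Note: the first case of gen_flt3_arith() above appears to be ALNV.PS
 * (align variable, paired single): t0 = gpr[fr] & 7 selects whether fd is a
 * straight copy of fs (offset 0) or a paired single assembled from halves
 * of fs and ft (offset 4), with the byte order handled by the
 * TARGET_WORDS_BIGENDIAN variants. */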
7890 TCGv_i32 fp0
= tcg_temp_new_i32();
7891 TCGv_i32 fp1
= tcg_temp_new_i32();
7892 TCGv_i32 fp2
= tcg_temp_new_i32();
7894 gen_load_fpr32(fp0
, fs
);
7895 gen_load_fpr32(fp1
, ft
);
7896 gen_load_fpr32(fp2
, fr
);
7897 gen_helper_float_muladd_s(fp2
, fp0
, fp1
, fp2
);
7898 tcg_temp_free_i32(fp0
);
7899 tcg_temp_free_i32(fp1
);
7900 gen_store_fpr32(fp2
, fd
);
7901 tcg_temp_free_i32(fp2
);
7907 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7909 TCGv_i64 fp0
= tcg_temp_new_i64();
7910 TCGv_i64 fp1
= tcg_temp_new_i64();
7911 TCGv_i64 fp2
= tcg_temp_new_i64();
7913 gen_load_fpr64(ctx
, fp0
, fs
);
7914 gen_load_fpr64(ctx
, fp1
, ft
);
7915 gen_load_fpr64(ctx
, fp2
, fr
);
7916 gen_helper_float_muladd_d(fp2
, fp0
, fp1
, fp2
);
7917 tcg_temp_free_i64(fp0
);
7918 tcg_temp_free_i64(fp1
);
7919 gen_store_fpr64(ctx
, fp2
, fd
);
7920 tcg_temp_free_i64(fp2
);
7925 check_cp1_64bitmode(ctx
);
7927 TCGv_i64 fp0
= tcg_temp_new_i64();
7928 TCGv_i64 fp1
= tcg_temp_new_i64();
7929 TCGv_i64 fp2
= tcg_temp_new_i64();
7931 gen_load_fpr64(ctx
, fp0
, fs
);
7932 gen_load_fpr64(ctx
, fp1
, ft
);
7933 gen_load_fpr64(ctx
, fp2
, fr
);
7934 gen_helper_float_muladd_ps(fp2
, fp0
, fp1
, fp2
);
7935 tcg_temp_free_i64(fp0
);
7936 tcg_temp_free_i64(fp1
);
7937 gen_store_fpr64(ctx
, fp2
, fd
);
7938 tcg_temp_free_i64(fp2
);
7945 TCGv_i32 fp0
= tcg_temp_new_i32();
7946 TCGv_i32 fp1
= tcg_temp_new_i32();
7947 TCGv_i32 fp2
= tcg_temp_new_i32();
7949 gen_load_fpr32(fp0
, fs
);
7950 gen_load_fpr32(fp1
, ft
);
7951 gen_load_fpr32(fp2
, fr
);
7952 gen_helper_float_mulsub_s(fp2
, fp0
, fp1
, fp2
);
7953 tcg_temp_free_i32(fp0
);
7954 tcg_temp_free_i32(fp1
);
7955 gen_store_fpr32(fp2
, fd
);
7956 tcg_temp_free_i32(fp2
);
7962 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7964 TCGv_i64 fp0
= tcg_temp_new_i64();
7965 TCGv_i64 fp1
= tcg_temp_new_i64();
7966 TCGv_i64 fp2
= tcg_temp_new_i64();
7968 gen_load_fpr64(ctx
, fp0
, fs
);
7969 gen_load_fpr64(ctx
, fp1
, ft
);
7970 gen_load_fpr64(ctx
, fp2
, fr
);
7971 gen_helper_float_mulsub_d(fp2
, fp0
, fp1
, fp2
);
7972 tcg_temp_free_i64(fp0
);
7973 tcg_temp_free_i64(fp1
);
7974 gen_store_fpr64(ctx
, fp2
, fd
);
7975 tcg_temp_free_i64(fp2
);
7980 check_cp1_64bitmode(ctx
);
7982 TCGv_i64 fp0
= tcg_temp_new_i64();
7983 TCGv_i64 fp1
= tcg_temp_new_i64();
7984 TCGv_i64 fp2
= tcg_temp_new_i64();
7986 gen_load_fpr64(ctx
, fp0
, fs
);
7987 gen_load_fpr64(ctx
, fp1
, ft
);
7988 gen_load_fpr64(ctx
, fp2
, fr
);
7989 gen_helper_float_mulsub_ps(fp2
, fp0
, fp1
, fp2
);
7990 tcg_temp_free_i64(fp0
);
7991 tcg_temp_free_i64(fp1
);
7992 gen_store_fpr64(ctx
, fp2
, fd
);
7993 tcg_temp_free_i64(fp2
);
8000 TCGv_i32 fp0
= tcg_temp_new_i32();
8001 TCGv_i32 fp1
= tcg_temp_new_i32();
8002 TCGv_i32 fp2
= tcg_temp_new_i32();
8004 gen_load_fpr32(fp0
, fs
);
8005 gen_load_fpr32(fp1
, ft
);
8006 gen_load_fpr32(fp2
, fr
);
8007 gen_helper_float_nmuladd_s(fp2
, fp0
, fp1
, fp2
);
8008 tcg_temp_free_i32(fp0
);
8009 tcg_temp_free_i32(fp1
);
8010 gen_store_fpr32(fp2
, fd
);
8011 tcg_temp_free_i32(fp2
);
8017 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8019 TCGv_i64 fp0
= tcg_temp_new_i64();
8020 TCGv_i64 fp1
= tcg_temp_new_i64();
8021 TCGv_i64 fp2
= tcg_temp_new_i64();
8023 gen_load_fpr64(ctx
, fp0
, fs
);
8024 gen_load_fpr64(ctx
, fp1
, ft
);
8025 gen_load_fpr64(ctx
, fp2
, fr
);
8026 gen_helper_float_nmuladd_d(fp2
, fp0
, fp1
, fp2
);
8027 tcg_temp_free_i64(fp0
);
8028 tcg_temp_free_i64(fp1
);
8029 gen_store_fpr64(ctx
, fp2
, fd
);
8030 tcg_temp_free_i64(fp2
);
8035 check_cp1_64bitmode(ctx
);
8037 TCGv_i64 fp0
= tcg_temp_new_i64();
8038 TCGv_i64 fp1
= tcg_temp_new_i64();
8039 TCGv_i64 fp2
= tcg_temp_new_i64();
8041 gen_load_fpr64(ctx
, fp0
, fs
);
8042 gen_load_fpr64(ctx
, fp1
, ft
);
8043 gen_load_fpr64(ctx
, fp2
, fr
);
8044 gen_helper_float_nmuladd_ps(fp2
, fp0
, fp1
, fp2
);
8045 tcg_temp_free_i64(fp0
);
8046 tcg_temp_free_i64(fp1
);
8047 gen_store_fpr64(ctx
, fp2
, fd
);
8048 tcg_temp_free_i64(fp2
);
8055 TCGv_i32 fp0
= tcg_temp_new_i32();
8056 TCGv_i32 fp1
= tcg_temp_new_i32();
8057 TCGv_i32 fp2
= tcg_temp_new_i32();
8059 gen_load_fpr32(fp0
, fs
);
8060 gen_load_fpr32(fp1
, ft
);
8061 gen_load_fpr32(fp2
, fr
);
8062 gen_helper_float_nmulsub_s(fp2
, fp0
, fp1
, fp2
);
8063 tcg_temp_free_i32(fp0
);
8064 tcg_temp_free_i32(fp1
);
8065 gen_store_fpr32(fp2
, fd
);
8066 tcg_temp_free_i32(fp2
);
8072 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8074 TCGv_i64 fp0
= tcg_temp_new_i64();
8075 TCGv_i64 fp1
= tcg_temp_new_i64();
8076 TCGv_i64 fp2
= tcg_temp_new_i64();
8078 gen_load_fpr64(ctx
, fp0
, fs
);
8079 gen_load_fpr64(ctx
, fp1
, ft
);
8080 gen_load_fpr64(ctx
, fp2
, fr
);
8081 gen_helper_float_nmulsub_d(fp2
, fp0
, fp1
, fp2
);
8082 tcg_temp_free_i64(fp0
);
8083 tcg_temp_free_i64(fp1
);
8084 gen_store_fpr64(ctx
, fp2
, fd
);
8085 tcg_temp_free_i64(fp2
);
8090 check_cp1_64bitmode(ctx
);
8092 TCGv_i64 fp0
= tcg_temp_new_i64();
8093 TCGv_i64 fp1
= tcg_temp_new_i64();
8094 TCGv_i64 fp2
= tcg_temp_new_i64();
8096 gen_load_fpr64(ctx
, fp0
, fs
);
8097 gen_load_fpr64(ctx
, fp1
, ft
);
8098 gen_load_fpr64(ctx
, fp2
, fr
);
8099 gen_helper_float_nmulsub_ps(fp2
, fp0
, fp1
, fp2
);
8100 tcg_temp_free_i64(fp0
);
8101 tcg_temp_free_i64(fp1
);
8102 gen_store_fpr64(ctx
, fp2
, fd
);
8103 tcg_temp_free_i64(fp2
);
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
               fregnames[fs], fregnames[ft]);
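/* Note: the cases between ALNV.PS and this default (shown above) are the
 * fused multiply-add group, MADD/MSUB/NMADD/NMSUB in S, D and PS formats;
 * each loads fs, ft and fr, calls the corresponding
 * gen_helper_float_{muladd,mulsub,nmuladd,nmulsub}_{s,d,ps} helper, and
 * stores the result in fd. */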
gen_rdhwr (CPUState *env, DisasContext *ctx, int rt, int rd)
    check_insn(env, ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_cpunum(t0);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_synci_step(t0);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_cc(t0);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_ccres(t0);
    gen_store_gpr(t0, rt);

#if defined(CONFIG_USER_ONLY)
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
    gen_store_gpr(t0, rt);
    /* XXX: Some CPUs implement this in hardware.
       Not supported yet. */
    default: /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
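/* Note: the rd values dispatched above correspond to the RDHWR hardware
 * register numbers: 0 (CPUNum), 1 (SYNCI step), 2 (CC cycle counter),
 * 3 (CCRes) and, in the elided case, 29, the user-local/TLS pointer, which
 * in user-mode emulation is read straight from CPUState.tls_value. */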
static void handle_delay_slot (CPUState *env, DisasContext *ctx,
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
            int l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
            gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            if (env->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_0i(raise_exception, EXCP_DEBUG);

            MIPS_DEBUG("unknown branch");
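/* Note: the case labels of the switch above are not visible in this excerpt;
 * they distinguish unconditional, branch-likely, conditional and register
 * branches via the MIPS_HFLAG_BMASK bits.  For register branches, bit 0 of
 * btarget presumably carries the target ISA mode (MIPS16/microMIPS vs.
 * MIPS32), so it is copied into MIPS_HFLAG_M16 of hflags and masked off the
 * PC before jumping. */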
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LWPC = 0x16,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */
/* RR funct field */
/* I64 funct field */

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
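/* Note: xlat() maps a 3-bit MIPS16 register number onto the MIPS32 GPR it
 * names, e.g. xlat(0) == 16 ($s0), xlat(1) == 17 ($s1) and xlat(2)..xlat(7)
 * are $v0, $v1, $a0..$a3; the elided body presumably just returns map[r]. */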
8352 static void gen_mips16_save (DisasContext
*ctx
,
8353 int xsregs
, int aregs
,
8354 int do_ra
, int do_s0
, int do_s1
,
8357 TCGv t0
= tcg_temp_new();
8358 TCGv t1
= tcg_temp_new();
8388 generate_exception(ctx
, EXCP_RI
);
8394 gen_base_offset_addr(ctx
, t0
, 29, 12);
8395 gen_load_gpr(t1
, 7);
8396 op_st_sw(t1
, t0
, ctx
);
8399 gen_base_offset_addr(ctx
, t0
, 29, 8);
8400 gen_load_gpr(t1
, 6);
8401 op_st_sw(t1
, t0
, ctx
);
8404 gen_base_offset_addr(ctx
, t0
, 29, 4);
8405 gen_load_gpr(t1
, 5);
8406 op_st_sw(t1
, t0
, ctx
);
8409 gen_base_offset_addr(ctx
, t0
, 29, 0);
8410 gen_load_gpr(t1
, 4);
8411 op_st_sw(t1
, t0
, ctx
);
8414 gen_load_gpr(t0
, 29);
8416 #define DECR_AND_STORE(reg) do { \
8417 tcg_gen_subi_tl(t0, t0, 4); \
8418 gen_load_gpr(t1, reg); \
8419 op_st_sw(t1, t0, ctx); \
8483 generate_exception(ctx
, EXCP_RI
);
8499 #undef DECR_AND_STORE
8501 tcg_gen_subi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8506 static void gen_mips16_restore (DisasContext
*ctx
,
8507 int xsregs
, int aregs
,
8508 int do_ra
, int do_s0
, int do_s1
,
8512 TCGv t0
= tcg_temp_new();
8513 TCGv t1
= tcg_temp_new();
8515 tcg_gen_addi_tl(t0
, cpu_gpr
[29], framesize
);
8517 #define DECR_AND_LOAD(reg) do { \
8518 tcg_gen_subi_tl(t0, t0, 4); \
8519 op_ld_lw(t1, t0, ctx); \
8520 gen_store_gpr(t1, reg); \
8584 generate_exception(ctx
, EXCP_RI
);
8600 #undef DECR_AND_LOAD
8602 tcg_gen_addi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception(ctx, EXCP_RI);

    t0 = tcg_temp_new();
    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
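/* Note: ADDIUPC adds the immediate to the PC-relative base returned by
 * pc_relative_pc(); when is_64_bit is not set the result is sign-extended
 * to 32 bits, matching ADDIU semantics. */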
8628 #if defined(TARGET_MIPS64)
8629 static void decode_i64_mips16 (CPUState
*env
, DisasContext
*ctx
,
8630 int ry
, int funct
, int16_t offset
,
8636 offset
= extended
? offset
: offset
<< 3;
8637 gen_ld(env
, ctx
, OPC_LD
, ry
, 29, offset
);
8641 offset
= extended
? offset
: offset
<< 3;
8642 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
8646 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
8647 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
8651 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
8652 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
8655 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8656 generate_exception(ctx
, EXCP_RI
);
8658 offset
= extended
? offset
: offset
<< 3;
8659 gen_ld(env
, ctx
, OPC_LDPC
, ry
, 0, offset
);
8664 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
8665 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
8669 offset
= extended
? offset
: offset
<< 2;
8670 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
8674 offset
= extended
? offset
: offset
<< 2;
8675 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
8681 static int decode_extended_mips16_opc (CPUState
*env
, DisasContext
*ctx
,
8684 int extend
= lduw_code(ctx
->pc
+ 2);
8685 int op
, rx
, ry
, funct
, sa
;
8686 int16_t imm
, offset
;
8688 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
8689 op
= (ctx
->opcode
>> 11) & 0x1f;
8690 sa
= (ctx
->opcode
>> 22) & 0x1f;
8691 funct
= (ctx
->opcode
>> 8) & 0x7;
8692 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8693 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8694 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
8695 | ((ctx
->opcode
>> 21) & 0x3f) << 5
8696 | (ctx
->opcode
& 0x1f));
8698 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
8701 case M16_OPC_ADDIUSP
:
8702 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8704 case M16_OPC_ADDIUPC
:
8705 gen_addiupc(ctx
, rx
, imm
, 0, 1);
8708 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
8709 /* No delay slot, so just process as a normal instruction */
8712 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
8713 /* No delay slot, so just process as a normal instruction */
8716 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
8717 /* No delay slot, so just process as a normal instruction */
8720 switch (ctx
->opcode
& 0x3) {
8722 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8725 #if defined(TARGET_MIPS64)
8727 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8729 generate_exception(ctx
, EXCP_RI
);
8733 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8736 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8740 #if defined(TARGET_MIPS64)
8743 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
);
8747 imm
= ctx
->opcode
& 0xf;
8748 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
8749 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
8750 imm
= (int16_t) (imm
<< 1) >> 1;
8751 if ((ctx
->opcode
>> 4) & 0x1) {
8752 #if defined(TARGET_MIPS64)
8754 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8756 generate_exception(ctx
, EXCP_RI
);
8759 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8762 case M16_OPC_ADDIU8
:
8763 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8766 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8769 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8774 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
8777 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
8780 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
8783 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
8787 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
8788 int aregs
= (ctx
->opcode
>> 16) & 0xf;
8789 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
8790 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
8791 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
8792 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
8793 | (ctx
->opcode
& 0xf)) << 3;
8795 if (ctx
->opcode
& (1 << 7)) {
8796 gen_mips16_save(ctx
, xsregs
, aregs
,
8797 do_ra
, do_s0
, do_s1
,
8800 gen_mips16_restore(ctx
, xsregs
, aregs
,
8801 do_ra
, do_s0
, do_s1
,
8807 generate_exception(ctx
, EXCP_RI
);
8812 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
8815 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
8817 #if defined(TARGET_MIPS64)
8819 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
8823 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
8826 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
);
8829 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, offset
);
8832 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
);
8835 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
8838 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
);
8841 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, offset
);
8843 #if defined(TARGET_MIPS64)
8845 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
);
8849 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
8852 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
8855 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
8858 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
8860 #if defined(TARGET_MIPS64)
8862 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
8866 generate_exception(ctx
, EXCP_RI
);
8873 static int decode_mips16_opc (CPUState
*env
, DisasContext
*ctx
,
8878 int op
, cnvt_op
, op1
, offset
;
8882 op
= (ctx
->opcode
>> 11) & 0x1f;
8883 sa
= (ctx
->opcode
>> 2) & 0x7;
8884 sa
= sa
== 0 ? 8 : sa
;
8885 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8886 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
8887 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8888 op1
= offset
= ctx
->opcode
& 0x1f;
8893 case M16_OPC_ADDIUSP
:
8895 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
8897 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8900 case M16_OPC_ADDIUPC
:
8901 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
8904 offset
= (ctx
->opcode
& 0x7ff) << 1;
8905 offset
= (int16_t)(offset
<< 4) >> 4;
8906 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
8907 /* No delay slot, so just process as a normal instruction */
8910 offset
= lduw_code(ctx
->pc
+ 2);
8911 offset
= (((ctx
->opcode
& 0x1f) << 21)
8912 | ((ctx
->opcode
>> 5) & 0x1f) << 16
8914 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
8915 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
8920 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8921 /* No delay slot, so just process as a normal instruction */
8924 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8925 /* No delay slot, so just process as a normal instruction */
8928 switch (ctx
->opcode
& 0x3) {
8930 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8933 #if defined(TARGET_MIPS64)
8935 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8937 generate_exception(ctx
, EXCP_RI
);
8941 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8944 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8948 #if defined(TARGET_MIPS64)
8951 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
8956 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
8958 if ((ctx
->opcode
>> 4) & 1) {
8959 #if defined(TARGET_MIPS64)
8961 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8963 generate_exception(ctx
, EXCP_RI
);
8966 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8970 case M16_OPC_ADDIU8
:
8972 int16_t imm
= (int8_t) ctx
->opcode
;
8974 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8979 int16_t imm
= (uint8_t) ctx
->opcode
;
8981 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8986 int16_t imm
= (uint8_t) ctx
->opcode
;
8988 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8995 funct
= (ctx
->opcode
>> 8) & 0x7;
8998 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
8999 ((int8_t)ctx
->opcode
) << 1);
9002 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
9003 ((int8_t)ctx
->opcode
) << 1);
9006 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
9009 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
9010 ((int8_t)ctx
->opcode
) << 3);
9014 int do_ra
= ctx
->opcode
& (1 << 6);
9015 int do_s0
= ctx
->opcode
& (1 << 5);
9016 int do_s1
= ctx
->opcode
& (1 << 4);
9017 int framesize
= ctx
->opcode
& 0xf;
9019 if (framesize
== 0) {
9022 framesize
= framesize
<< 3;
9025 if (ctx
->opcode
& (1 << 7)) {
9026 gen_mips16_save(ctx
, 0, 0,
9027 do_ra
, do_s0
, do_s1
, framesize
);
9029 gen_mips16_restore(ctx
, 0, 0,
9030 do_ra
, do_s0
, do_s1
, framesize
);
9036 int rz
= xlat(ctx
->opcode
& 0x7);
9038 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
9039 ((ctx
->opcode
>> 5) & 0x7);
9040 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
9044 reg32
= ctx
->opcode
& 0x1f;
9045 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
9048 generate_exception(ctx
, EXCP_RI
);
9055 int16_t imm
= (uint8_t) ctx
->opcode
;
9057 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
9062 int16_t imm
= (uint8_t) ctx
->opcode
;
9064 gen_logic_imm(env
, OPC_XORI
, 24, rx
, imm
);
9067 #if defined(TARGET_MIPS64)
9070 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
9074 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9077 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
9080 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9083 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
9086 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9089 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
9092 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
9094 #if defined (TARGET_MIPS64)
9097 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
9101 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9104 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
9107 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9110 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
9114 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
9117 switch (ctx
->opcode
& 0x3) {
9119 mips32_op
= OPC_ADDU
;
9122 mips32_op
= OPC_SUBU
;
9124 #if defined(TARGET_MIPS64)
9126 mips32_op
= OPC_DADDU
;
9130 mips32_op
= OPC_DSUBU
;
9135 generate_exception(ctx
, EXCP_RI
);
9139 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
9148 int nd
= (ctx
->opcode
>> 7) & 0x1;
9149 int link
= (ctx
->opcode
>> 6) & 0x1;
9150 int ra
= (ctx
->opcode
>> 5) & 0x1;
9153 op
= nd
? OPC_JALRC
: OPC_JALRS
;
9158 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
9165 /* XXX: not clear which exception should be raised
9166 * when in debug mode...
9168 check_insn(env
, ctx
, ISA_MIPS32
);
9169 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9170 generate_exception(ctx
, EXCP_DBp
);
9172 generate_exception(ctx
, EXCP_DBp
);
9176 gen_slt(env
, OPC_SLT
, 24, rx
, ry
);
9179 gen_slt(env
, OPC_SLTU
, 24, rx
, ry
);
9182 generate_exception(ctx
, EXCP_BREAK
);
9185 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
9188 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
9191 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
9193 #if defined (TARGET_MIPS64)
9196 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
9200 gen_logic(env
, OPC_XOR
, 24, rx
, ry
);
9203 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
9206 gen_logic(env
, OPC_AND
, rx
, rx
, ry
);
9209 gen_logic(env
, OPC_OR
, rx
, rx
, ry
);
9212 gen_logic(env
, OPC_XOR
, rx
, rx
, ry
);
9215 gen_logic(env
, OPC_NOR
, rx
, ry
, 0);
9218 gen_HILO(ctx
, OPC_MFHI
, rx
);
9222 case RR_RY_CNVT_ZEB
:
9223 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9225 case RR_RY_CNVT_ZEH
:
9226 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9228 case RR_RY_CNVT_SEB
:
9229 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9231 case RR_RY_CNVT_SEH
:
9232 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9234 #if defined (TARGET_MIPS64)
9235 case RR_RY_CNVT_ZEW
:
9237 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9239 case RR_RY_CNVT_SEW
:
9241 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9245 generate_exception(ctx
, EXCP_RI
);
9250 gen_HILO(ctx
, OPC_MFLO
, rx
);
9252 #if defined (TARGET_MIPS64)
9255 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
9259 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
9263 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
9267 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
9271 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
9274 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
9277 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
9280 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
9282 #if defined (TARGET_MIPS64)
9285 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
9289 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
9293 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
9297 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
9301 generate_exception(ctx
, EXCP_RI
);
9305 case M16_OPC_EXTEND
:
9306 decode_extended_mips16_opc(env
, ctx
, is_branch
);
9309 #if defined(TARGET_MIPS64)
9311 funct
= (ctx
->opcode
>> 8) & 0x7;
9312 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
9316 generate_exception(ctx
, EXCP_RI
);
9323 /* microMIPS extension to MIPS32 */
9325 /* microMIPS32 major opcodes */
9364 /* 0x20 is reserved */
9374 /* 0x28 and 0x29 are reserved */
9384 /* 0x30 and 0x31 are reserved */
9394 /* 0x38 and 0x39 are reserved */
9405 /* POOL32A encoding of minor opcode field */
9408 /* These opcodes are distinguished only by bits 9..6; those bits are
9409 * what are recorded below. */
9435 /* The following can be distinguished by their lower 6 bits. */
9441 /* POOL32AXF encoding of minor opcode field extension */
9455 /* bits 13..12 for 0x01 */
9461 /* bits 13..12 for 0x2a */
9467 /* bits 13..12 for 0x32 */
9471 /* bits 15..12 for 0x2c */
9487 /* bits 15..12 for 0x34 */
9495 /* bits 15..12 for 0x3c */
9497 JR
= 0x0, /* alias */
9502 /* bits 15..12 for 0x05 */
9506 /* bits 15..12 for 0x0d */
9516 /* bits 15..12 for 0x15 */
9522 /* bits 15..12 for 0x1d */
9526 /* bits 15..12 for 0x2d */
9531 /* bits 15..12 for 0x35 */
9538 /* POOL32B encoding of minor opcode field (bits 15..12) */
9554 /* POOL32C encoding of minor opcode field (bits 15..12) */
9562 /* 0xa is reserved */
9569 /* 0x6 is reserved */
9575 /* POOL32F encoding of minor opcode field (bits 5..0) */
9578 /* These are the bit 7..6 values */
9589 /* These are the bit 8..6 values */
9633 CABS_COND_FMT
= 0x1c, /* MIPS3D */
9637 /* POOL32Fxf encoding of minor opcode extension field */
9675 /* POOL32I encoding of minor opcode field (bits 25..21) */
9700 /* These overlap and are distinguished by bit16 of the instruction */
9709 /* POOL16A encoding of minor opcode field */
9716 /* POOL16B encoding of minor opcode field */
9723 /* POOL16C encoding of minor opcode field */
9743 /* POOL16D encoding of minor opcode field */
9750 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) << (32-width))   \
               >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
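/* Example: SIMM(op, 1, 4) extracts bits 4..1 of op and sign-extends them,
 * so an encoded field of 0xF yields -1, while ZIMM(op, 1, 4) on the same
 * field yields 15; gen_addius5() below relies on the signed form for its
 * 4-bit stack-adjust immediate. */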
9787 static void gen_addiur1sp (CPUState
*env
, DisasContext
*ctx
)
9789 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
9791 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
9794 static void gen_addiur2 (CPUState
*env
, DisasContext
*ctx
)
9796 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
9797 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
9798 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
9800 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
9803 static void gen_addiusp (CPUState *env, DisasContext *ctx)
9805 int encoded = ZIMM(ctx->opcode, 1, 9);
9809 decoded = 256 + encoded;
9810 } else if (encoded <= 255) {
9812 } else if (encoded <= 509) {
9813 decoded = encoded - 512;
9815 decoded = encoded - 768;
9818 gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, decoded << 2);
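/* Note (added): reading the surviving arms, the 9-bit field selects a stack adjustment of
   roughly -258..+257 words: encodings 0-1 decode to 256-257, 2-255 are taken as-is (in the
   elided branch), 256-509 wrap to negative values, and 510-511 decode to -258 and -257;
   the result is scaled by 4 before being added to $sp (register 29). */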
9821 static void gen_addius5 (CPUState *env, DisasContext *ctx)
9823 int imm = SIMM(ctx->opcode, 1, 4);
9824 int rd = (ctx->opcode >> 5) & 0x1f;
9826 gen_arith_imm(env, ctx, OPC_ADDIU, rd, rd, imm);
9829 static void gen_andi16 (CPUState *env, DisasContext *ctx)
9831 static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
9832 31, 32, 63, 64, 255, 32768, 65535 };
9833 int rd = mmreg(uMIPS_RD(ctx->opcode));
9834 int rs = mmreg(uMIPS_RS(ctx->opcode));
9835 int encoded = ZIMM(ctx->opcode, 0, 4);
9837 gen_logic_imm(env, OPC_ANDI, rd, rs, decoded_imm[encoded]);
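/* Note (added): the 4-bit encoded immediate indexes the table above, which holds commonly
   used AND masks (small values, powers of two, and masks such as 255, 32768 and 65535),
   so ANDI16 covers frequent cases without carrying a full 16-bit immediate. */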
9840 static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
9841 int base, int16_t offset)
9846 if (ctx->hflags & MIPS_HFLAG_BMASK) {
9847 generate_exception(ctx, EXCP_RI);
9851 t0 = tcg_temp_new();
9853 gen_base_offset_addr(ctx, t0, base, offset);
9855 t1 = tcg_const_tl(reglist);
9856 t2 = tcg_const_i32(ctx->mem_idx);
9858 save_cpu_state(ctx, 1);
9861 gen_helper_lwm(t0, t1, t2);
9864 gen_helper_swm(t0, t1, t2);
9866 #ifdef TARGET_MIPS64
9868 gen_helper_ldm(t0, t1, t2);
9871 gen_helper_sdm(t0, t1, t2);
9875 MIPS_DEBUG("%s, %x, %d(%s)", opn, reglist, offset, regnames[base]);
9878 tcg_temp_free_i32(t2);
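/* Note (added): the LWM32/SWM32 (and LDM/SDM on MIPS64) multi-register transfers are done
   by C helpers; the encoded register list and the current mem_idx are passed as TCG
   constants, and the instruction is reserved (RI above) inside a branch delay slot. */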
9882 static void gen_pool16c_insn (CPUState *env, DisasContext *ctx, int *is_branch)
9884 int rd = mmreg((ctx->opcode >> 3) & 0x7);
9885 int rs = mmreg(ctx->opcode & 0x7);
9888 switch (((ctx->opcode) >> 4) & 0x3f) {
9893 gen_logic(env, OPC_NOR, rd, rs, 0);
9899 gen_logic(env, OPC_XOR, rd, rd, rs);
9905 gen_logic(env, OPC_AND, rd, rd, rs);
9911 gen_logic(env, OPC_OR, rd, rd, rs);
9918 static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
9919 int offset = ZIMM(ctx->opcode, 0, 4);
9921 gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
9930 static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
9931 int offset = ZIMM(ctx->opcode, 0, 4);
9933 gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
9940 int reg = ctx->opcode & 0x1f;
9942 gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);
9949 int reg = ctx->opcode & 0x1f;
9951 gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);
9952 /* Let normal delay slot handling in our caller take us
9953 to the branch target. */
9965 int reg = ctx->opcode & 0x1f;
9967 gen_compute_branch(ctx, opc, 2, reg, 31, 0);
9973 gen_HILO(ctx, OPC_MFHI, uMIPS_RS5(ctx->opcode));
9977 gen_HILO(ctx, OPC_MFLO, uMIPS_RS5(ctx->opcode));
9980 generate_exception(ctx, EXCP_BREAK);
9983 /* XXX: not clear which exception should be raised
9984 * when in debug mode...
9986 check_insn(env, ctx, ISA_MIPS32);
9987 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
9988 generate_exception(ctx, EXCP_DBp);
9990 generate_exception(ctx, EXCP_DBp);
9996 int imm = ZIMM(ctx->opcode, 0, 5);
9998 gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0);
9999 gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, imm << 2);
10000 /* Let normal delay slot handling in our caller take us
10001 to the branch target. */
10005 generate_exception(ctx, EXCP_RI);
10010 static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
10012 TCGv t0 = tcg_temp_new();
10013 TCGv t1 = tcg_temp_new();
10015 gen_load_gpr(t0, base);
10018 gen_load_gpr(t1, index);
10019 tcg_gen_shli_tl(t1, t1, 2);
10020 gen_op_addr_add(ctx, t0, t1, t0);
10023 save_cpu_state(ctx, 0);
10024 op_ld_lw(t1, t0, ctx);
10025 gen_store_gpr(t1, rd);
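/* Note (added): LWXS loads the word at GPR[base] + (GPR[index] << 2) into GPR[rd];
   the elided lines that follow presumably just free the two temporaries. */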
10031 static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
10032 int base, int16_t offset)
10034 const char *opn = "ldst_pair";
10037 if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31 || rd == base) {
10038 generate_exception(ctx, EXCP_RI);
10042 t0 = tcg_temp_new();
10043 t1 = tcg_temp_new();
10045 gen_base_offset_addr(ctx, t0, base, offset);
10049 save_cpu_state(ctx, 0);
10050 op_ld_lw(t1, t0, ctx);
10051 gen_store_gpr(t1, rd);
10052 tcg_gen_movi_tl(t1, 4);
10053 gen_op_addr_add(ctx, t0, t0, t1);
10054 op_ld_lw(t1, t0, ctx);
10055 gen_store_gpr(t1, rd+1);
10059 save_cpu_state(ctx, 0);
10060 gen_load_gpr(t1, rd);
10061 op_st_sw(t1, t0, ctx);
10062 tcg_gen_movi_tl(t1, 4);
10063 gen_op_addr_add(ctx, t0, t0, t1);
10064 gen_load_gpr(t1, rd+1);
10065 op_st_sw(t1, t0, ctx);
10068 #ifdef TARGET_MIPS64
10070 save_cpu_state(ctx, 0);
10071 op_ld_ld(t1, t0, ctx);
10072 gen_store_gpr(t1, rd);
10073 tcg_gen_movi_tl(t1, 8);
10074 gen_op_addr_add(ctx, t0, t0, t1);
10075 op_ld_ld(t1, t0, ctx);
10076 gen_store_gpr(t1, rd+1);
10080 save_cpu_state(ctx, 0);
10081 gen_load_gpr(t1, rd);
10082 op_st_sd(t1, t0, ctx);
10083 tcg_gen_movi_tl(t1, 8);
10084 gen_op_addr_add(ctx, t0, t0, t1);
10085 gen_load_gpr(t1, rd+1);
10086 op_st_sd(t1, t0, ctx);
10091 (void)opn; /* avoid a compiler warning */
10092 MIPS_DEBUG("%s, %s, %d(%s)", opn, regnames[rd], offset, regnames[base]);
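/* Note (added): LWP/SWP (and LDP/SDP on MIPS64) transfer GPR[rd] and GPR[rd+1] to or from
   two consecutive words (doublewords) starting at base+offset; encodings with rd == 31 or
   rd == base, or placed in a branch delay slot, are rejected with RI above. */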
10097 static void gen_pool32axf (CPUState *env, DisasContext *ctx, int rt, int rs,
10100 int extension = (ctx->opcode >> 6) & 0x3f;
10101 int minor = (ctx->opcode >> 12) & 0xf;
10102 uint32_t mips32_op;
10104 switch (extension) {
10106 mips32_op = OPC_TEQ;
10109 mips32_op = OPC_TGE;
10112 mips32_op = OPC_TGEU;
10115 mips32_op = OPC_TLT;
10118 mips32_op = OPC_TLTU;
10121 mips32_op = OPC_TNE;
10123 gen_trap(ctx, mips32_op, rs, rt, -1);
10125 #ifndef CONFIG_USER_ONLY
10129 /* Treat as NOP. */
10132 gen_mfc0(env, ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);
10137 TCGv t0 = tcg_temp_new();
10139 gen_load_gpr(t0, rt);
10140 gen_mtc0(env, ctx, t0, rs, (ctx->opcode >> 11) & 0x7);
10148 gen_bshfl(ctx, OPC_SEB, rs, rt);
10151 gen_bshfl(ctx, OPC_SEH, rs, rt);
10154 mips32_op = OPC_CLO;
10157 mips32_op = OPC_CLZ;
10159 check_insn(env, ctx, ISA_MIPS32);
10160 gen_cl(ctx, mips32_op, rt, rs);
10163 gen_rdhwr(env, ctx, rt, rs);
10166 gen_bshfl(ctx, OPC_WSBH, rs, rt);
10169 mips32_op = OPC_MULT;
10172 mips32_op = OPC_MULTU;
10175 mips32_op = OPC_DIV;
10178 mips32_op = OPC_DIVU;
10181 mips32_op = OPC_MADD;
10184 mips32_op = OPC_MADDU;
10187 mips32_op = OPC_MSUB;
10190 mips32_op = OPC_MSUBU;
10192 check_insn(env, ctx, ISA_MIPS32);
10193 gen_muldiv(ctx, mips32_op, rs, rt);
10196 goto pool32axf_invalid;
10207 generate_exception_err(ctx, EXCP_CpU, 2);
10210 goto pool32axf_invalid;
10217 gen_compute_branch (ctx, OPC_JALR, 4, rs, rt, 0);
10222 gen_compute_branch (ctx, OPC_JALRS, 4, rs, rt, 0);
10226 goto pool32axf_invalid;
10232 check_insn(env, ctx, ISA_MIPS32R2);
10233 gen_load_srsgpr(rt, rs);
10236 check_insn(env, ctx, ISA_MIPS32R2);
10237 gen_store_srsgpr(rt, rs);
10240 goto pool32axf_invalid;
10243 #ifndef CONFIG_USER_ONLY
10247 mips32_op = OPC_TLBP;
10250 mips32_op = OPC_TLBR;
10253 mips32_op = OPC_TLBWI;
10256 mips32_op = OPC_TLBWR;
10259 mips32_op = OPC_WAIT;
10262 mips32_op = OPC_DERET;
10265 mips32_op = OPC_ERET;
10267 gen_cp0(env, ctx, mips32_op, rt, rs);
10270 goto pool32axf_invalid;
10277 TCGv t0 = tcg_temp_new();
10279 save_cpu_state(ctx, 1);
10281 gen_store_gpr(t0, rs);
10282 /* Stop translation as we may have switched the execution mode */
10283 ctx->bstate = BS_STOP;
10289 TCGv t0 = tcg_temp_new();
10291 save_cpu_state(ctx, 1);
10293 gen_store_gpr(t0, rs);
10294 /* Stop translation as we may have switched the execution mode */
10295 ctx->bstate = BS_STOP;
10300 goto pool32axf_invalid;
10310 generate_exception(ctx, EXCP_SYSCALL);
10311 ctx->bstate = BS_STOP;
10314 check_insn(env, ctx, ISA_MIPS32);
10315 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
10316 generate_exception(ctx, EXCP_DBp);
10318 generate_exception(ctx, EXCP_DBp);
10322 goto pool32axf_invalid;
10328 gen_HILO(ctx, OPC_MFHI, rs);
10331 gen_HILO(ctx, OPC_MFLO, rs);
10334 gen_HILO(ctx, OPC_MTHI, rs);
10337 gen_HILO(ctx, OPC_MTLO, rs);
10340 goto pool32axf_invalid;
10345 MIPS_INVAL("pool32axf");
10346 generate_exception(ctx, EXCP_RI);
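/* Note (added): POOL32AXF decodes on the 'extension' field (bits 11..6) with a secondary
   'minor' field (bits 15..12) for some groups, and forwards traps, CP0 moves, SEB/SEH/WSBH,
   CLO/CLZ, multiply/accumulate, TLB maintenance and HI/LO moves to the shared MIPS32
   code generators. */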
10351 /* Values for microMIPS fmt field. Variable-width, depending on which
10352 formats the instruction supports. */
10371 static void gen_pool32fxf (CPUState
*env
, DisasContext
*ctx
, int rt
, int rs
)
10373 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
10374 uint32_t mips32_op
;
10376 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
10377 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
10378 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
10380 switch (extension
) {
10381 case FLOAT_1BIT_FMT(CFC1
, 0):
10382 mips32_op
= OPC_CFC1
;
10384 case FLOAT_1BIT_FMT(CTC1
, 0):
10385 mips32_op
= OPC_CTC1
;
10387 case FLOAT_1BIT_FMT(MFC1
, 0):
10388 mips32_op
= OPC_MFC1
;
10390 case FLOAT_1BIT_FMT(MTC1
, 0):
10391 mips32_op
= OPC_MTC1
;
10393 case FLOAT_1BIT_FMT(MFHC1
, 0):
10394 mips32_op
= OPC_MFHC1
;
10396 case FLOAT_1BIT_FMT(MTHC1
, 0):
10397 mips32_op
= OPC_MTHC1
;
10399 gen_cp1(ctx
, mips32_op
, rt
, rs
);
10402 /* Reciprocal square root */
10403 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
10404 mips32_op
= OPC_RSQRT_S
;
10406 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
10407 mips32_op
= OPC_RSQRT_D
;
10411 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
10412 mips32_op
= OPC_SQRT_S
;
10414 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
10415 mips32_op
= OPC_SQRT_D
;
10419 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
10420 mips32_op
= OPC_RECIP_S
;
10422 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
10423 mips32_op
= OPC_RECIP_D
;
10427 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
10428 mips32_op
= OPC_FLOOR_L_S
;
10430 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
10431 mips32_op
= OPC_FLOOR_L_D
;
10433 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
10434 mips32_op
= OPC_FLOOR_W_S
;
10436 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
10437 mips32_op
= OPC_FLOOR_W_D
;
10441 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
10442 mips32_op
= OPC_CEIL_L_S
;
10444 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
10445 mips32_op
= OPC_CEIL_L_D
;
10447 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
10448 mips32_op
= OPC_CEIL_W_S
;
10450 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
10451 mips32_op
= OPC_CEIL_W_D
;
10455 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
10456 mips32_op
= OPC_TRUNC_L_S
;
10458 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
10459 mips32_op
= OPC_TRUNC_L_D
;
10461 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
10462 mips32_op
= OPC_TRUNC_W_S
;
10464 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
10465 mips32_op
= OPC_TRUNC_W_D
;
10469 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
10470 mips32_op
= OPC_ROUND_L_S
;
10472 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
10473 mips32_op
= OPC_ROUND_L_D
;
10475 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
10476 mips32_op
= OPC_ROUND_W_S
;
10478 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
10479 mips32_op
= OPC_ROUND_W_D
;
10482 /* Integer to floating-point conversion */
10483 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
10484 mips32_op
= OPC_CVT_L_S
;
10486 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
10487 mips32_op
= OPC_CVT_L_D
;
10489 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
10490 mips32_op
= OPC_CVT_W_S
;
10492 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
10493 mips32_op
= OPC_CVT_W_D
;
10496 /* Paired-foo conversions */
10497 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
10498 mips32_op
= OPC_CVT_S_PL
;
10500 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
10501 mips32_op
= OPC_CVT_S_PU
;
10503 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
10504 mips32_op
= OPC_CVT_PW_PS
;
10506 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
10507 mips32_op
= OPC_CVT_PS_PW
;
10510 /* Floating-point moves */
10511 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
10512 mips32_op
= OPC_MOV_S
;
10514 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
10515 mips32_op
= OPC_MOV_D
;
10517 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
10518 mips32_op
= OPC_MOV_PS
;
10521 /* Absolute value */
10522 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
10523 mips32_op
= OPC_ABS_S
;
10525 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
10526 mips32_op
= OPC_ABS_D
;
10528 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
10529 mips32_op
= OPC_ABS_PS
;
10533 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
10534 mips32_op
= OPC_NEG_S
;
10536 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
10537 mips32_op
= OPC_NEG_D
;
10539 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
10540 mips32_op
= OPC_NEG_PS
;
10543 /* Reciprocal square root step */
10544 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
10545 mips32_op
= OPC_RSQRT1_S
;
10547 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
10548 mips32_op
= OPC_RSQRT1_D
;
10550 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
10551 mips32_op
= OPC_RSQRT1_PS
;
10554 /* Reciprocal step */
10555 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
10556 mips32_op
= OPC_RECIP1_S
;
10558 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
10559 mips32_op
= OPC_RECIP1_S
;
10561 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
10562 mips32_op
= OPC_RECIP1_PS
;
10565 /* Conversions from double */
10566 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
10567 mips32_op
= OPC_CVT_D_S
;
10569 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
10570 mips32_op
= OPC_CVT_D_W
;
10572 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
10573 mips32_op
= OPC_CVT_D_L
;
10576 /* Conversions from single */
10577 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
10578 mips32_op
= OPC_CVT_S_D
;
10580 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
10581 mips32_op
= OPC_CVT_S_W
;
10583 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
10584 mips32_op
= OPC_CVT_S_L
;
10586 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
10589 /* Conditional moves on floating-point codes */
10590 case COND_FLOAT_MOV(MOVT
, 0):
10591 case COND_FLOAT_MOV(MOVT
, 1):
10592 case COND_FLOAT_MOV(MOVT
, 2):
10593 case COND_FLOAT_MOV(MOVT
, 3):
10594 case COND_FLOAT_MOV(MOVT
, 4):
10595 case COND_FLOAT_MOV(MOVT
, 5):
10596 case COND_FLOAT_MOV(MOVT
, 6):
10597 case COND_FLOAT_MOV(MOVT
, 7):
10598 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
10600 case COND_FLOAT_MOV(MOVF
, 0):
10601 case COND_FLOAT_MOV(MOVF
, 1):
10602 case COND_FLOAT_MOV(MOVF
, 2):
10603 case COND_FLOAT_MOV(MOVF
, 3):
10604 case COND_FLOAT_MOV(MOVF
, 4):
10605 case COND_FLOAT_MOV(MOVF
, 5):
10606 case COND_FLOAT_MOV(MOVF
, 6):
10607 case COND_FLOAT_MOV(MOVF
, 7):
10608 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
10611 MIPS_INVAL("pool32fxf");
10612 generate_exception(ctx
, EXCP_RI
);
10617 static void decode_micromips32_opc (CPUState
*env
, DisasContext
*ctx
,
10618 uint16_t insn_hw1
, int *is_branch
)
10622 int rt
, rs
, rd
, rr
;
10624 uint32_t op
, minor
, mips32_op
;
10625 uint32_t cond
, fmt
, cc
;
10627 insn
= lduw_code(ctx
->pc
+ 2);
10628 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
10630 rt
= (ctx
->opcode
>> 21) & 0x1f;
10631 rs
= (ctx
->opcode
>> 16) & 0x1f;
10632 rd
= (ctx
->opcode
>> 11) & 0x1f;
10633 rr
= (ctx
->opcode
>> 6) & 0x1f;
10634 imm
= (int16_t) ctx
->opcode
;
10636 op
= (ctx
->opcode
>> 26) & 0x3f;
10639 minor
= ctx
->opcode
& 0x3f;
10642 minor
= (ctx
->opcode
>> 6) & 0xf;
10645 mips32_op
= OPC_SLL
;
10648 mips32_op
= OPC_SRA
;
10651 mips32_op
= OPC_SRL
;
10654 mips32_op
= OPC_ROTR
;
10656 gen_shift_imm(env
, ctx
, mips32_op
, rt
, rs
, rd
);
10659 goto pool32a_invalid
;
10663 minor
= (ctx
->opcode
>> 6) & 0xf;
10667 mips32_op
= OPC_ADD
;
10670 mips32_op
= OPC_ADDU
;
10673 mips32_op
= OPC_SUB
;
10676 mips32_op
= OPC_SUBU
;
10679 mips32_op
= OPC_MUL
;
10681 gen_arith(env
, ctx
, mips32_op
, rd
, rs
, rt
);
10685 mips32_op
= OPC_SLLV
;
10688 mips32_op
= OPC_SRLV
;
10691 mips32_op
= OPC_SRAV
;
10694 mips32_op
= OPC_ROTRV
;
10696 gen_shift(env
, ctx
, mips32_op
, rd
, rs
, rt
);
10698 /* Logical operations */
10700 mips32_op
= OPC_AND
;
10703 mips32_op
= OPC_OR
;
10706 mips32_op
= OPC_NOR
;
10709 mips32_op
= OPC_XOR
;
10711 gen_logic(env
, mips32_op
, rd
, rs
, rt
);
10713 /* Set less than */
10715 mips32_op
= OPC_SLT
;
10718 mips32_op
= OPC_SLTU
;
10720 gen_slt(env
, mips32_op
, rd
, rs
, rt
);
10723 goto pool32a_invalid
;
10727 minor
= (ctx
->opcode
>> 6) & 0xf;
10729 /* Conditional moves */
10731 mips32_op
= OPC_MOVN
;
10734 mips32_op
= OPC_MOVZ
;
10736 gen_cond_move(env
, mips32_op
, rd
, rs
, rt
);
10739 gen_ldxs(ctx
, rs
, rt
, rd
);
10742 goto pool32a_invalid
;
10746 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
10749 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
10752 gen_pool32axf(env
, ctx
, rt
, rs
, is_branch
);
10755 generate_exception(ctx
, EXCP_BREAK
);
10759 MIPS_INVAL("pool32a");
10760 generate_exception(ctx
, EXCP_RI
);
10765 minor
= (ctx
->opcode
>> 12) & 0xf;
10768 /* Treat as no-op. */
10772 /* COP2: Not implemented. */
10773 generate_exception_err(ctx
, EXCP_CpU
, 2);
10777 #ifdef TARGET_MIPS64
10781 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
10785 #ifdef TARGET_MIPS64
10789 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
10792 MIPS_INVAL("pool32b");
10793 generate_exception(ctx
, EXCP_RI
);
10798 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
10799 minor
= ctx
->opcode
& 0x3f;
10800 check_cp1_enabled(ctx
);
10803 mips32_op
= OPC_ALNV_PS
;
10806 mips32_op
= OPC_MADD_S
;
10809 mips32_op
= OPC_MADD_D
;
10812 mips32_op
= OPC_MADD_PS
;
10815 mips32_op
= OPC_MSUB_S
;
10818 mips32_op
= OPC_MSUB_D
;
10821 mips32_op
= OPC_MSUB_PS
;
10824 mips32_op
= OPC_NMADD_S
;
10827 mips32_op
= OPC_NMADD_D
;
10830 mips32_op
= OPC_NMADD_PS
;
10833 mips32_op
= OPC_NMSUB_S
;
10836 mips32_op
= OPC_NMSUB_D
;
10839 mips32_op
= OPC_NMSUB_PS
;
10841 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
10843 case CABS_COND_FMT
:
10844 cond
= (ctx
->opcode
>> 6) & 0xf;
10845 cc
= (ctx
->opcode
>> 13) & 0x7;
10846 fmt
= (ctx
->opcode
>> 10) & 0x3;
10849 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
10852 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
10855 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
10858 goto pool32f_invalid
;
10862 cond
= (ctx
->opcode
>> 6) & 0xf;
10863 cc
= (ctx
->opcode
>> 13) & 0x7;
10864 fmt
= (ctx
->opcode
>> 10) & 0x3;
10867 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
10870 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
10873 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
10876 goto pool32f_invalid
;
10880 gen_pool32fxf(env
, ctx
, rt
, rs
);
10884 switch ((ctx
->opcode
>> 6) & 0x7) {
10886 mips32_op
= OPC_PLL_PS
;
10889 mips32_op
= OPC_PLU_PS
;
10892 mips32_op
= OPC_PUL_PS
;
10895 mips32_op
= OPC_PUU_PS
;
10898 mips32_op
= OPC_CVT_PS_S
;
10900 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
10903 goto pool32f_invalid
;
10908 switch ((ctx
->opcode
>> 6) & 0x7) {
10910 mips32_op
= OPC_LWXC1
;
10913 mips32_op
= OPC_SWXC1
;
10916 mips32_op
= OPC_LDXC1
;
10919 mips32_op
= OPC_SDXC1
;
10922 mips32_op
= OPC_LUXC1
;
10925 mips32_op
= OPC_SUXC1
;
10927 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
10930 goto pool32f_invalid
;
10935 fmt
= (ctx
->opcode
>> 9) & 0x3;
10936 switch ((ctx
->opcode
>> 6) & 0x7) {
10940 mips32_op
= OPC_RSQRT2_S
;
10943 mips32_op
= OPC_RSQRT2_D
;
10946 mips32_op
= OPC_RSQRT2_PS
;
10949 goto pool32f_invalid
;
10955 mips32_op
= OPC_RECIP2_S
;
10958 mips32_op
= OPC_RECIP2_D
;
10961 mips32_op
= OPC_RECIP2_PS
;
10964 goto pool32f_invalid
;
10968 mips32_op
= OPC_ADDR_PS
;
10971 mips32_op
= OPC_MULR_PS
;
10973 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
10976 goto pool32f_invalid
;
10980 /* MOV[FT].fmt and PREFX */
10981 cc
= (ctx
->opcode
>> 13) & 0x7;
10982 fmt
= (ctx
->opcode
>> 9) & 0x3;
10983 switch ((ctx
->opcode
>> 6) & 0x7) {
10987 gen_movcf_s(rs
, rt
, cc
, 0);
10990 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
10993 gen_movcf_ps(rs
, rt
, cc
, 0);
10996 goto pool32f_invalid
;
11002 gen_movcf_s(rs
, rt
, cc
, 1);
11005 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
11008 gen_movcf_ps(rs
, rt
, cc
, 1);
11011 goto pool32f_invalid
;
11017 goto pool32f_invalid
;
11020 #define FINSN_3ARG_SDPS(prfx) \
11021 switch ((ctx->opcode >> 8) & 0x3) { \
11023 mips32_op = OPC_##prfx##_S; \
11026 mips32_op = OPC_##prfx##_D; \
11028 case FMT_SDPS_PS: \
11029 mips32_op = OPC_##prfx##_PS; \
11032 goto pool32f_invalid; \
11035 /* regular FP ops */
11036 switch ((ctx
->opcode
>> 6) & 0x3) {
11038 FINSN_3ARG_SDPS(ADD
);
11041 FINSN_3ARG_SDPS(SUB
);
11044 FINSN_3ARG_SDPS(MUL
);
11047 fmt
= (ctx
->opcode
>> 8) & 0x3;
11049 mips32_op
= OPC_DIV_D
;
11050 } else if (fmt
== 0) {
11051 mips32_op
= OPC_DIV_S
;
11053 goto pool32f_invalid
;
11057 goto pool32f_invalid
;
11062 switch ((ctx
->opcode
>> 6) & 0x3) {
11064 FINSN_3ARG_SDPS(MOVN
);
11067 FINSN_3ARG_SDPS(MOVZ
);
11070 goto pool32f_invalid
;
11074 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11078 MIPS_INVAL("pool32f");
11079 generate_exception(ctx
, EXCP_RI
);
11083 generate_exception_err(ctx
, EXCP_CpU
, 1);
11087 minor
= (ctx
->opcode
>> 21) & 0x1f;
11090 mips32_op
= OPC_BLTZ
;
11093 mips32_op
= OPC_BLTZAL
;
11096 mips32_op
= OPC_BLTZALS
;
11099 mips32_op
= OPC_BGEZ
;
11102 mips32_op
= OPC_BGEZAL
;
11105 mips32_op
= OPC_BGEZALS
;
11108 mips32_op
= OPC_BLEZ
;
11111 mips32_op
= OPC_BGTZ
;
11113 gen_compute_branch(ctx
, mips32_op
, 4, rs
, -1, imm
<< 1);
11119 mips32_op
= OPC_TLTI
;
11122 mips32_op
= OPC_TGEI
;
11125 mips32_op
= OPC_TLTIU
;
11128 mips32_op
= OPC_TGEIU
;
11131 mips32_op
= OPC_TNEI
;
11134 mips32_op
= OPC_TEQI
;
11136 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
11141 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
11142 4, rs
, 0, imm
<< 1);
11143 /* Compact branches don't have a delay slot, so just let
11144 the normal delay slot handling take us to the branch
11148 gen_logic_imm(env
, OPC_LUI
, rs
, -1, imm
);
11154 /* COP2: Not implemented. */
11155 generate_exception_err(ctx
, EXCP_CpU
, 2);
11158 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
11161 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
11164 mips32_op
= OPC_BC1FANY4
;
11167 mips32_op
= OPC_BC1TANY4
;
11170 check_insn(env
, ctx
, ASE_MIPS3D
);
11173 gen_compute_branch1(env
, ctx
, mips32_op
,
11174 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
11179 /* MIPS DSP: not implemented */
11182 MIPS_INVAL("pool32i");
11183 generate_exception(ctx
, EXCP_RI
);
11188 minor
= (ctx
->opcode
>> 12) & 0xf;
11191 mips32_op
= OPC_LWL
;
11194 mips32_op
= OPC_SWL
;
11197 mips32_op
= OPC_LWR
;
11200 mips32_op
= OPC_SWR
;
11202 #if defined(TARGET_MIPS64)
11204 mips32_op
= OPC_LDL
;
11207 mips32_op
= OPC_SDL
;
11210 mips32_op
= OPC_LDR
;
11213 mips32_op
= OPC_SDR
;
11216 mips32_op
= OPC_LWU
;
11219 mips32_op
= OPC_LLD
;
11223 mips32_op
= OPC_LL
;
11226 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11229 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11232 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11234 #if defined(TARGET_MIPS64)
11236 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11240 /* Treat as no-op */
11243 MIPS_INVAL("pool32c");
11244 generate_exception(ctx
, EXCP_RI
);
11249 mips32_op
= OPC_ADDI
;
11252 mips32_op
= OPC_ADDIU
;
11254 gen_arith_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11257 /* Logical operations */
11259 mips32_op
= OPC_ORI
;
11262 mips32_op
= OPC_XORI
;
11265 mips32_op
= OPC_ANDI
;
11267 gen_logic_imm(env
, mips32_op
, rt
, rs
, imm
);
11270 /* Set less than immediate */
11272 mips32_op
= OPC_SLTI
;
11275 mips32_op
= OPC_SLTIU
;
11277 gen_slt_imm(env
, mips32_op
, rt
, rs
, imm
);
11280 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
11281 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
);
11285 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
11286 gen_compute_branch(ctx
, OPC_JALS
, 4, rt
, rs
, offset
);
11290 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1);
11294 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1);
11298 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
11299 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11303 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
11304 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11307 /* Floating point (COP1) */
11309 mips32_op
= OPC_LWC1
;
11312 mips32_op
= OPC_LDC1
;
11315 mips32_op
= OPC_SWC1
;
11318 mips32_op
= OPC_SDC1
;
11320 gen_cop1_ldst(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11324 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
11325 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
11327 gen_addiupc(ctx
, reg
, offset
, 0, 0);
11330 /* Loads and stores */
11332 mips32_op
= OPC_LB
;
11335 mips32_op
= OPC_LBU
;
11338 mips32_op
= OPC_LH
;
11341 mips32_op
= OPC_LHU
;
11344 mips32_op
= OPC_LW
;
11346 #ifdef TARGET_MIPS64
11348 mips32_op
= OPC_LD
;
11351 mips32_op
= OPC_SD
;
11355 mips32_op
= OPC_SB
;
11358 mips32_op
= OPC_SH
;
11361 mips32_op
= OPC_SW
;
11364 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11367 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
11370 generate_exception(ctx
, EXCP_RI
);
11375 static int decode_micromips_opc (CPUState *env, DisasContext *ctx, int *is_branch)
11379 /* make sure instructions are on a halfword boundary */
11380 if (ctx->pc & 0x1) {
11381 env->CP0_BadVAddr = ctx->pc;
11382 generate_exception(ctx, EXCP_AdEL);
11383 ctx->bstate = BS_STOP;
11387 op = (ctx->opcode >> 10) & 0x3f;
11388 /* Enforce properly-sized instructions in a delay slot */
11389 if (ctx->hflags & MIPS_HFLAG_BMASK) {
11390 int bits = ctx->hflags & MIPS_HFLAG_BMASK_EXT;
11424 case POOL48A: /* ??? */
11429 if (bits & MIPS_HFLAG_BDS16) {
11430 generate_exception(ctx, EXCP_RI);
11431 /* Just stop translation; the user is confused. */
11432 ctx->bstate = BS_STOP;
11457 if (bits & MIPS_HFLAG_BDS32) {
11458 generate_exception(ctx, EXCP_RI);
11459 /* Just stop translation; the user is confused. */
11460 ctx->bstate = BS_STOP;
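/* Note (added): the branch that owns the delay slot records the required slot size in
   hflags (MIPS_HFLAG_BDS16 vs MIPS_HFLAG_BDS32); if the instruction that actually follows
   belongs to the other size class it is rejected with RI and translation stops. */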
11471 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11472 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
11473 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
11476 switch (ctx
->opcode
& 0x1) {
11485 gen_arith(env
, ctx
, opc
, rd
, rs1
, rs2
);
11490 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11491 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
11492 int amount
= (ctx
->opcode
>> 1) & 0x7;
11494 amount
= amount
== 0 ? 8 : amount
;
11496 switch (ctx
->opcode
& 0x1) {
11505 gen_shift_imm(env
, ctx
, opc
, rd
, rs
, amount
);
11509 gen_pool16c_insn(env
, ctx
, is_branch
);
11513 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11514 int rb
= 28; /* GP */
11515 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
11517 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11521 if (ctx
->opcode
& 1) {
11522 generate_exception(ctx
, EXCP_RI
);
11525 int enc_dest
= uMIPS_RD(ctx
->opcode
);
11526 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
11527 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
11528 int rd
, rs
, re
, rt
;
11529 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
11530 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
11531 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
11533 rd
= rd_enc
[enc_dest
];
11534 re
= re_enc
[enc_dest
];
11535 rs
= rs_rt_enc
[enc_rs
];
11536 rt
= rs_rt_enc
[enc_rt
];
11538 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
11539 gen_arith_imm(env
, ctx
, OPC_ADDIU
, re
, rt
, 0);
11544 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11545 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11546 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11547 offset
= (offset
== 0xf ? -1 : offset
);
11549 gen_ld(env
, ctx
, OPC_LBU
, rd
, rb
, offset
);
11554 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11555 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11556 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
11558 gen_ld(env
, ctx
, OPC_LHU
, rd
, rb
, offset
);
11563 int rd
= (ctx
->opcode
>> 5) & 0x1f;
11564 int rb
= 29; /* SP */
11565 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
11567 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11572 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11573 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11574 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
11576 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11581 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
11582 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11583 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11585 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
11590 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
11591 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11592 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
11594 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
11599 int rd
= (ctx
->opcode
>> 5) & 0x1f;
11600 int rb
= 29; /* SP */
11601 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
11603 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
11608 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
11609 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11610 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
11612 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
11617 int rd
= uMIPS_RD5(ctx
->opcode
);
11618 int rs
= uMIPS_RS5(ctx
->opcode
);
11620 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
11624 gen_andi16(env
, ctx
);
11627 switch (ctx
->opcode
& 0x1) {
11629 gen_addius5(env
, ctx
);
11632 gen_addiusp(env
, ctx
);
11637 switch (ctx
->opcode
& 0x1) {
11639 gen_addiur2(env
, ctx
);
11642 gen_addiur1sp(env
, ctx
);
11647 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
11648 SIMM(ctx
->opcode
, 0, 10) << 1);
11653 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
11654 mmreg(uMIPS_RD(ctx
->opcode
)),
11655 0, SIMM(ctx
->opcode
, 0, 7) << 1);
11660 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
11661 int imm
= ZIMM(ctx
->opcode
, 0, 7);
11663 imm
= (imm
== 0x7f ? -1 : imm
);
11664 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
11674 generate_exception(ctx
, EXCP_RI
);
11677 decode_micromips32_opc (env
, ctx
, op
, is_branch
);
11684 /* SmartMIPS extension to MIPS32 */
11686 #if defined(TARGET_MIPS64)
11688 /* MDMX extension to MIPS64 */
11692 static void decode_opc (CPUState *env, DisasContext *ctx, int *is_branch)
11695 int rs, rt, rd, sa;
11696 uint32_t op, op1, op2;
11699 /* make sure instructions are on a word boundary */
11700 if (ctx->pc & 0x3) {
11701 env->CP0_BadVAddr = ctx->pc;
11702 generate_exception(ctx, EXCP_AdEL);
11706 /* Handle blikely not taken case */
11707 if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
11708 int l1 = gen_new_label();
11710 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
11711 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
11712 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
11713 gen_goto_tb(ctx, 1, ctx->pc + 4);
11717 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
11718 tcg_gen_debug_insn_start(ctx->pc);
11720 op = MASK_OP_MAJOR(ctx->opcode);
11721 rs = (ctx->opcode >> 21) & 0x1f;
11722 rt = (ctx->opcode >> 16) & 0x1f;
11723 rd = (ctx->opcode >> 11) & 0x1f;
11724 sa = (ctx->opcode >> 6) & 0x1f;
11725 imm = (int16_t)ctx->opcode;
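/* Note (added): for branch-likely the delay slot is annulled on the not-taken path, so the
   code above tests bcond before translating the slot: if it is zero the branch-related
   hflags are cleared and execution jumps straight to pc + 4, skipping the slot entirely. */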
11728 op1
= MASK_SPECIAL(ctx
->opcode
);
11730 case OPC_SLL
: /* Shift with immediate */
11732 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
11735 switch ((ctx
->opcode
>> 21) & 0x1f) {
11737 /* rotr is decoded as srl on non-R2 CPUs */
11738 if (env
->insn_flags
& ISA_MIPS32R2
) {
11743 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
11746 generate_exception(ctx
, EXCP_RI
);
11750 case OPC_MOVN
: /* Conditional move */
11752 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
|
11753 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
11754 gen_cond_move(env
, op1
, rd
, rs
, rt
);
11756 case OPC_ADD
... OPC_SUBU
:
11757 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
11759 case OPC_SLLV
: /* Shifts */
11761 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
11764 switch ((ctx
->opcode
>> 6) & 0x1f) {
11766 /* rotrv is decoded as srlv on non-R2 CPUs */
11767 if (env
->insn_flags
& ISA_MIPS32R2
) {
11772 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
11775 generate_exception(ctx
, EXCP_RI
);
11779 case OPC_SLT
: /* Set on less than */
11781 gen_slt(env
, op1
, rd
, rs
, rt
);
11783 case OPC_AND
: /* Logic*/
11787 gen_logic(env
, op1
, rd
, rs
, rt
);
11789 case OPC_MULT
... OPC_DIVU
:
11791 check_insn(env
, ctx
, INSN_VR54XX
);
11792 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
11793 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
11795 gen_muldiv(ctx
, op1
, rs
, rt
);
11797 case OPC_JR
... OPC_JALR
:
11798 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
);
11801 case OPC_TGE
... OPC_TEQ
: /* Traps */
11803 gen_trap(ctx
, op1
, rs
, rt
, -1);
11805 case OPC_MFHI
: /* Move from HI/LO */
11807 gen_HILO(ctx
, op1
, rd
);
11810 case OPC_MTLO
: /* Move to HI/LO */
11811 gen_HILO(ctx
, op1
, rs
);
11813 case OPC_PMON
: /* Pmon entry point, also R4010 selsl */
11814 #ifdef MIPS_STRICT_STANDARD
11815 MIPS_INVAL("PMON / selsl");
11816 generate_exception(ctx
, EXCP_RI
);
11818 gen_helper_0i(pmon
, sa
);
11822 generate_exception(ctx
, EXCP_SYSCALL
);
11823 ctx
->bstate
= BS_STOP
;
11826 generate_exception(ctx
, EXCP_BREAK
);
11829 #ifdef MIPS_STRICT_STANDARD
11830 MIPS_INVAL("SPIM");
11831 generate_exception(ctx
, EXCP_RI
);
11833 /* Implemented as RI exception for now. */
11834 MIPS_INVAL("spim (unofficial)");
11835 generate_exception(ctx
, EXCP_RI
);
11839 /* Treat as NOP. */
11843 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
11844 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
11845 check_cp1_enabled(ctx
);
11846 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
11847 (ctx
->opcode
>> 16) & 1);
11849 generate_exception_err(ctx
, EXCP_CpU
, 1);
11853 #if defined(TARGET_MIPS64)
11854 /* MIPS64 specific opcodes */
11859 check_insn(env
, ctx
, ISA_MIPS3
);
11860 check_mips_64(ctx
);
11861 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
11864 switch ((ctx
->opcode
>> 21) & 0x1f) {
11866 /* drotr is decoded as dsrl on non-R2 CPUs */
11867 if (env
->insn_flags
& ISA_MIPS32R2
) {
11872 check_insn(env
, ctx
, ISA_MIPS3
);
11873 check_mips_64(ctx
);
11874 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
11877 generate_exception(ctx
, EXCP_RI
);
11882 switch ((ctx
->opcode
>> 21) & 0x1f) {
11884 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
11885 if (env
->insn_flags
& ISA_MIPS32R2
) {
11890 check_insn(env
, ctx
, ISA_MIPS3
);
11891 check_mips_64(ctx
);
11892 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
11895 generate_exception(ctx
, EXCP_RI
);
11899 case OPC_DADD
... OPC_DSUBU
:
11900 check_insn(env
, ctx
, ISA_MIPS3
);
11901 check_mips_64(ctx
);
11902 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
11906 check_insn(env
, ctx
, ISA_MIPS3
);
11907 check_mips_64(ctx
);
11908 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
11911 switch ((ctx
->opcode
>> 6) & 0x1f) {
11913 /* drotrv is decoded as dsrlv on non-R2 CPUs */
11914 if (env
->insn_flags
& ISA_MIPS32R2
) {
11919 check_insn(env
, ctx
, ISA_MIPS3
);
11920 check_mips_64(ctx
);
11921 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
11924 generate_exception(ctx
, EXCP_RI
);
11928 case OPC_DMULT
... OPC_DDIVU
:
11929 check_insn(env
, ctx
, ISA_MIPS3
);
11930 check_mips_64(ctx
);
11931 gen_muldiv(ctx
, op1
, rs
, rt
);
11934 default: /* Invalid */
11935 MIPS_INVAL("special");
11936 generate_exception(ctx
, EXCP_RI
);
11941 op1
= MASK_SPECIAL2(ctx
->opcode
);
11943 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
11944 case OPC_MSUB
... OPC_MSUBU
:
11945 check_insn(env
, ctx
, ISA_MIPS32
);
11946 gen_muldiv(ctx
, op1
, rs
, rt
);
11949 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
11953 check_insn(env
, ctx
, ISA_MIPS32
);
11954 gen_cl(ctx
, op1
, rd
, rs
);
11957 /* XXX: not clear which exception should be raised
11958 * when in debug mode...
11960 check_insn(env
, ctx
, ISA_MIPS32
);
11961 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
11962 generate_exception(ctx
, EXCP_DBp
);
11964 generate_exception(ctx
, EXCP_DBp
);
11966 /* Treat as NOP. */
11969 case OPC_DIVU_G_2F
:
11970 case OPC_MULT_G_2F
:
11971 case OPC_MULTU_G_2F
:
11973 case OPC_MODU_G_2F
:
11974 check_insn(env
, ctx
, INSN_LOONGSON2F
);
11975 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
11977 #if defined(TARGET_MIPS64)
11980 check_insn(env
, ctx
, ISA_MIPS64
);
11981 check_mips_64(ctx
);
11982 gen_cl(ctx
, op1
, rd
, rs
);
11984 case OPC_DMULT_G_2F
:
11985 case OPC_DMULTU_G_2F
:
11986 case OPC_DDIV_G_2F
:
11987 case OPC_DDIVU_G_2F
:
11988 case OPC_DMOD_G_2F
:
11989 case OPC_DMODU_G_2F
:
11990 check_insn(env
, ctx
, INSN_LOONGSON2F
);
11991 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
11994 default: /* Invalid */
11995 MIPS_INVAL("special2");
11996 generate_exception(ctx
, EXCP_RI
);
12001 op1
= MASK_SPECIAL3(ctx
->opcode
);
12005 check_insn(env
, ctx
, ISA_MIPS32R2
);
12006 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12009 check_insn(env
, ctx
, ISA_MIPS32R2
);
12010 op2
= MASK_BSHFL(ctx
->opcode
);
12011 gen_bshfl(ctx
, op2
, rt
, rd
);
12014 gen_rdhwr(env
, ctx
, rt
, rd
);
12017 check_insn(env
, ctx
, ASE_MT
);
12019 TCGv t0
= tcg_temp_new();
12020 TCGv t1
= tcg_temp_new();
12022 gen_load_gpr(t0
, rt
);
12023 gen_load_gpr(t1
, rs
);
12024 gen_helper_fork(t0
, t1
);
12030 check_insn(env
, ctx
, ASE_MT
);
12032 TCGv t0
= tcg_temp_new();
12034 save_cpu_state(ctx
, 1);
12035 gen_load_gpr(t0
, rs
);
12036 gen_helper_yield(t0
, t0
);
12037 gen_store_gpr(t0
, rd
);
12041 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
12042 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
12043 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
12044 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12045 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12047 #if defined(TARGET_MIPS64)
12048 case OPC_DEXTM
... OPC_DEXT
:
12049 case OPC_DINSM
... OPC_DINS
:
12050 check_insn(env
, ctx
, ISA_MIPS64R2
);
12051 check_mips_64(ctx
);
12052 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12055 check_insn(env
, ctx
, ISA_MIPS64R2
);
12056 check_mips_64(ctx
);
12057 op2
= MASK_DBSHFL(ctx
->opcode
);
12058 gen_bshfl(ctx
, op2
, rt
, rd
);
12060 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
12061 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
12062 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
12063 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12064 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12067 default: /* Invalid */
12068 MIPS_INVAL("special3");
12069 generate_exception(ctx
, EXCP_RI
);
12074 op1
= MASK_REGIMM(ctx
->opcode
);
12076 case OPC_BLTZ
... OPC_BGEZL
: /* REGIMM branches */
12077 case OPC_BLTZAL
... OPC_BGEZALL
:
12078 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2);
12081 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
12083 gen_trap(ctx
, op1
, rs
, -1, imm
);
12086 check_insn(env
, ctx
, ISA_MIPS32R2
);
12087 /* Treat as NOP. */
12089 default: /* Invalid */
12090 MIPS_INVAL("regimm");
12091 generate_exception(ctx
, EXCP_RI
);
12096 check_cp0_enabled(ctx
);
12097 op1
= MASK_CP0(ctx
->opcode
);
12103 #if defined(TARGET_MIPS64)
12107 #ifndef CONFIG_USER_ONLY
12108 gen_cp0(env
, ctx
, op1
, rt
, rd
);
12109 #endif /* !CONFIG_USER_ONLY */
12111 case OPC_C0_FIRST
... OPC_C0_LAST
:
12112 #ifndef CONFIG_USER_ONLY
12113 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
12114 #endif /* !CONFIG_USER_ONLY */
12117 #ifndef CONFIG_USER_ONLY
12119 TCGv t0
= tcg_temp_new();
12121 op2
= MASK_MFMC0(ctx
->opcode
);
12124 check_insn(env
, ctx
, ASE_MT
);
12125 gen_helper_dmt(t0
);
12126 gen_store_gpr(t0
, rt
);
12129 check_insn(env
, ctx
, ASE_MT
);
12130 gen_helper_emt(t0
);
12131 gen_store_gpr(t0
, rt
);
12134 check_insn(env
, ctx
, ASE_MT
);
12135 gen_helper_dvpe(t0
);
12136 gen_store_gpr(t0
, rt
);
12139 check_insn(env
, ctx
, ASE_MT
);
12140 gen_helper_evpe(t0
);
12141 gen_store_gpr(t0
, rt
);
12144 check_insn(env
, ctx
, ISA_MIPS32R2
);
12145 save_cpu_state(ctx
, 1);
12147 gen_store_gpr(t0
, rt
);
12148 /* Stop translation as we may have switched the execution mode */
12149 ctx
->bstate
= BS_STOP
;
12152 check_insn(env
, ctx
, ISA_MIPS32R2
);
12153 save_cpu_state(ctx
, 1);
12155 gen_store_gpr(t0
, rt
);
12156 /* Stop translation as we may have switched the execution mode */
12157 ctx
->bstate
= BS_STOP
;
12159 default: /* Invalid */
12160 MIPS_INVAL("mfmc0");
12161 generate_exception(ctx
, EXCP_RI
);
12166 #endif /* !CONFIG_USER_ONLY */
12169 check_insn(env
, ctx
, ISA_MIPS32R2
);
12170 gen_load_srsgpr(rt
, rd
);
12173 check_insn(env
, ctx
, ISA_MIPS32R2
);
12174 gen_store_srsgpr(rt
, rd
);
12178 generate_exception(ctx
, EXCP_RI
);
12182 case OPC_ADDI
: /* Arithmetic with immediate opcode */
12184 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12186 case OPC_SLTI
: /* Set on less than with immediate opcode */
12188 gen_slt_imm(env
, op
, rt
, rs
, imm
);
12190 case OPC_ANDI
: /* Arithmetic with immediate opcode */
12194 gen_logic_imm(env
, op
, rt
, rs
, imm
);
12196 case OPC_J
... OPC_JAL
: /* Jump */
12197 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12198 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12201 case OPC_BEQ
... OPC_BGTZ
: /* Branch */
12202 case OPC_BEQL
... OPC_BGTZL
:
12203 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2);
12206 case OPC_LB
... OPC_LWR
: /* Load and stores */
12208 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12210 case OPC_SB
... OPC_SW
:
12212 gen_st(ctx
, op
, rt
, rs
, imm
);
12215 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12218 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
12219 /* Treat as NOP. */
12222 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12223 /* Treat as NOP. */
12226 /* Floating point (COP1). */
12231 gen_cop1_ldst(env
, ctx
, op
, rt
, rs
, imm
);
12235 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12236 check_cp1_enabled(ctx
);
12237 op1
= MASK_CP1(ctx
->opcode
);
12241 check_insn(env
, ctx
, ISA_MIPS32R2
);
12246 gen_cp1(ctx
, op1
, rt
, rd
);
12248 #if defined(TARGET_MIPS64)
12251 check_insn(env
, ctx
, ISA_MIPS3
);
12252 gen_cp1(ctx
, op1
, rt
, rd
);
12258 check_insn(env
, ctx
, ASE_MIPS3D
);
12261 gen_compute_branch1(env
, ctx
, MASK_BC1(ctx
->opcode
),
12262 (rt
>> 2) & 0x7, imm
<< 2);
12270 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
12275 generate_exception (ctx
, EXCP_RI
);
12279 generate_exception_err(ctx
, EXCP_CpU
, 1);
12289 /* COP2: Not implemented. */
12290 generate_exception_err(ctx
, EXCP_CpU
, 2);
12294 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12295 check_cp1_enabled(ctx
);
12296 op1
= MASK_CP3(ctx
->opcode
);
12304 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
12307 /* Treat as NOP. */
12322 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
12326 generate_exception (ctx
, EXCP_RI
);
12330 generate_exception_err(ctx
, EXCP_CpU
, 1);
12334 #if defined(TARGET_MIPS64)
12335 /* MIPS64 opcodes */
12337 case OPC_LDL
... OPC_LDR
:
12340 check_insn(env
, ctx
, ISA_MIPS3
);
12341 check_mips_64(ctx
);
12342 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12344 case OPC_SDL
... OPC_SDR
:
12346 check_insn(env
, ctx
, ISA_MIPS3
);
12347 check_mips_64(ctx
);
12348 gen_st(ctx
, op
, rt
, rs
, imm
);
12351 check_insn(env
, ctx
, ISA_MIPS3
);
12352 check_mips_64(ctx
);
12353 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12357 check_insn(env
, ctx
, ISA_MIPS3
);
12358 check_mips_64(ctx
);
12359 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12363 check_insn(env
, ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
12364 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12365 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12369 check_insn(env
, ctx
, ASE_MDMX
);
12370 /* MDMX: Not implemented. */
12371 default: /* Invalid */
12372 MIPS_INVAL("major opcode");
12373 generate_exception(ctx
, EXCP_RI
);
12379 gen_intermediate_code_internal (CPUState
*env
, TranslationBlock
*tb
,
12383 target_ulong pc_start
;
12384 uint16_t *gen_opc_end
;
12393 qemu_log("search pc %d\n", search_pc
);
12396 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
12399 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
12401 ctx
.bstate
= BS_NONE
;
12402 /* Restore delay slot state from the tb context. */
12403 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
12404 restore_cpu_state(env
, &ctx
);
12405 #ifdef CONFIG_USER_ONLY
12406 ctx
.mem_idx
= MIPS_HFLAG_UM
;
12408 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
12411 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
12412 if (max_insns
== 0)
12413 max_insns
= CF_COUNT_MASK
;
12414 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
12415 gen_icount_start();
12416 while (ctx
.bstate
== BS_NONE
) {
12417 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
12418 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
12419 if (bp
->pc
== ctx
.pc
) {
12420 save_cpu_state(&ctx
, 1);
12421 ctx
.bstate
= BS_BRANCH
;
12422 gen_helper_0i(raise_exception
, EXCP_DEBUG
);
12423 /* Include the breakpoint location or the tb won't
12424 * be flushed when it must be. */
12426 goto done_generating
;
12432 j
= gen_opc_ptr
- gen_opc_buf
;
12436 gen_opc_instr_start
[lj
++] = 0;
12438 gen_opc_pc
[lj
] = ctx
.pc
;
12439 gen_opc_hflags
[lj
] = ctx
.hflags
& MIPS_HFLAG_BMASK
;
12440 gen_opc_instr_start
[lj
] = 1;
12441 gen_opc_icount
[lj
] = num_insns
;
12443 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
12447 if (!(ctx.hflags & MIPS_HFLAG_M16)) {
12448 ctx.opcode = ldl_code(ctx.pc);
12450 decode_opc(env, &ctx, &is_branch);
12451 } else if (env->insn_flags & ASE_MICROMIPS) {
12452 ctx.opcode = lduw_code(ctx.pc);
12453 insn_bytes = decode_micromips_opc(env, &ctx, &is_branch);
12454 } else if (env->insn_flags & ASE_MIPS16) {
12455 ctx.opcode = lduw_code(ctx.pc);
12456 insn_bytes = decode_mips16_opc(env, &ctx, &is_branch);
12458 generate_exception(&ctx, EXCP_RI);
12459 ctx.bstate = BS_STOP;
12463 handle_delay_slot(env, &ctx, insn_bytes);
12465 ctx.pc += insn_bytes;
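/* Note (added): the fetch width depends on the current ISA mode: standard MIPS code
   (MIPS_HFLAG_M16 clear) is fetched as a 32-bit word with ldl_code(), while microMIPS and
   MIPS16 code is fetched 16 bits at a time with lduw_code(), and those decoders report how
   many bytes they consumed through insn_bytes so the loop can advance ctx.pc correctly. */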
12469 /* Execute a branch and its delay slot as a single instruction.
12470 This is what GDB expects and is consistent with what the
12471 hardware does (e.g. if a delay slot instruction faults, the
12472 reported PC is the PC of the branch). */
12473 if (env
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0)
12476 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
12479 if (gen_opc_ptr
>= gen_opc_end
)
12482 if (num_insns
>= max_insns
)
12488 if (tb
->cflags
& CF_LAST_IO
)
12490 if (env
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
12491 save_cpu_state(&ctx
, ctx
.bstate
== BS_NONE
);
12492 gen_helper_0i(raise_exception
, EXCP_DEBUG
);
12494 switch (ctx
.bstate
) {
12496 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12499 save_cpu_state(&ctx
, 0);
12500 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12503 tcg_gen_exit_tb(0);
12511 gen_icount_end(tb
, num_insns
);
12512 *gen_opc_ptr
= INDEX_op_end
;
12514 j
= gen_opc_ptr
- gen_opc_buf
;
12517 gen_opc_instr_start
[lj
++] = 0;
12519 tb
->size
= ctx
.pc
- pc_start
;
12520 tb
->icount
= num_insns
;
12524 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
12525 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
12526 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
12532 void gen_intermediate_code (CPUState
*env
, struct TranslationBlock
*tb
)
12534 gen_intermediate_code_internal(env
, tb
, 0);
12537 void gen_intermediate_code_pc (CPUState
*env
, struct TranslationBlock
*tb
)
12539 gen_intermediate_code_internal(env
, tb
, 1);
12542 static void fpu_dump_state(CPUState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
12546 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
12548 #define printfpr(fp) \
12551 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
12552 " fd:%13g fs:%13g psu: %13g\n", \
12553 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
12554 (double)(fp)->fd, \
12555 (double)(fp)->fs[FP_ENDIAN_IDX], \
12556 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
12559 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
12560 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
12561 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
12562 " fd:%13g fs:%13g psu:%13g\n", \
12563 tmp.w[FP_ENDIAN_IDX], tmp.d, \
12565 (double)tmp.fs[FP_ENDIAN_IDX], \
12566 (double)tmp.fs[!FP_ENDIAN_IDX]); \
12571 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
12572 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
12573 get_float_exception_flags(&env
->active_fpu
.fp_status
));
12574 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
12575 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
12576 printfpr(&env
->active_fpu
.fpr
[i
]);
12582 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
12583 /* Debug help: The architecture requires 32bit code to maintain proper
12584 sign-extended values on 64bit machines. */
12586 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
12589 cpu_mips_check_sign_extensions (CPUState
*env
, FILE *f
,
12590 fprintf_function cpu_fprintf
,
12595 if (!SIGN_EXT_P(env
->active_tc
.PC
))
12596 cpu_fprintf(f
, "BROKEN: pc=0x" TARGET_FMT_lx
"\n", env
->active_tc
.PC
);
12597 if (!SIGN_EXT_P(env
->active_tc
.HI
[0]))
12598 cpu_fprintf(f
, "BROKEN: HI=0x" TARGET_FMT_lx
"\n", env
->active_tc
.HI
[0]);
12599 if (!SIGN_EXT_P(env
->active_tc
.LO
[0]))
12600 cpu_fprintf(f
, "BROKEN: LO=0x" TARGET_FMT_lx
"\n", env
->active_tc
.LO
[0]);
12601 if (!SIGN_EXT_P(env
->btarget
))
12602 cpu_fprintf(f
, "BROKEN: btarget=0x" TARGET_FMT_lx
"\n", env
->btarget
);
12604 for (i
= 0; i
< 32; i
++) {
12605 if (!SIGN_EXT_P(env
->active_tc
.gpr
[i
]))
12606 cpu_fprintf(f
, "BROKEN: %s=0x" TARGET_FMT_lx
"\n", regnames
[i
], env
->active_tc
.gpr
[i
]);
12609 if (!SIGN_EXT_P(env
->CP0_EPC
))
12610 cpu_fprintf(f
, "BROKEN: EPC=0x" TARGET_FMT_lx
"\n", env
->CP0_EPC
);
12611 if (!SIGN_EXT_P(env
->lladdr
))
12612 cpu_fprintf(f
, "BROKEN: LLAddr=0x" TARGET_FMT_lx
"\n", env
->lladdr
);
12616 void cpu_dump_state (CPUState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
12621 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
12622 " LO=0x" TARGET_FMT_lx
" ds %04x "
12623 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
12624 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
12625 env
->hflags
, env
->btarget
, env
->bcond
);
12626 for (i
= 0; i
< 32; i
++) {
12628 cpu_fprintf(f
, "GPR%02d:", i
);
12629 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
12631 cpu_fprintf(f
, "\n");
12634 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
12635 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
12636 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx
"\n",
12637 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
12638 if (env
->hflags
& MIPS_HFLAG_FPU
)
12639 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
12640 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
12641 cpu_mips_check_sign_extensions(env
, f
, cpu_fprintf
, flags
);
12645 static void mips_tcg_init(void)
12650 /* Initialize various static tables. */
12654 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
12655 TCGV_UNUSED(cpu_gpr
[0]);
12656 for (i
= 1; i
< 32; i
++)
12657 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
12658 offsetof(CPUState
, active_tc
.gpr
[i
]),
12660 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
12661 offsetof(CPUState
, active_tc
.PC
), "PC");
12662 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
12663 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
12664 offsetof(CPUState
, active_tc
.HI
[i
]),
12666 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
12667 offsetof(CPUState
, active_tc
.LO
[i
]),
12669 cpu_ACX
[i
] = tcg_global_mem_new(TCG_AREG0
,
12670 offsetof(CPUState
, active_tc
.ACX
[i
]),
12673 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
12674 offsetof(CPUState
, active_tc
.DSPControl
),
12676 bcond
= tcg_global_mem_new(TCG_AREG0
,
12677 offsetof(CPUState
, bcond
), "bcond");
12678 btarget
= tcg_global_mem_new(TCG_AREG0
,
12679 offsetof(CPUState
, btarget
), "btarget");
12680 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
12681 offsetof(CPUState
, hflags
), "hflags");
12683 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
12684 offsetof(CPUState
, active_fpu
.fcr0
),
12686 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
12687 offsetof(CPUState
, active_fpu
.fcr31
),
12690 /* register helpers */
12691 #define GEN_HELPER 2
12692 #include "helper.h"
12697 #include "translate_init.c"
12699 CPUMIPSState *cpu_mips_init (const char *cpu_model)
12702 const mips_def_t *def;
12704 def = cpu_mips_find_by_name(cpu_model);
12707 env = g_malloc0(sizeof(CPUMIPSState));
12708 env->cpu_model = def;
12709 env->cpu_model_str = cpu_model;
12711 cpu_exec_init(env);
12712 #ifndef CONFIG_USER_ONLY
12713 mmu_init(env, def);
12715 fpu_init(env, def);
12716 mvp_init(env, def);
12719 qemu_init_vcpu(env);
12723 void cpu_reset (CPUMIPSState
*env
)
12725 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
12726 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
12727 log_cpu_state(env
, 0);
12730 memset(env
, 0, offsetof(CPUMIPSState
, breakpoints
));
12733 /* Reset registers to their default values */
12734 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
12735 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
12736 #ifdef TARGET_WORDS_BIGENDIAN
12737 env
->CP0_Config0
|= (1 << CP0C0_BE
);
12739 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
12740 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
12741 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
12742 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
12743 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
12744 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
12745 << env
->cpu_model
->CP0_LLAddr_shift
;
12746 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
12747 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
12748 env
->CCRes
= env
->cpu_model
->CCRes
;
12749 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
12750 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
12751 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
12752 env
->current_tc
= 0;
12753 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
12754 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
12755 #if defined(TARGET_MIPS64)
12756 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
12757 env
->SEGMask
|= 3ULL << 62;
12760 env
->PABITS
= env
->cpu_model
->PABITS
;
12761 env
->PAMask
= (target_ulong
)((1ULL << env
->cpu_model
->PABITS
) - 1);
12762 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
12763 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
12764 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
12765 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
12766 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
12767 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
12768 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
12769 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
12770 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
12771 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
12772 env
->insn_flags
= env
->cpu_model
->insn_flags
;
12774 #if defined(CONFIG_USER_ONLY)
12775 env
->hflags
= MIPS_HFLAG_UM
;
12776 /* Enable access to the SYNCI_Step register. */
12777 env
->CP0_HWREna
|= (1 << 1);
12778 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12779 env
->hflags
|= MIPS_HFLAG_FPU
;
12781 #ifdef TARGET_MIPS64
12782 if (env
->active_fpu
.fcr0
& (1 << FCR0_F64
)) {
12783 env
->hflags
|= MIPS_HFLAG_F64
;
12787 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
12788 /* If the exception was raised from a delay slot,
12789 come back to the jump. */
12790 env
->CP0_ErrorEPC
= env
->active_tc
.PC
- 4;
12792 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
12794 env
->active_tc
.PC
= (int32_t)0xBFC00000;
12795 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
12796 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
12797 env
->CP0_Wired
= 0;
12798 env
->CP0_EBase
= 0x80000000 | (env
->cpu_index
& 0x3FF);
12799 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
12800 /* vectored interrupts not implemented, timer on int 7,
12801 no performance counters. */
12802 env
->CP0_IntCtl
= 0xe0000000;
12806 for (i
= 0; i
< 7; i
++) {
12807 env
->CP0_WatchLo
[i
] = 0;
12808 env
->CP0_WatchHi
[i
] = 0x80000000;
12810 env
->CP0_WatchLo
[7] = 0;
12811 env
->CP0_WatchHi
[7] = 0;
12813 /* Count register increments in debug mode, EJTAG version 1 */
12814 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
12815 env
->hflags
= MIPS_HFLAG_CP0
;
12817 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
12820 /* Only TC0 on VPE 0 starts as active. */
12821 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
12822 env
->tcs
[i
].CP0_TCBind
= env
->cpu_index
<< CP0TCBd_CurVPE
;
12823 env
->tcs
[i
].CP0_TCHalt
= 1;
12825 env
->active_tc
.CP0_TCHalt
= 1;
12828 if (!env
->cpu_index
) {
12829 /* VPE0 starts up enabled. */
12830 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
12831 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
12833 /* TC0 starts up unhalted. */
12835 env
->active_tc
.CP0_TCHalt
= 0;
12836 env
->tcs
[0].CP0_TCHalt
= 0;
12837 /* With thread 0 active. */
12838 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
12839 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
12843 #if defined(TARGET_MIPS64)
12844 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
12845 env
->hflags
|= MIPS_HFLAG_64
;
12848 env
->exception_index
= EXCP_NONE
;
12851 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
12853 env->active_tc.PC = gen_opc_pc[pc_pos];
12854 env->hflags &= ~MIPS_HFLAG_BMASK;
12855 env->hflags |= gen_opc_hflags[pc_pos];
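/* Note (added): this mirrors the bookkeeping in the translation loop above, which records
   gen_opc_pc[] and gen_opc_hflags[] per translated instruction so that an exception taken
   in the middle of a TB can restore the guest PC and the branch-related hflags. */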