/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define MIPS_DEBUG_DISAS 0
//#define MIPS_DEBUG_SIGN_EXTENSIONS

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_JALS     = OPC_JAL | 0x5,
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    OPC_JALXS    = OPC_JALX | 0x5,
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_JALRC    = OPC_JALR | (0x5 << 6),
    OPC_JALRS    = 0x10 | OPC_SPECIAL | (0x5 << 6),

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALS  = OPC_BLTZAL | 0x5, /* microMIPS */
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALS  = OPC_BGEZAL | 0x5, /* microMIPS */
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,          /* single fp */
    FMT_D = 17,          /* double fp */
    FMT_E = 18,          /* extended fp */
    FMT_Q = 19,          /* quad fp */
    FMT_W = 20,          /* 32-bit fixed */
    FMT_L = 21,          /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
};
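
/* Loongson 2E/2F multimedia (LMI) instructions live in the COP2 major
   opcode space: the low function bits select the operation group (add,
   subtract, pack/shuffle, compare, shift, ...) while bits 25..21 select
   the element width/signedness variant within that group. */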
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];

#include "gen-icount.h"
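
/* Helper-call wrappers: the digit before "e" is the number of return
   values, "e" stands for the implicit cpu_env argument, the following
   digit is the number of TCGv operands, and the trailing "i" is a
   constant that gets boxed into a temporary TCGv_i32 and freed after
   the call. */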
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};
static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
#define MIPS_DEBUG(fmt, ...)                                              \
    do {                                                                  \
        if (MIPS_DEBUG_DISAS) {                                           \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                              \
                          TARGET_FMT_lx ": %08x " fmt "\n",               \
                          ctx->pc, ctx->opcode , ## __VA_ARGS__);         \
        }                                                                 \
    } while (0)

#define LOG_DISAS(...)                                                    \
    do {                                                                  \
        if (MIPS_DEBUG_DISAS) {                                           \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);             \
        }                                                                 \
    } while (0)

#define MIPS_INVAL(op)                                                    \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,        \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F))
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}

/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0) {
        tcg_gen_movi_tl(t0, 0);
    } else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_ld_i64(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        gen_load_fpr32(t0, reg & ~1);
        gen_load_fpr32(t1, reg | 1);
        tcg_gen_concat_i32_i64(t, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_st_i64(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, t);
        gen_store_fpr32(t1, reg & ~1);
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_trunc_i64_i32(t1, t0);
        gen_store_fpr32(t1, reg | 1);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i64(t0);
    }
}
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

/* Tests */
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state (CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0e0i(raise_exception, excp);
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, data reference in user mode
       with Status_UX = 0 should be casted to 32-bit and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUMIPSState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_cp1_64bitmode(ctx);                                             \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#define OP_LD(insn,fname)                                                  \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
}
OP_LD(lb,ld8s);
OP_LD(lbu,ld8u);
OP_LD(lh,ld16s);
OP_LD(lhu,ld16u);
OP_LD(lw,ld32s);
#if defined(TARGET_MIPS64)
OP_LD(lwu,ld32u);
OP_LD(ld,ld64);
#endif
#undef OP_LD

#define OP_ST(insn,fname)                                                  \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx)  \
{                                                                          \
    tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx);                        \
}
OP_ST(sb,st8);
OP_ST(sh,st16);
OP_ST(sw,st32);
#if defined(TARGET_MIPS64)
OP_ST(sd,st64);
#endif
#undef OP_ST

#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    int l1 = gen_new_label();                                              \
    int l2 = gen_new_label();                                              \
                                                                           \
    tcg_gen_andi_tl(t0, arg2, almask);                                     \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                            \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));    \
    generate_exception(ctx, EXCP_AdES);                                    \
    gen_set_label(l1);                                                     \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                          \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                      \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));             \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));        \
    gen_helper_0e0i(raise_exception, EXCP_SC);                             \
    gen_set_label(l2);                                                     \
    tcg_gen_movi_tl(t0, 0);                                                \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                   \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
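
/* In the CONFIG_USER_ONLY variants above, LL records the address and the
   loaded value in lladdr/llval, and SC raises EXCP_SC while the address
   still matches so the compare-and-store can be completed outside of
   generated code; on an address mismatch the inline path simply writes 0
   (failure) to rt. */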
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}

static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
static void gen_ld (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                    int rt, int base, int16_t offset)
{
    const char *opn = "ld";
    TCGv t0, t1;

    if (rt == 0 && env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPU we must perform the
           actual memory access. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_LWU:
        save_cpu_state(ctx, 0);
        op_ld_lwu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LD:
        save_cpu_state(ctx, 0);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LLD:
        save_cpu_state(ctx, 1);
        op_ld_lld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LDL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_1e2i(ldl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LDR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_1e2i(ldr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LDPC:
        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
#endif
    case OPC_LWPC:
        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LW:
        save_cpu_state(ctx, 0);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LH:
        save_cpu_state(ctx, 0);
        op_ld_lh(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LHU:
        save_cpu_state(ctx, 0);
        op_ld_lhu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LB:
        save_cpu_state(ctx, 0);
        op_ld_lb(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LBU:
        save_cpu_state(ctx, 0);
        op_ld_lbu(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    case OPC_LWL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_1e2i(lwl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LWR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_1e2i(lwr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        break;
    case OPC_LL:
        save_cpu_state(ctx, 1);
        op_ld_ll(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    const char *opn = "st";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SD:
        save_cpu_state(ctx, 0);
        op_st_sd(t1, t0, ctx);
        break;
    case OPC_SDL:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(sdl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SDR:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(sdr, t1, t0, ctx->mem_idx);
        break;
#endif
    case OPC_SW:
        save_cpu_state(ctx, 0);
        op_st_sw(t1, t0, ctx);
        break;
    case OPC_SH:
        save_cpu_state(ctx, 0);
        op_st_sh(t1, t0, ctx);
        break;
    case OPC_SB:
        save_cpu_state(ctx, 0);
        op_st_sb(t1, t0, ctx);
        break;
    case OPC_SWL:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(swl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SWR:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(swr, t1, t0, ctx->mem_idx);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    const char *opn = "st_cond";
    TCGv t0, t1;

    t0 = tcg_temp_local_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

    t1 = tcg_temp_local_new();
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SCD:
        save_cpu_state(ctx, 1);
        op_st_scd(t1, t0, rt, ctx);
        break;
#endif
    case OPC_SC:
        save_cpu_state(ctx, 1);
        op_st_sc(t1, t0, rt, ctx);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    const char *opn = "flt_ldst";
    TCGv t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(fp0, ft);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv t1 = tcg_temp_new();

            gen_load_fpr32(fp0, ft);
            tcg_gen_extu_i32_tl(t1, fp0);
            tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
            tcg_temp_free(t1);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
            gen_store_fpr64(ctx, fp0, ft);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, ft);
            tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
            tcg_temp_free_i64(fp0);
        }
        break;
    default:
        generate_exception(ctx, EXCP_RI);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
    tcg_temp_free(t0);
}
static void gen_cop1_ldst(CPUMIPSState *env, DisasContext *ctx,
                          uint32_t op, int rt, int rs, int16_t imm)
{
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        gen_flt_ldst(ctx, op, rt, rs, imm);
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
/* Arithmetic with immediate operand */
static void gen_arith_imm (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                           int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */
        return;
    }
    switch (opc) {
    case OPC_ADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);
            tcg_gen_ext32s_tl(t0, t0);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_gen_ext32s_tl(t0, t0);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        break;
    case OPC_ADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
}
/* Logic with immediate operand */
static void gen_logic_imm(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm;
    const char *opn = "imm logic";

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    uimm = (uint16_t)imm;
    switch (opc) {
    case OPC_ANDI:
        if (likely(rs != 0))
            tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], 0);
        break;
    case OPC_ORI:
        if (rs != 0)
            tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        break;
    case OPC_XORI:
        if (likely(rs != 0))
            tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        break;
    case OPC_LUI:
        tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
}
/* Set on less than with immediate operand */
static void gen_slt_imm(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
    tcg_temp_free(t0);
}
/* Shifts with immediate operand */
static void gen_shift_imm(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = ((uint16_t)imm) & 0x1f;
    const char *opn = "imm shift";
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLL:
        tcg_gen_shli_tl(t0, t0, uimm);
        tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        break;
    case OPC_SRA:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SRL:
        if (uimm != 0) {
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
        } else {
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        }
        break;
    case OPC_ROTR:
        if (uimm != 0) {
            TCGv_i32 t1 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(t1, t0);
            tcg_gen_rotri_i32(t1, t1, uimm);
            tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
            tcg_temp_free_i32(t1);
        } else {
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLL:
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DSRA:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DSRL:
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
        break;
    case OPC_DROTR:
        if (uimm != 0) {
            tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
        } else {
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
        break;
    case OPC_DSLL32:
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DSRA32:
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DSRL32:
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
    case OPC_DROTR32:
        tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
    tcg_temp_free(t0);
}
static void gen_arith (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "arith";

    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
       && opc != OPC_DADD && opc != OPC_DSUB) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        return;
    }

    switch (opc) {
    case OPC_ADD:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_ADDU:
        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_SUB:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_sub_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t2, t1, t2);
            tcg_gen_xor_tl(t1, t0, t1);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_SUBU:
        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADD:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DADDU:
        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_DSUB:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_sub_tl(t0, t1, t2);
            tcg_gen_xor_tl(t2, t1, t2);
            tcg_gen_xor_tl(t1, t0, t1);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    case OPC_DSUBU:
        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
#endif
    case OPC_MUL:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
/* Conditional move */
static void gen_cond_move(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
{
    const char *opn = "cond move";
    int l1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        return;
    }

    l1 = gen_new_label();
    switch (opc) {
    case OPC_MOVN:
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
        else
            goto out;
        break;
    case OPC_MOVZ:
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
        break;
    }
    if (rs != 0)
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    else
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
 out:
    gen_set_label(l1);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
static void gen_logic(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    const char *opn = "logic";

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    switch (opc) {
    case OPC_AND:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_NOR:
        if (rs != 0 && rt != 0) {
            tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
        }
        break;
    case OPC_OR:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    case OPC_XOR:
        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
        }
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
/* Set on lower than */
static void gen_slt(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    const char *opn = "slt";
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_shift (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "shifts";
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLLV:
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shl_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SRAV:
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_SRLV:
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shr_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_ROTRV:
        {
            TCGv_i32 t2 = tcg_temp_new_i32();
            TCGv_i32 t3 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(t2, t0);
            tcg_gen_trunc_tl_i32(t3, t1);
            tcg_gen_andi_i32(t2, t2, 0x1f);
            tcg_gen_rotr_i32(t2, t3, t2);
            tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
            tcg_temp_free_i32(t2);
            tcg_temp_free_i32(t3);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLLV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DSRAV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DSRLV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
        break;
    case OPC_DROTRV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Arithmetic on HI/LO registers */
static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
{
    const char *opn = "hilo";

    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        /* Treat as NOP. */
        return;
    }
    switch (opc) {
    case OPC_MFHI:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
        break;
    case OPC_MFLO:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
        break;
    case OPC_MTHI:
        if (reg != 0)
            tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
        else
            tcg_gen_movi_tl(cpu_HI[0], 0);
        break;
    case OPC_MTLO:
        if (reg != 0)
            tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
        else
            tcg_gen_movi_tl(cpu_LO[0], 0);
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s", opn, regnames[reg]);
}
2132 static void gen_muldiv (DisasContext
*ctx
, uint32_t opc
,
2135 const char *opn
= "mul/div";
2141 #if defined(TARGET_MIPS64)
2145 t0
= tcg_temp_local_new();
2146 t1
= tcg_temp_local_new();
2149 t0
= tcg_temp_new();
2150 t1
= tcg_temp_new();
2154 gen_load_gpr(t0
, rs
);
2155 gen_load_gpr(t1
, rt
);
2159 int l1
= gen_new_label();
2160 int l2
= gen_new_label();
2162 tcg_gen_ext32s_tl(t0
, t0
);
2163 tcg_gen_ext32s_tl(t1
, t1
);
2164 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2165 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2166 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2168 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2169 tcg_gen_movi_tl(cpu_HI
[0], 0);
2172 tcg_gen_div_tl(cpu_LO
[0], t0
, t1
);
2173 tcg_gen_rem_tl(cpu_HI
[0], t0
, t1
);
2174 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2175 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2182 int l1
= gen_new_label();
2184 tcg_gen_ext32u_tl(t0
, t0
);
2185 tcg_gen_ext32u_tl(t1
, t1
);
2186 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2187 tcg_gen_divu_tl(cpu_LO
[0], t0
, t1
);
2188 tcg_gen_remu_tl(cpu_HI
[0], t0
, t1
);
2189 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2190 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2197 TCGv_i64 t2
= tcg_temp_new_i64();
2198 TCGv_i64 t3
= tcg_temp_new_i64();
2200 tcg_gen_ext_tl_i64(t2
, t0
);
2201 tcg_gen_ext_tl_i64(t3
, t1
);
2202 tcg_gen_mul_i64(t2
, t2
, t3
);
2203 tcg_temp_free_i64(t3
);
2204 tcg_gen_trunc_i64_tl(t0
, t2
);
2205 tcg_gen_shri_i64(t2
, t2
, 32);
2206 tcg_gen_trunc_i64_tl(t1
, t2
);
2207 tcg_temp_free_i64(t2
);
2208 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2209 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2215 TCGv_i64 t2
= tcg_temp_new_i64();
2216 TCGv_i64 t3
= tcg_temp_new_i64();
2218 tcg_gen_ext32u_tl(t0
, t0
);
2219 tcg_gen_ext32u_tl(t1
, t1
);
2220 tcg_gen_extu_tl_i64(t2
, t0
);
2221 tcg_gen_extu_tl_i64(t3
, t1
);
2222 tcg_gen_mul_i64(t2
, t2
, t3
);
2223 tcg_temp_free_i64(t3
);
2224 tcg_gen_trunc_i64_tl(t0
, t2
);
2225 tcg_gen_shri_i64(t2
, t2
, 32);
2226 tcg_gen_trunc_i64_tl(t1
, t2
);
2227 tcg_temp_free_i64(t2
);
2228 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2229 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2233 #if defined(TARGET_MIPS64)
2236 int l1
= gen_new_label();
2237 int l2
= gen_new_label();
2239 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2240 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2241 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2242 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2243 tcg_gen_movi_tl(cpu_HI
[0], 0);
2246 tcg_gen_div_i64(cpu_LO
[0], t0
, t1
);
2247 tcg_gen_rem_i64(cpu_HI
[0], t0
, t1
);
2254 int l1
= gen_new_label();
2256 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2257 tcg_gen_divu_i64(cpu_LO
[0], t0
, t1
);
2258 tcg_gen_remu_i64(cpu_HI
[0], t0
, t1
);
2264 gen_helper_dmult(cpu_env
, t0
, t1
);
2268 gen_helper_dmultu(cpu_env
, t0
, t1
);
2274 TCGv_i64 t2
= tcg_temp_new_i64();
2275 TCGv_i64 t3
= tcg_temp_new_i64();
2277 tcg_gen_ext_tl_i64(t2
, t0
);
2278 tcg_gen_ext_tl_i64(t3
, t1
);
2279 tcg_gen_mul_i64(t2
, t2
, t3
);
2280 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2281 tcg_gen_add_i64(t2
, t2
, t3
);
2282 tcg_temp_free_i64(t3
);
2283 tcg_gen_trunc_i64_tl(t0
, t2
);
2284 tcg_gen_shri_i64(t2
, t2
, 32);
2285 tcg_gen_trunc_i64_tl(t1
, t2
);
2286 tcg_temp_free_i64(t2
);
2287 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2288 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2294 TCGv_i64 t2
= tcg_temp_new_i64();
2295 TCGv_i64 t3
= tcg_temp_new_i64();
2297 tcg_gen_ext32u_tl(t0
, t0
);
2298 tcg_gen_ext32u_tl(t1
, t1
);
2299 tcg_gen_extu_tl_i64(t2
, t0
);
2300 tcg_gen_extu_tl_i64(t3
, t1
);
2301 tcg_gen_mul_i64(t2
, t2
, t3
);
2302 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2303 tcg_gen_add_i64(t2
, t2
, t3
);
2304 tcg_temp_free_i64(t3
);
2305 tcg_gen_trunc_i64_tl(t0
, t2
);
2306 tcg_gen_shri_i64(t2
, t2
, 32);
2307 tcg_gen_trunc_i64_tl(t1
, t2
);
2308 tcg_temp_free_i64(t2
);
2309 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2310 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2316 TCGv_i64 t2
= tcg_temp_new_i64();
2317 TCGv_i64 t3
= tcg_temp_new_i64();
2319 tcg_gen_ext_tl_i64(t2
, t0
);
2320 tcg_gen_ext_tl_i64(t3
, t1
);
2321 tcg_gen_mul_i64(t2
, t2
, t3
);
2322 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2323 tcg_gen_sub_i64(t2
, t3
, t2
);
2324 tcg_temp_free_i64(t3
);
2325 tcg_gen_trunc_i64_tl(t0
, t2
);
2326 tcg_gen_shri_i64(t2
, t2
, 32);
2327 tcg_gen_trunc_i64_tl(t1
, t2
);
2328 tcg_temp_free_i64(t2
);
2329 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2330 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2336 TCGv_i64 t2
= tcg_temp_new_i64();
2337 TCGv_i64 t3
= tcg_temp_new_i64();
2339 tcg_gen_ext32u_tl(t0
, t0
);
2340 tcg_gen_ext32u_tl(t1
, t1
);
2341 tcg_gen_extu_tl_i64(t2
, t0
);
2342 tcg_gen_extu_tl_i64(t3
, t1
);
2343 tcg_gen_mul_i64(t2
, t2
, t3
);
2344 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2345 tcg_gen_sub_i64(t2
, t3
, t2
);
2346 tcg_temp_free_i64(t3
);
2347 tcg_gen_trunc_i64_tl(t0
, t2
);
2348 tcg_gen_shri_i64(t2
, t2
, 32);
2349 tcg_gen_trunc_i64_tl(t1
, t2
);
2350 tcg_temp_free_i64(t2
);
2351 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2352 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
    /* default: reserved instruction */
    generate_exception(ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
}
static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
                            int rd, int rs, int rt)
{
    const char *opn = "mul vr54xx";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case OPC_VR54XX_MULS:
        gen_helper_muls(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSU:
        gen_helper_mulsu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACC:
        gen_helper_macc(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCU:
        gen_helper_maccu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSAC:
        gen_helper_msac(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACU:
        gen_helper_msacu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULHI:
        gen_helper_mulhi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULHIU:
        gen_helper_mulhiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSHI:
        gen_helper_mulshi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSHIU:
        gen_helper_mulshiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCHI:
        gen_helper_macchi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCHIU:
        gen_helper_macchiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACHI:
        gen_helper_msachi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACHIU:
        gen_helper_msachiu(t0, cpu_env, t0, t1);
        break;
    default:
        MIPS_INVAL("mul vr54xx");
        generate_exception(ctx, EXCP_RI);
        break;
    }
    gen_store_gpr(t0, rd);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
}
static void gen_cl (DisasContext *ctx, uint32_t opc,
                    int rd, int rs)
{
    const char *opn = "CLx";
    TCGv t0;

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    /* CLO */
    gen_helper_clo(cpu_gpr[rd], t0);
    /* CLZ */
    gen_helper_clz(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
    /* DCLO */
    gen_helper_dclo(cpu_gpr[rd], t0);
    /* DCLZ */
    gen_helper_dclz(cpu_gpr[rd], t0);
#endif
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
}
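
/* The CLO/CLZ family is handed off to helpers rather than open-coded in TCG,
 * presumably because TCG of this vintage has no count-leading-zeros op; each
 * helper counts the leading ones or zeros of the source value and the result
 * is written straight into rd. */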
2487 /* Godson integer instructions */
2488 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
2489 int rd
, int rs
, int rt
)
2491 const char *opn
= "loongson";
2503 case OPC_MULTU_G_2E
:
2504 case OPC_MULTU_G_2F
:
2505 #if defined(TARGET_MIPS64)
2506 case OPC_DMULT_G_2E
:
2507 case OPC_DMULT_G_2F
:
2508 case OPC_DMULTU_G_2E
:
2509 case OPC_DMULTU_G_2F
:
2511 t0
= tcg_temp_new();
2512 t1
= tcg_temp_new();
2515 t0
= tcg_temp_local_new();
2516 t1
= tcg_temp_local_new();
2520 gen_load_gpr(t0
, rs
);
2521 gen_load_gpr(t1
, rt
);
2526 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2527 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2530 case OPC_MULTU_G_2E
:
2531 case OPC_MULTU_G_2F
:
2532 tcg_gen_ext32u_tl(t0
, t0
);
2533 tcg_gen_ext32u_tl(t1
, t1
);
2534 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2535 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2541 int l1
= gen_new_label();
2542 int l2
= gen_new_label();
2543 int l3
= gen_new_label();
2544 tcg_gen_ext32s_tl(t0
, t0
);
2545 tcg_gen_ext32s_tl(t1
, t1
);
2546 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2547 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2550 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2551 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2552 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2555 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2556 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2564 int l1
= gen_new_label();
2565 int l2
= gen_new_label();
2566 tcg_gen_ext32u_tl(t0
, t0
);
2567 tcg_gen_ext32u_tl(t1
, t1
);
2568 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2569 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2572 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2573 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2581 int l1
= gen_new_label();
2582 int l2
= gen_new_label();
2583 int l3
= gen_new_label();
2584 tcg_gen_ext32u_tl(t0
, t0
);
2585 tcg_gen_ext32u_tl(t1
, t1
);
2586 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2587 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2588 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2590 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2593 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2594 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2602 int l1
= gen_new_label();
2603 int l2
= gen_new_label();
2604 tcg_gen_ext32u_tl(t0
, t0
);
2605 tcg_gen_ext32u_tl(t1
, t1
);
2606 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2607 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2610 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2611 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2616 #if defined(TARGET_MIPS64)
2617 case OPC_DMULT_G_2E
:
2618 case OPC_DMULT_G_2F
:
2619 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2622 case OPC_DMULTU_G_2E
:
2623 case OPC_DMULTU_G_2F
:
2624 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2630 int l1
= gen_new_label();
2631 int l2
= gen_new_label();
2632 int l3
= gen_new_label();
2633 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2634 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2637 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2638 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2639 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2642 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2647 case OPC_DDIVU_G_2E
:
2648 case OPC_DDIVU_G_2F
:
2650 int l1
= gen_new_label();
2651 int l2
= gen_new_label();
2652 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2653 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2656 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2664 int l1
= gen_new_label();
2665 int l2
= gen_new_label();
2666 int l3
= gen_new_label();
2667 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2668 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2669 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2671 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2674 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2679 case OPC_DMODU_G_2E
:
2680 case OPC_DMODU_G_2F
:
2682 int l1
= gen_new_label();
2683 int l2
= gen_new_label();
2684 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2685 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2688 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2696 (void)opn
; /* avoid a compiler warning */
2697 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2702 /* Loongson multimedia instructions */
2703 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
2705 const char *opn
= "loongson_cp2";
2706 uint32_t opc
, shift_max
;
2709 opc
= MASK_LMI(ctx
->opcode
);
2715 t0
= tcg_temp_local_new_i64();
2716 t1
= tcg_temp_local_new_i64();
2719 t0
= tcg_temp_new_i64();
2720 t1
= tcg_temp_new_i64();
2724 gen_load_fpr64(ctx
, t0
, rs
);
2725 gen_load_fpr64(ctx
, t1
, rt
);
2727 #define LMI_HELPER(UP, LO) \
2728 case OPC_##UP: gen_helper_##LO(t0, t0, t1); opn = #LO; break
2729 #define LMI_HELPER_1(UP, LO) \
2730 case OPC_##UP: gen_helper_##LO(t0, t0); opn = #LO; break
2731 #define LMI_DIRECT(UP, LO, OP) \
2732 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); opn = #LO; break
2735 LMI_HELPER(PADDSH
, paddsh
);
2736 LMI_HELPER(PADDUSH
, paddush
);
2737 LMI_HELPER(PADDH
, paddh
);
2738 LMI_HELPER(PADDW
, paddw
);
2739 LMI_HELPER(PADDSB
, paddsb
);
2740 LMI_HELPER(PADDUSB
, paddusb
);
2741 LMI_HELPER(PADDB
, paddb
);
2743 LMI_HELPER(PSUBSH
, psubsh
);
2744 LMI_HELPER(PSUBUSH
, psubush
);
2745 LMI_HELPER(PSUBH
, psubh
);
2746 LMI_HELPER(PSUBW
, psubw
);
2747 LMI_HELPER(PSUBSB
, psubsb
);
2748 LMI_HELPER(PSUBUSB
, psubusb
);
2749 LMI_HELPER(PSUBB
, psubb
);
2751 LMI_HELPER(PSHUFH
, pshufh
);
2752 LMI_HELPER(PACKSSWH
, packsswh
);
2753 LMI_HELPER(PACKSSHB
, packsshb
);
2754 LMI_HELPER(PACKUSHB
, packushb
);
2756 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
2757 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
2758 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
2759 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
2760 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
2761 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
2763 LMI_HELPER(PAVGH
, pavgh
);
2764 LMI_HELPER(PAVGB
, pavgb
);
2765 LMI_HELPER(PMAXSH
, pmaxsh
);
2766 LMI_HELPER(PMINSH
, pminsh
);
2767 LMI_HELPER(PMAXUB
, pmaxub
);
2768 LMI_HELPER(PMINUB
, pminub
);
2770 LMI_HELPER(PCMPEQW
, pcmpeqw
);
2771 LMI_HELPER(PCMPGTW
, pcmpgtw
);
2772 LMI_HELPER(PCMPEQH
, pcmpeqh
);
2773 LMI_HELPER(PCMPGTH
, pcmpgth
);
2774 LMI_HELPER(PCMPEQB
, pcmpeqb
);
2775 LMI_HELPER(PCMPGTB
, pcmpgtb
);
2777 LMI_HELPER(PSLLW
, psllw
);
2778 LMI_HELPER(PSLLH
, psllh
);
2779 LMI_HELPER(PSRLW
, psrlw
);
2780 LMI_HELPER(PSRLH
, psrlh
);
2781 LMI_HELPER(PSRAW
, psraw
);
2782 LMI_HELPER(PSRAH
, psrah
);
2784 LMI_HELPER(PMULLH
, pmullh
);
2785 LMI_HELPER(PMULHH
, pmulhh
);
2786 LMI_HELPER(PMULHUH
, pmulhuh
);
2787 LMI_HELPER(PMADDHW
, pmaddhw
);
2789 LMI_HELPER(PASUBUB
, pasubub
);
2790 LMI_HELPER_1(BIADD
, biadd
);
2791 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
2793 LMI_DIRECT(PADDD
, paddd
, add
);
2794 LMI_DIRECT(PSUBD
, psubd
, sub
);
2795 LMI_DIRECT(XOR_CP2
, xor, xor);
2796 LMI_DIRECT(NOR_CP2
, nor
, nor
);
2797 LMI_DIRECT(AND_CP2
, and, and);
2798 LMI_DIRECT(PANDN
, pandn
, andc
);
2799 LMI_DIRECT(OR
, or, or);
2802 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
2806 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
2810 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
2814 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
2819 tcg_gen_andi_i64(t1
, t1
, 3);
2820 tcg_gen_shli_i64(t1
, t1
, 4);
2821 tcg_gen_shr_i64(t0
, t0
, t1
);
2822 tcg_gen_ext16u_i64(t0
, t0
);
2827 tcg_gen_add_i64(t0
, t0
, t1
);
2828 tcg_gen_ext32s_i64(t0
, t0
);
2832 tcg_gen_sub_i64(t0
, t0
, t1
);
2833 tcg_gen_ext32s_i64(t0
, t0
);
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        /* SLL / DSLL */
        tcg_gen_shl_i64(t0, t0, t1);
        /* SRA / DSRA */
        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same. */
        tcg_gen_sar_i64(t0, t0, t1);
        /* SRL */
        /* We want to shift in zeros for SRL; zero-extend first. */
        tcg_gen_ext32u_i64(t0, t0);
        /* DSRL */
        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
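
        /* Shift counts of shift_max or more must yield zero.  The setcondi
         * produces 1 when the count is in range and 0 otherwise; negating
         * that gives an all-ones or all-zero mask, and the final AND either
         * keeps or clears the shifted value accordingly. */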
        /* ADD_CP2 / DADD_CP2 with overflow check */
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            int lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);

            opn = (opc == OPC_ADD_CP2 ? "add" : "dadd");
        }
        /* SUB_CP2 / DSUB_CP2 with overflow check */
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            int lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);

            opn = (opc == OPC_SUB_CP2 ? "sub" : "dsub");
        }
        /* PMULUW */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field? */
        /* Unhandled encodings raise a reserved instruction exception. */
        generate_exception(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn,
               fregnames[rd], fregnames[rs], fregnames[rt]);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
    /* Compare two registers */
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    /* Compare register to immediate */
    if (rs != 0 || imm != 0) {
        gen_load_gpr(t0, rs);
        tcg_gen_movi_tl(t1, (int32_t)imm);
    }
    /* When both operands are statically known the trap is resolved at
       translation time: */
    switch (opc) {
    case OPC_TEQ:   /* rs == rs */
    case OPC_TEQI:  /* r0 == 0 */
    case OPC_TGE:   /* rs >= rs */
    case OPC_TGEI:  /* r0 >= 0 */
    case OPC_TGEU:  /* rs >= rs unsigned */
    case OPC_TGEIU: /* r0 >= 0 unsigned */
        /* Always trap */
        generate_exception(ctx, EXCP_TRAP);
        break;
    case OPC_TLT:   /* rs < rs */
    case OPC_TLTI:  /* r0 < 0 */
    case OPC_TLTU:  /* rs < rs unsigned */
    case OPC_TLTIU: /* r0 < 0 unsigned */
    case OPC_TNE:   /* rs != rs */
    case OPC_TNEI:  /* r0 != 0 */
        /* Never trap: treat as NOP. */
        break;
    }
    /* Otherwise branch around the trap when the condition does not hold: */
    {
        int l1 = gen_new_label();

        /* TEQ / TEQI */
        tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
        /* TGE / TGEI */
        tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
        /* TGEU / TGEIU */
        tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
        /* TLT / TLTI */
        tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
        /* TLTU / TLTIU */
        tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        /* TNE / TNEI */
        tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);

        generate_exception(ctx, EXCP_TRAP);
        gen_set_label(l1);
    }
}
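
/* For the register and immediate trap forms the sense of the comparison is
 * inverted: the brcond branches over the trap when the condition does NOT
 * hold (for example TEQ branches on TCG_COND_NE), so execution reaches
 * generate_exception(ctx, EXCP_TRAP) only when the trap condition is met. */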
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;

    tb = ctx->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        gen_save_pc(dest);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_0e0i(raise_exception, EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
    }
}
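
/* Direct block chaining via tcg_gen_exit_tb((tcg_target_long)tb + n) is only
 * safe when the branch target lies in the same guest page as the current TB
 * and single-stepping is disabled, which is exactly what the condition above
 * tests; otherwise the PC is saved, EXCP_DEBUG is raised under single-step,
 * and the TB is exited without chaining. */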
3087 /* Branches (before delay slot) */
3088 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
3090 int rs
, int rt
, int32_t offset
)
3092 target_ulong btgt
= -1;
3094 int bcond_compute
= 0;
3095 TCGv t0
= tcg_temp_new();
3096 TCGv t1
= tcg_temp_new();
3098 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3099 #ifdef MIPS_DEBUG_DISAS
3100 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
3102 generate_exception(ctx
, EXCP_RI
);
3106 /* Load needed operands */
3112 /* Compare two registers */
3114 gen_load_gpr(t0
, rs
);
3115 gen_load_gpr(t1
, rt
);
3118 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3134 /* Compare to zero */
3136 gen_load_gpr(t0
, rs
);
3139 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3146 /* Jump to immediate */
3147 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
3153 /* Jump to register */
3154 if (offset
!= 0 && offset
!= 16) {
3155 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
3156 others are reserved. */
3157 MIPS_INVAL("jump hint");
3158 generate_exception(ctx
, EXCP_RI
);
3161 gen_load_gpr(btarget
, rs
);
3164 MIPS_INVAL("branch/jump");
3165 generate_exception(ctx
, EXCP_RI
);
3168 if (bcond_compute
== 0) {
3169 /* No condition to be computed */
3171 case OPC_BEQ
: /* rx == rx */
3172 case OPC_BEQL
: /* rx == rx likely */
3173 case OPC_BGEZ
: /* 0 >= 0 */
3174 case OPC_BGEZL
: /* 0 >= 0 likely */
3175 case OPC_BLEZ
: /* 0 <= 0 */
3176 case OPC_BLEZL
: /* 0 <= 0 likely */
3178 ctx
->hflags
|= MIPS_HFLAG_B
;
3179 MIPS_DEBUG("balways");
3182 case OPC_BGEZAL
: /* 0 >= 0 */
3183 case OPC_BGEZALL
: /* 0 >= 0 likely */
3184 ctx
->hflags
|= (opc
== OPC_BGEZALS
3186 : MIPS_HFLAG_BDS32
);
3187 /* Always take and link */
3189 ctx
->hflags
|= MIPS_HFLAG_B
;
3190 MIPS_DEBUG("balways and link");
3192 case OPC_BNE
: /* rx != rx */
3193 case OPC_BGTZ
: /* 0 > 0 */
3194 case OPC_BLTZ
: /* 0 < 0 */
3196 MIPS_DEBUG("bnever (NOP)");
3199 case OPC_BLTZAL
: /* 0 < 0 */
3200 ctx
->hflags
|= (opc
== OPC_BLTZALS
3202 : MIPS_HFLAG_BDS32
);
3203 /* Handle as an unconditional branch to get correct delay
3206 btgt
= ctx
->pc
+ (opc
== OPC_BLTZALS
? 6 : 8);
3207 ctx
->hflags
|= MIPS_HFLAG_B
;
3208 MIPS_DEBUG("bnever and link");
3210 case OPC_BLTZALL
: /* 0 < 0 likely */
3211 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
3212 /* Skip the instruction in the delay slot */
3213 MIPS_DEBUG("bnever, link and skip");
3216 case OPC_BNEL
: /* rx != rx likely */
3217 case OPC_BGTZL
: /* 0 > 0 likely */
3218 case OPC_BLTZL
: /* 0 < 0 likely */
3219 /* Skip the instruction in the delay slot */
3220 MIPS_DEBUG("bnever and skip");
3224 ctx
->hflags
|= MIPS_HFLAG_B
;
3225 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
3229 ctx
->hflags
|= MIPS_HFLAG_BX
;
3234 ctx
->hflags
|= MIPS_HFLAG_B
;
3235 ctx
->hflags
|= ((opc
== OPC_JALS
|| opc
== OPC_JALXS
)
3237 : MIPS_HFLAG_BDS32
);
3238 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
3241 ctx
->hflags
|= MIPS_HFLAG_BR
;
3242 if (insn_bytes
== 4)
3243 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
3244 MIPS_DEBUG("jr %s", regnames
[rs
]);
3250 ctx
->hflags
|= MIPS_HFLAG_BR
;
3251 ctx
->hflags
|= (opc
== OPC_JALRS
3253 : MIPS_HFLAG_BDS32
);
3254 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
3257 MIPS_INVAL("branch/jump");
3258 generate_exception(ctx
, EXCP_RI
);
3264 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3265 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
3266 regnames
[rs
], regnames
[rt
], btgt
);
3269 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3270 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
3271 regnames
[rs
], regnames
[rt
], btgt
);
3274 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3275 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
3276 regnames
[rs
], regnames
[rt
], btgt
);
3279 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3280 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
3281 regnames
[rs
], regnames
[rt
], btgt
);
3284 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3285 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3288 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3289 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3293 ctx
->hflags
|= (opc
== OPC_BGEZALS
3295 : MIPS_HFLAG_BDS32
);
3296 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3297 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3301 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3303 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3306 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3307 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3310 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3311 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3314 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3315 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3318 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3319 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3322 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3323 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3326 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3327 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3331 ctx
->hflags
|= (opc
== OPC_BLTZALS
3333 : MIPS_HFLAG_BDS32
);
3334 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3336 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3338 ctx
->hflags
|= MIPS_HFLAG_BC
;
3341 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3343 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3345 ctx
->hflags
|= MIPS_HFLAG_BL
;
3348 MIPS_INVAL("conditional branch/jump");
3349 generate_exception(ctx
, EXCP_RI
);
3353 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
3354 blink
, ctx
->hflags
, btgt
);
3356 ctx
->btarget
= btgt
;
3358 int post_delay
= insn_bytes
;
3359 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
3361 if (opc
!= OPC_JALRC
)
3362 post_delay
+= ((ctx
->hflags
& MIPS_HFLAG_BDS16
) ? 2 : 4);
3364 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
3368 if (insn_bytes
== 2)
3369 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    target_ulong mask;

    gen_load_gpr(t1, rs);
    /* EXT */
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
    tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
    /* DEXTM */
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
    /* DEXTU */
    tcg_gen_shri_tl(t0, t1, lsb + 32);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
    /* DEXT */
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
#endif
    /* INS */
    mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
    /* DINSM */
    mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    /* DINSU */
    mask = ((1ULL << (msb - lsb + 1)) - 1) << (lsb + 32);
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb + 32);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    /* DINS */
    gen_load_gpr(t0, rt);
    mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
#endif
    /* default: */
    MIPS_INVAL("bitops");
    generate_exception(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
}
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    /* WSBH */
    {
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
    }
    /* SEB */
    tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
    /* SEH */
    tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
    /* DSBH */
    {
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
    }
    /* DSHD */
    {
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
    }
#endif
    /* default: */
    MIPS_INVAL("bshfl");
    generate_exception(ctx, EXCP_RI);
}
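
/* WSBH swaps the bytes within each halfword using a mask-and-shift pair: the
 * value shifted right by 8 keeps the even bytes (mask 0x00FF00FF) and the
 * value shifted left by 8 keeps the odd ones, then the two halves are ORed,
 * e.g. 0x11223344 -> 0x22114433 (illustrative).  DSBH applies the same idea
 * with 64-bit masks, and DSHD follows the 16-bit swap with a rotate by 32
 * built from the final shri/shli/or. */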
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}

static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
{
    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_st_tl(arg, cpu_env, off);
}
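
/* Many CP0 registers are architecturally 32-bit even on a 64-bit target, so
 * the helpers above access them through a 32-bit view of CPUMIPSState and
 * sign-extend into the target-long sized TCG value, matching how MFC0
 * results are observed by the guest. */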
3569 static void gen_mfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3571 const char *rn
= "invalid";
3574 check_insn(env
, ctx
, ISA_MIPS32
);
3580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
3584 check_insn(env
, ctx
, ASE_MT
);
3585 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
3589 check_insn(env
, ctx
, ASE_MT
);
3590 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
3594 check_insn(env
, ctx
, ASE_MT
);
3595 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
3605 gen_helper_mfc0_random(arg
, cpu_env
);
3609 check_insn(env
, ctx
, ASE_MT
);
3610 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
3614 check_insn(env
, ctx
, ASE_MT
);
3615 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
3619 check_insn(env
, ctx
, ASE_MT
);
3620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
3624 check_insn(env
, ctx
, ASE_MT
);
3625 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
3629 check_insn(env
, ctx
, ASE_MT
);
3630 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
3634 check_insn(env
, ctx
, ASE_MT
);
3635 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
3636 rn
= "VPEScheFBack";
3639 check_insn(env
, ctx
, ASE_MT
);
3640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
3650 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
3651 tcg_gen_ext32s_tl(arg
, arg
);
3655 check_insn(env
, ctx
, ASE_MT
);
3656 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
3660 check_insn(env
, ctx
, ASE_MT
);
3661 gen_helper_mfc0_tcbind(arg
, cpu_env
);
3665 check_insn(env
, ctx
, ASE_MT
);
3666 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
3670 check_insn(env
, ctx
, ASE_MT
);
3671 gen_helper_mfc0_tchalt(arg
, cpu_env
);
3675 check_insn(env
, ctx
, ASE_MT
);
3676 gen_helper_mfc0_tccontext(arg
, cpu_env
);
3680 check_insn(env
, ctx
, ASE_MT
);
3681 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
3685 check_insn(env
, ctx
, ASE_MT
);
3686 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
3696 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
3697 tcg_gen_ext32s_tl(arg
, arg
);
3707 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
3708 tcg_gen_ext32s_tl(arg
, arg
);
3712 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3713 rn
= "ContextConfig";
3722 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
3726 check_insn(env
, ctx
, ISA_MIPS32R2
);
3727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
3737 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
3741 check_insn(env
, ctx
, ISA_MIPS32R2
);
3742 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
3746 check_insn(env
, ctx
, ISA_MIPS32R2
);
3747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
3751 check_insn(env
, ctx
, ISA_MIPS32R2
);
3752 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
3756 check_insn(env
, ctx
, ISA_MIPS32R2
);
3757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
3761 check_insn(env
, ctx
, ISA_MIPS32R2
);
3762 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
3772 check_insn(env
, ctx
, ISA_MIPS32R2
);
3773 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
3783 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
3784 tcg_gen_ext32s_tl(arg
, arg
);
3794 /* Mark as an IO operation because we read the time. */
3797 gen_helper_mfc0_count(arg
, cpu_env
);
3801 /* Break the TB to be able to take timer interrupts immediately
3802 after reading count. */
3803 ctx
->bstate
= BS_STOP
;
3806 /* 6,7 are implementation dependent */
3814 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
3815 tcg_gen_ext32s_tl(arg
, arg
);
3825 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
3828 /* 6,7 are implementation dependent */
3836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
3840 check_insn(env
, ctx
, ISA_MIPS32R2
);
3841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
3845 check_insn(env
, ctx
, ISA_MIPS32R2
);
3846 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
3850 check_insn(env
, ctx
, ISA_MIPS32R2
);
3851 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
3861 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
3871 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
3872 tcg_gen_ext32s_tl(arg
, arg
);
3882 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
3886 check_insn(env
, ctx
, ISA_MIPS32R2
);
3887 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
3897 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
3901 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
3905 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
3909 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
3912 /* 4,5 are reserved */
3913 /* 6,7 are implementation dependent */
3915 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
3919 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
3929 gen_helper_mfc0_lladdr(arg
, cpu_env
);
3939 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
3949 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
3959 #if defined(TARGET_MIPS64)
3960 check_insn(env
, ctx
, ISA_MIPS3
);
3961 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
3962 tcg_gen_ext32s_tl(arg
, arg
);
3971 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3974 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
3988 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
3992 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3993 rn
= "TraceControl";
3996 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3997 rn
= "TraceControl2";
4000 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
4001 rn
= "UserTraceData";
4004 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
4015 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
4016 tcg_gen_ext32s_tl(arg
, arg
);
4026 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
4027 rn
= "Performance0";
4030 // gen_helper_mfc0_performance1(arg);
4031 rn
= "Performance1";
4034 // gen_helper_mfc0_performance2(arg);
4035 rn
= "Performance2";
4038 // gen_helper_mfc0_performance3(arg);
4039 rn
= "Performance3";
4042 // gen_helper_mfc0_performance4(arg);
4043 rn
= "Performance4";
4046 // gen_helper_mfc0_performance5(arg);
4047 rn
= "Performance5";
4050 // gen_helper_mfc0_performance6(arg);
4051 rn
= "Performance6";
4054 // gen_helper_mfc0_performance7(arg);
4055 rn
= "Performance7";
4062 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4068 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4081 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4088 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
4101 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
4108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
4118 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4119 tcg_gen_ext32s_tl(arg
, arg
);
4130 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4140 (void)rn
; /* avoid a compiler warning */
4141 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4145 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4146 generate_exception(ctx
, EXCP_RI
);
4149 static void gen_mtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4151 const char *rn
= "invalid";
4154 check_insn(env
, ctx
, ISA_MIPS32
);
4163 gen_helper_mtc0_index(cpu_env
, arg
);
4167 check_insn(env
, ctx
, ASE_MT
);
4168 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
4172 check_insn(env
, ctx
, ASE_MT
);
4177 check_insn(env
, ctx
, ASE_MT
);
4192 check_insn(env
, ctx
, ASE_MT
);
4193 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
4197 check_insn(env
, ctx
, ASE_MT
);
4198 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
4202 check_insn(env
, ctx
, ASE_MT
);
4203 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
4207 check_insn(env
, ctx
, ASE_MT
);
4208 gen_helper_mtc0_yqmask(cpu_env
, arg
);
4212 check_insn(env
, ctx
, ASE_MT
);
4213 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4217 check_insn(env
, ctx
, ASE_MT
);
4218 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4219 rn
= "VPEScheFBack";
4222 check_insn(env
, ctx
, ASE_MT
);
4223 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
4233 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
4237 check_insn(env
, ctx
, ASE_MT
);
4238 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
4242 check_insn(env
, ctx
, ASE_MT
);
4243 gen_helper_mtc0_tcbind(cpu_env
, arg
);
4247 check_insn(env
, ctx
, ASE_MT
);
4248 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
4252 check_insn(env
, ctx
, ASE_MT
);
4253 gen_helper_mtc0_tchalt(cpu_env
, arg
);
4257 check_insn(env
, ctx
, ASE_MT
);
4258 gen_helper_mtc0_tccontext(cpu_env
, arg
);
4262 check_insn(env
, ctx
, ASE_MT
);
4263 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
4267 check_insn(env
, ctx
, ASE_MT
);
4268 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
4278 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
4288 gen_helper_mtc0_context(cpu_env
, arg
);
4292 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
4293 rn
= "ContextConfig";
4302 gen_helper_mtc0_pagemask(cpu_env
, arg
);
4306 check_insn(env
, ctx
, ISA_MIPS32R2
);
4307 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
4317 gen_helper_mtc0_wired(cpu_env
, arg
);
4321 check_insn(env
, ctx
, ISA_MIPS32R2
);
4322 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
4326 check_insn(env
, ctx
, ISA_MIPS32R2
);
4327 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
4331 check_insn(env
, ctx
, ISA_MIPS32R2
);
4332 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
4336 check_insn(env
, ctx
, ISA_MIPS32R2
);
4337 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
4341 check_insn(env
, ctx
, ISA_MIPS32R2
);
4342 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
4352 check_insn(env
, ctx
, ISA_MIPS32R2
);
4353 gen_helper_mtc0_hwrena(cpu_env
, arg
);
4367 gen_helper_mtc0_count(cpu_env
, arg
);
4370 /* 6,7 are implementation dependent */
4378 gen_helper_mtc0_entryhi(cpu_env
, arg
);
4388 gen_helper_mtc0_compare(cpu_env
, arg
);
4391 /* 6,7 are implementation dependent */
4399 save_cpu_state(ctx
, 1);
4400 gen_helper_mtc0_status(cpu_env
, arg
);
4401 /* BS_STOP isn't good enough here, hflags may have changed. */
4402 gen_save_pc(ctx
->pc
+ 4);
4403 ctx
->bstate
= BS_EXCP
;
4407 check_insn(env
, ctx
, ISA_MIPS32R2
);
4408 gen_helper_mtc0_intctl(cpu_env
, arg
);
4409 /* Stop translation as we may have switched the execution mode */
4410 ctx
->bstate
= BS_STOP
;
4414 check_insn(env
, ctx
, ISA_MIPS32R2
);
4415 gen_helper_mtc0_srsctl(cpu_env
, arg
);
4416 /* Stop translation as we may have switched the execution mode */
4417 ctx
->bstate
= BS_STOP
;
4421 check_insn(env
, ctx
, ISA_MIPS32R2
);
4422 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4423 /* Stop translation as we may have switched the execution mode */
4424 ctx
->bstate
= BS_STOP
;
4434 save_cpu_state(ctx
, 1);
4435 gen_helper_mtc0_cause(cpu_env
, arg
);
4445 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_EPC
));
4459 check_insn(env
, ctx
, ISA_MIPS32R2
);
4460 gen_helper_mtc0_ebase(cpu_env
, arg
);
4470 gen_helper_mtc0_config0(cpu_env
, arg
);
4472 /* Stop translation as we may have switched the execution mode */
4473 ctx
->bstate
= BS_STOP
;
4476 /* ignored, read only */
4480 gen_helper_mtc0_config2(cpu_env
, arg
);
4482 /* Stop translation as we may have switched the execution mode */
4483 ctx
->bstate
= BS_STOP
;
4486 /* ignored, read only */
4489 /* 4,5 are reserved */
4490 /* 6,7 are implementation dependent */
4500 rn
= "Invalid config selector";
4507 gen_helper_mtc0_lladdr(cpu_env
, arg
);
4517 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
4527 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
4537 #if defined(TARGET_MIPS64)
4538 check_insn(env
, ctx
, ISA_MIPS3
);
4539 gen_helper_mtc0_xcontext(cpu_env
, arg
);
4548 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4551 gen_helper_mtc0_framemask(cpu_env
, arg
);
4560 rn
= "Diagnostic"; /* implementation dependent */
4565 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
4566 /* BS_STOP isn't good enough here, hflags may have changed. */
4567 gen_save_pc(ctx
->pc
+ 4);
4568 ctx
->bstate
= BS_EXCP
;
4572 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
4573 rn
= "TraceControl";
4574 /* Stop translation as we may have switched the execution mode */
4575 ctx
->bstate
= BS_STOP
;
4578 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
4579 rn
= "TraceControl2";
4580 /* Stop translation as we may have switched the execution mode */
4581 ctx
->bstate
= BS_STOP
;
4584 /* Stop translation as we may have switched the execution mode */
4585 ctx
->bstate
= BS_STOP
;
4586 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
4587 rn
= "UserTraceData";
4588 /* Stop translation as we may have switched the execution mode */
4589 ctx
->bstate
= BS_STOP
;
4592 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
4593 /* Stop translation as we may have switched the execution mode */
4594 ctx
->bstate
= BS_STOP
;
4605 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_DEPC
));
4615 gen_helper_mtc0_performance0(cpu_env
, arg
);
4616 rn
= "Performance0";
4619 // gen_helper_mtc0_performance1(arg);
4620 rn
= "Performance1";
4623 // gen_helper_mtc0_performance2(arg);
4624 rn
= "Performance2";
4627 // gen_helper_mtc0_performance3(arg);
4628 rn
= "Performance3";
4631 // gen_helper_mtc0_performance4(arg);
4632 rn
= "Performance4";
4635 // gen_helper_mtc0_performance5(arg);
4636 rn
= "Performance5";
4639 // gen_helper_mtc0_performance6(arg);
4640 rn
= "Performance6";
4643 // gen_helper_mtc0_performance7(arg);
4644 rn
= "Performance7";
4670 gen_helper_mtc0_taglo(cpu_env
, arg
);
4677 gen_helper_mtc0_datalo(cpu_env
, arg
);
4690 gen_helper_mtc0_taghi(cpu_env
, arg
);
4697 gen_helper_mtc0_datahi(cpu_env
, arg
);
4708 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4719 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4725 /* Stop translation as we may have switched the execution mode */
4726 ctx
->bstate
= BS_STOP
;
4731 (void)rn
; /* avoid a compiler warning */
4732 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4733 /* For simplicity assume that all writes can cause interrupts. */
4736 ctx
->bstate
= BS_STOP
;
4741 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4742 generate_exception(ctx
, EXCP_RI
);
4745 #if defined(TARGET_MIPS64)
4746 static void gen_dmfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4748 const char *rn
= "invalid";
4751 check_insn(env
, ctx
, ISA_MIPS64
);
4757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4761 check_insn(env
, ctx
, ASE_MT
);
4762 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4766 check_insn(env
, ctx
, ASE_MT
);
4767 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4771 check_insn(env
, ctx
, ASE_MT
);
4772 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4782 gen_helper_mfc0_random(arg
, cpu_env
);
4786 check_insn(env
, ctx
, ASE_MT
);
4787 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4791 check_insn(env
, ctx
, ASE_MT
);
4792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4796 check_insn(env
, ctx
, ASE_MT
);
4797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4801 check_insn(env
, ctx
, ASE_MT
);
4802 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
4806 check_insn(env
, ctx
, ASE_MT
);
4807 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4811 check_insn(env
, ctx
, ASE_MT
);
4812 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4813 rn
= "VPEScheFBack";
4816 check_insn(env
, ctx
, ASE_MT
);
4817 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4827 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4831 check_insn(env
, ctx
, ASE_MT
);
4832 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
4836 check_insn(env
, ctx
, ASE_MT
);
4837 gen_helper_mfc0_tcbind(arg
, cpu_env
);
4841 check_insn(env
, ctx
, ASE_MT
);
4842 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
4846 check_insn(env
, ctx
, ASE_MT
);
4847 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
4851 check_insn(env
, ctx
, ASE_MT
);
4852 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
4856 check_insn(env
, ctx
, ASE_MT
);
4857 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
4861 check_insn(env
, ctx
, ASE_MT
);
4862 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
4872 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4882 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
4886 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4887 rn
= "ContextConfig";
4896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
4900 check_insn(env
, ctx
, ISA_MIPS32R2
);
4901 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
4911 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
4915 check_insn(env
, ctx
, ISA_MIPS32R2
);
4916 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
4920 check_insn(env
, ctx
, ISA_MIPS32R2
);
4921 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
4925 check_insn(env
, ctx
, ISA_MIPS32R2
);
4926 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
4930 check_insn(env
, ctx
, ISA_MIPS32R2
);
4931 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
4935 check_insn(env
, ctx
, ISA_MIPS32R2
);
4936 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
4946 check_insn(env
, ctx
, ISA_MIPS32R2
);
4947 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
4957 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
4967 /* Mark as an IO operation because we read the time. */
4970 gen_helper_mfc0_count(arg
, cpu_env
);
4974 /* Break the TB to be able to take timer interrupts immediately
4975 after reading count. */
4976 ctx
->bstate
= BS_STOP
;
4979 /* 6,7 are implementation dependent */
4987 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
4997 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5000 /* 6,7 are implementation dependent */
5008 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5012 check_insn(env
, ctx
, ISA_MIPS32R2
);
5013 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5017 check_insn(env
, ctx
, ISA_MIPS32R2
);
5018 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5022 check_insn(env
, ctx
, ISA_MIPS32R2
);
5023 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5033 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5043 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5053 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5057 check_insn(env
, ctx
, ISA_MIPS32R2
);
5058 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5068 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5072 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5076 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5080 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5083 /* 6,7 are implementation dependent */
5085 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5089 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5099 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
5109 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
5119 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5129 check_insn(env
, ctx
, ISA_MIPS3
);
5130 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5138 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5141 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
5155 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5159 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
5160 rn
= "TraceControl";
5163 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
5164 rn
= "TraceControl2";
5167 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
5168 rn
= "UserTraceData";
5171 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
5182 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5192 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5193 rn
= "Performance0";
5196 // gen_helper_dmfc0_performance1(arg);
5197 rn
= "Performance1";
5200 // gen_helper_dmfc0_performance2(arg);
5201 rn
= "Performance2";
5204 // gen_helper_dmfc0_performance3(arg);
5205 rn
= "Performance3";
5208 // gen_helper_dmfc0_performance4(arg);
5209 rn
= "Performance4";
5212 // gen_helper_dmfc0_performance5(arg);
5213 rn
= "Performance5";
5216 // gen_helper_dmfc0_performance6(arg);
5217 rn
= "Performance6";
5220 // gen_helper_dmfc0_performance7(arg);
5221 rn
= "Performance7";
5228 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5235 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5255 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5275 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5285 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5296 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5306 (void)rn
; /* avoid a compiler warning */
5307 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5311 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5312 generate_exception(ctx
, EXCP_RI
);
5315 static void gen_dmtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5317 const char *rn
= "invalid";
5320 check_insn(env
, ctx
, ISA_MIPS64
);
5329 gen_helper_mtc0_index(cpu_env
, arg
);
5333 check_insn(env
, ctx
, ASE_MT
);
5334 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5338 check_insn(env
, ctx
, ASE_MT
);
5343 check_insn(env
, ctx
, ASE_MT
);
5358 check_insn(env
, ctx
, ASE_MT
);
5359 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5363 check_insn(env
, ctx
, ASE_MT
);
5364 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5368 check_insn(env
, ctx
, ASE_MT
);
5369 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5373 check_insn(env
, ctx
, ASE_MT
);
5374 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5378 check_insn(env
, ctx
, ASE_MT
);
5379 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5383 check_insn(env
, ctx
, ASE_MT
);
5384 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5385 rn
= "VPEScheFBack";
5388 check_insn(env
, ctx
, ASE_MT
);
5389 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5399 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5403 check_insn(env
, ctx
, ASE_MT
);
5404 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5408 check_insn(env
, ctx
, ASE_MT
);
5409 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5413 check_insn(env
, ctx
, ASE_MT
);
5414 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5418 check_insn(env
, ctx
, ASE_MT
);
5419 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5423 check_insn(env
, ctx
, ASE_MT
);
5424 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5428 check_insn(env
, ctx
, ASE_MT
);
5429 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5433 check_insn(env
, ctx
, ASE_MT
);
5434 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5444 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5454 gen_helper_mtc0_context(cpu_env
, arg
);
5458 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5459 rn
= "ContextConfig";
5468 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5472 check_insn(env
, ctx
, ISA_MIPS32R2
);
5473 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5483 gen_helper_mtc0_wired(cpu_env
, arg
);
5487 check_insn(env
, ctx
, ISA_MIPS32R2
);
5488 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5492 check_insn(env
, ctx
, ISA_MIPS32R2
);
5493 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5497 check_insn(env
, ctx
, ISA_MIPS32R2
);
5498 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5502 check_insn(env
, ctx
, ISA_MIPS32R2
);
5503 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5507 check_insn(env
, ctx
, ISA_MIPS32R2
);
5508 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5518 check_insn(env
, ctx
, ISA_MIPS32R2
);
5519 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5533 gen_helper_mtc0_count(cpu_env
, arg
);
5536 /* 6,7 are implementation dependent */
5540 /* Stop translation as we may have switched the execution mode */
5541 ctx
->bstate
= BS_STOP
;
5546 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5556 gen_helper_mtc0_compare(cpu_env
, arg
);
5559 /* 6,7 are implementation dependent */
5563 /* Stop translation as we may have switched the execution mode */
5564 ctx
->bstate
= BS_STOP
;
5569 save_cpu_state(ctx
, 1);
5570 gen_helper_mtc0_status(cpu_env
, arg
);
5571 /* BS_STOP isn't good enough here, hflags may have changed. */
5572 gen_save_pc(ctx
->pc
+ 4);
5573 ctx
->bstate
= BS_EXCP
;
5577 check_insn(env
, ctx
, ISA_MIPS32R2
);
5578 gen_helper_mtc0_intctl(cpu_env
, arg
);
5579 /* Stop translation as we may have switched the execution mode */
5580 ctx
->bstate
= BS_STOP
;
5584 check_insn(env
, ctx
, ISA_MIPS32R2
);
5585 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5586 /* Stop translation as we may have switched the execution mode */
5587 ctx
->bstate
= BS_STOP
;
5591 check_insn(env
, ctx
, ISA_MIPS32R2
);
5592 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5593 /* Stop translation as we may have switched the execution mode */
5594 ctx
->bstate
= BS_STOP
;
        save_cpu_state(ctx, 1);
        /* Mark as an IO operation because we may trigger a software
           interrupt. */
        gen_helper_mtc0_cause(cpu_env, arg);
        /* Stop translation as we may have triggered an interrupt */
        ctx->bstate = BS_STOP;
5625 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5639 check_insn(env
, ctx
, ISA_MIPS32R2
);
5640 gen_helper_mtc0_ebase(cpu_env
, arg
);
5650 gen_helper_mtc0_config0(cpu_env
, arg
);
5652 /* Stop translation as we may have switched the execution mode */
5653 ctx
->bstate
= BS_STOP
;
5656 /* ignored, read only */
5660 gen_helper_mtc0_config2(cpu_env
, arg
);
5662 /* Stop translation as we may have switched the execution mode */
5663 ctx
->bstate
= BS_STOP
;
5669 /* 6,7 are implementation dependent */
5671 rn
= "Invalid config selector";
5678 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5688 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5698 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5708 check_insn(env
, ctx
, ISA_MIPS3
);
5709 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5717 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5720 gen_helper_mtc0_framemask(cpu_env
, arg
);
5729 rn
= "Diagnostic"; /* implementation dependent */
5734 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
5735 /* BS_STOP isn't good enough here, hflags may have changed. */
5736 gen_save_pc(ctx
->pc
+ 4);
5737 ctx
->bstate
= BS_EXCP
;
5741 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
5742 /* Stop translation as we may have switched the execution mode */
5743 ctx
->bstate
= BS_STOP
;
5744 rn
= "TraceControl";
5747 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
5748 /* Stop translation as we may have switched the execution mode */
5749 ctx
->bstate
= BS_STOP
;
5750 rn
= "TraceControl2";
5753 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
5754 /* Stop translation as we may have switched the execution mode */
5755 ctx
->bstate
= BS_STOP
;
5756 rn
= "UserTraceData";
5759 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
5760 /* Stop translation as we may have switched the execution mode */
5761 ctx
->bstate
= BS_STOP
;
5772 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5782 gen_helper_mtc0_performance0(cpu_env
, arg
);
5783 rn
= "Performance0";
5786 // gen_helper_mtc0_performance1(cpu_env, arg);
5787 rn
= "Performance1";
5790 // gen_helper_mtc0_performance2(cpu_env, arg);
5791 rn
= "Performance2";
5794 // gen_helper_mtc0_performance3(cpu_env, arg);
5795 rn
= "Performance3";
5798 // gen_helper_mtc0_performance4(cpu_env, arg);
5799 rn
= "Performance4";
5802 // gen_helper_mtc0_performance5(cpu_env, arg);
5803 rn
= "Performance5";
5806 // gen_helper_mtc0_performance6(cpu_env, arg);
5807 rn
= "Performance6";
5810 // gen_helper_mtc0_performance7(cpu_env, arg);
5811 rn
= "Performance7";
5837 gen_helper_mtc0_taglo(cpu_env
, arg
);
5844 gen_helper_mtc0_datalo(cpu_env
, arg
);
5857 gen_helper_mtc0_taghi(cpu_env
, arg
);
5864 gen_helper_mtc0_datahi(cpu_env
, arg
);
5875 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5886 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5892 /* Stop translation as we may have switched the execution mode */
5893 ctx
->bstate
= BS_STOP
;
5898 (void)rn
; /* avoid a compiler warning */
5899 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5900 /* For simplicity assume that all writes can cause interrupts. */
5903 ctx
->bstate
= BS_STOP
;
5908 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5909 generate_exception(ctx
, EXCP_RI
);
5911 #endif /* TARGET_MIPS64 */
5913 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5914 int u
, int sel
, int h
)
5916 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5917 TCGv t0
= tcg_temp_local_new();
5919 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5920 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5921 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5922 tcg_gen_movi_tl(t0
, -1);
5923 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5924 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5925 tcg_gen_movi_tl(t0
, -1);
5931 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
5934 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
5944 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
5947 gen_helper_mftc0_tcbind(t0
, cpu_env
);
5950 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
5953 gen_helper_mftc0_tchalt(t0
, cpu_env
);
5956 gen_helper_mftc0_tccontext(t0
, cpu_env
);
5959 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
5962 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
5965 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5972 gen_helper_mftc0_entryhi(t0
, cpu_env
);
5975 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5981 gen_helper_mftc0_status(t0
, cpu_env
);
5984 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5990 gen_helper_mftc0_cause(t0
, cpu_env
);
6000 gen_helper_mftc0_epc(t0
, cpu_env
);
6010 gen_helper_mftc0_ebase(t0
, cpu_env
);
6020 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
6030 gen_helper_mftc0_debug(t0
, cpu_env
);
6033 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6038 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6040 } else switch (sel
) {
6041 /* GPR registers. */
6043 gen_helper_1e0i(mftgpr
, t0
, rt
);
6045 /* Auxiliary CPU registers */
6049 gen_helper_1e0i(mftlo
, t0
, 0);
6052 gen_helper_1e0i(mfthi
, t0
, 0);
6055 gen_helper_1e0i(mftacx
, t0
, 0);
6058 gen_helper_1e0i(mftlo
, t0
, 1);
6061 gen_helper_1e0i(mfthi
, t0
, 1);
6064 gen_helper_1e0i(mftacx
, t0
, 1);
6067 gen_helper_1e0i(mftlo
, t0
, 2);
6070 gen_helper_1e0i(mfthi
, t0
, 2);
6073 gen_helper_1e0i(mftacx
, t0
, 2);
6076 gen_helper_1e0i(mftlo
, t0
, 3);
6079 gen_helper_1e0i(mfthi
, t0
, 3);
6082 gen_helper_1e0i(mftacx
, t0
, 3);
6085 gen_helper_mftdsp(t0
, cpu_env
);
6091 /* Floating point (COP1). */
6093 /* XXX: For now we support only a single FPU context. */
6095 TCGv_i32 fp0
= tcg_temp_new_i32();
6097 gen_load_fpr32(fp0
, rt
);
6098 tcg_gen_ext_i32_tl(t0
, fp0
);
6099 tcg_temp_free_i32(fp0
);
6101 TCGv_i32 fp0
= tcg_temp_new_i32();
6103 gen_load_fpr32h(fp0
, rt
);
6104 tcg_gen_ext_i32_tl(t0
, fp0
);
6105 tcg_temp_free_i32(fp0
);
6109 /* XXX: For now we support only a single FPU context. */
6110 gen_helper_1e0i(cfc1
, t0
, rt
);
6112 /* COP2: Not implemented. */
6119 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6120 gen_store_gpr(t0
, rd
);
6126 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6127 generate_exception(ctx
, EXCP_RI
);
6130 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
6131 int u
, int sel
, int h
)
6133 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
6134 TCGv t0
= tcg_temp_local_new();
6136 gen_load_gpr(t0
, rt
);
6137 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
6138 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
6139 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
6141 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
6142 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
6149 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
6152 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
6162 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
6165 gen_helper_mttc0_tcbind(cpu_env
, t0
);
6168 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
6171 gen_helper_mttc0_tchalt(cpu_env
, t0
);
6174 gen_helper_mttc0_tccontext(cpu_env
, t0
);
6177 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
6180 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
6183 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6190 gen_helper_mttc0_entryhi(cpu_env
, t0
);
6193 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6199 gen_helper_mttc0_status(cpu_env
, t0
);
6202 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6208 gen_helper_mttc0_cause(cpu_env
, t0
);
6218 gen_helper_mttc0_ebase(cpu_env
, t0
);
6228 gen_helper_mttc0_debug(cpu_env
, t0
);
6231 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6236 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6238 } else switch (sel
) {
6239 /* GPR registers. */
6241 gen_helper_0e1i(mttgpr
, t0
, rd
);
6243 /* Auxiliary CPU registers */
6247 gen_helper_0e1i(mttlo
, t0
, 0);
6250 gen_helper_0e1i(mtthi
, t0
, 0);
6253 gen_helper_0e1i(mttacx
, t0
, 0);
6256 gen_helper_0e1i(mttlo
, t0
, 1);
6259 gen_helper_0e1i(mtthi
, t0
, 1);
6262 gen_helper_0e1i(mttacx
, t0
, 1);
6265 gen_helper_0e1i(mttlo
, t0
, 2);
6268 gen_helper_0e1i(mtthi
, t0
, 2);
6271 gen_helper_0e1i(mttacx
, t0
, 2);
6274 gen_helper_0e1i(mttlo
, t0
, 3);
6277 gen_helper_0e1i(mtthi
, t0
, 3);
6280 gen_helper_0e1i(mttacx
, t0
, 3);
6283 gen_helper_mttdsp(cpu_env
, t0
);
6289 /* Floating point (COP1). */
6291 /* XXX: For now we support only a single FPU context. */
6293 TCGv_i32 fp0
= tcg_temp_new_i32();
6295 tcg_gen_trunc_tl_i32(fp0
, t0
);
6296 gen_store_fpr32(fp0
, rd
);
6297 tcg_temp_free_i32(fp0
);
6299 TCGv_i32 fp0
= tcg_temp_new_i32();
6301 tcg_gen_trunc_tl_i32(fp0
, t0
);
6302 gen_store_fpr32h(fp0
, rd
);
6303 tcg_temp_free_i32(fp0
);
6307 /* XXX: For now we support only a single FPU context. */
6308 gen_helper_0e1i(ctc1
, t0
, rd
);
6310 /* COP2: Not implemented. */
6317 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6323 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6324 generate_exception(ctx
, EXCP_RI
);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(env, ctx, ISA_MIPS3);
        gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(env, ctx, ISA_MIPS3);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(env, ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(env, ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
        check_insn(env, ctx, ISA_MIPS2);
        gen_helper_eret(cpu_env);
        ctx->bstate = BS_EXCP;
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
#endif /* !CONFIG_USER_ONLY */
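
/*
 * Illustrative sketch (not part of the original source): how the fields used
 * by gen_cp0() above are pulled out of a 32-bit COP0 move instruction.  The
 * "sel" passed to gen_mfc0()/gen_mtc0() is simply the low three bits of the
 * opcode word, rt names the GPR and rd the CP0 register.
 *
 *   uint32_t insn = 0x40826000;       -- hypothetical encoding of mtc0 $2, $12, 0
 *   int rt  = (insn >> 16) & 0x1f;    -- GPR operand
 *   int rd  = (insn >> 11) & 0x1f;    -- CP0 register number
 *   int sel = insn & 0x7;             -- register select, i.e. ctx->opcode & 0x7
 */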
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1 (CPUMIPSState *env, DisasContext *ctx, uint32_t op,
                                 int32_t cc, int32_t offset)
    target_ulong btarget;
    const char *opn = "cp1 cond branch";
    TCGv_i32 t0 = tcg_temp_new_i32();

    check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
               ctx->hflags, btarget);
    ctx->btarget = btarget;

    tcg_temp_free_i32(t0);
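
/*
 * Note (illustrative, not from the original source): the branch conditions
 * above are built from the FCC bits in FCR31.  FCC0 lives at bit 23 and
 * FCC1..FCC7 at bits 25..31, which is what get_fp_bit() is assumed to encode:
 *
 *   static inline int fcc_bit(int cc)    -- hypothetical mirror of get_fp_bit()
 *   {
 *       return cc ? 24 + cc : 23;
 *   }
 *
 * BC1F/BC1T test a single bit; the BC1ANY2/BC1ANY4 forms combine two or four
 * consecutive bits starting at cc with or/nand, as the TCG sequences above show.
 */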
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),
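
/*
 * Worked example (illustrative, assuming the usual MIPS fmt field values
 * FMT_S=16, FMT_D=17, FMT_W=20, FMT_L=21, FMT_PS=22): FOP() packs the fmt
 * field (instruction bits 25..21) with the function field (bits 5..0), so a
 * COP1 arithmetic instruction can be dispatched on those two fields alone.
 * For add.d:
 *
 *   fmt = 17 (FMT_D), func = 0
 *   OPC_ADD_D = FOP(0, FMT_D) = (17 << 21) | 0 = 0x02200000
 */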
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    const char *opn = "cp1 move";
    TCGv t0 = tcg_temp_new();

        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_helper_0e1i(ctc1, t0, fs);
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(fp0, fs);
        tcg_temp_free_i32(fp0);
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);

static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(t0, fs);
    gen_store_fpr32h(t0, fd);
    tcg_temp_free_i32(t0);
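
/*
 * Note (descriptive, not from the original source): the three helpers above
 * implement MOVF.fmt/MOVT.fmt.  "tf" selects the polarity -- the copy is
 * performed only when FCC[cc] matches tf, otherwise the brcondi skips over
 * it.  For the paired-single form the low half is controlled by FCC[cc] and
 * the high half independently by FCC[cc+1], hence the second andi/brcondi
 * pair on get_fp_bit(cc+1).
 */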
6890 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
6891 int ft
, int fs
, int fd
, int cc
)
6893 const char *opn
= "farith";
6894 const char *condnames
[] = {
6912 const char *condnames_abs
[] = {
6930 enum { BINOP
, CMPOP
, OTHEROP
} optype
= OTHEROP
;
6931 uint32_t func
= ctx
->opcode
& 0x3f;
6936 TCGv_i32 fp0
= tcg_temp_new_i32();
6937 TCGv_i32 fp1
= tcg_temp_new_i32();
6939 gen_load_fpr32(fp0
, fs
);
6940 gen_load_fpr32(fp1
, ft
);
6941 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
6942 tcg_temp_free_i32(fp1
);
6943 gen_store_fpr32(fp0
, fd
);
6944 tcg_temp_free_i32(fp0
);
6951 TCGv_i32 fp0
= tcg_temp_new_i32();
6952 TCGv_i32 fp1
= tcg_temp_new_i32();
6954 gen_load_fpr32(fp0
, fs
);
6955 gen_load_fpr32(fp1
, ft
);
6956 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
6957 tcg_temp_free_i32(fp1
);
6958 gen_store_fpr32(fp0
, fd
);
6959 tcg_temp_free_i32(fp0
);
6966 TCGv_i32 fp0
= tcg_temp_new_i32();
6967 TCGv_i32 fp1
= tcg_temp_new_i32();
6969 gen_load_fpr32(fp0
, fs
);
6970 gen_load_fpr32(fp1
, ft
);
6971 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
6972 tcg_temp_free_i32(fp1
);
6973 gen_store_fpr32(fp0
, fd
);
6974 tcg_temp_free_i32(fp0
);
6981 TCGv_i32 fp0
= tcg_temp_new_i32();
6982 TCGv_i32 fp1
= tcg_temp_new_i32();
6984 gen_load_fpr32(fp0
, fs
);
6985 gen_load_fpr32(fp1
, ft
);
6986 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
6987 tcg_temp_free_i32(fp1
);
6988 gen_store_fpr32(fp0
, fd
);
6989 tcg_temp_free_i32(fp0
);
6996 TCGv_i32 fp0
= tcg_temp_new_i32();
6998 gen_load_fpr32(fp0
, fs
);
6999 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
7000 gen_store_fpr32(fp0
, fd
);
7001 tcg_temp_free_i32(fp0
);
7007 TCGv_i32 fp0
= tcg_temp_new_i32();
7009 gen_load_fpr32(fp0
, fs
);
7010 gen_helper_float_abs_s(fp0
, fp0
);
7011 gen_store_fpr32(fp0
, fd
);
7012 tcg_temp_free_i32(fp0
);
7018 TCGv_i32 fp0
= tcg_temp_new_i32();
7020 gen_load_fpr32(fp0
, fs
);
7021 gen_store_fpr32(fp0
, fd
);
7022 tcg_temp_free_i32(fp0
);
7028 TCGv_i32 fp0
= tcg_temp_new_i32();
7030 gen_load_fpr32(fp0
, fs
);
7031 gen_helper_float_chs_s(fp0
, fp0
);
7032 gen_store_fpr32(fp0
, fd
);
7033 tcg_temp_free_i32(fp0
);
7038 check_cp1_64bitmode(ctx
);
7040 TCGv_i32 fp32
= tcg_temp_new_i32();
7041 TCGv_i64 fp64
= tcg_temp_new_i64();
7043 gen_load_fpr32(fp32
, fs
);
7044 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
7045 tcg_temp_free_i32(fp32
);
7046 gen_store_fpr64(ctx
, fp64
, fd
);
7047 tcg_temp_free_i64(fp64
);
7052 check_cp1_64bitmode(ctx
);
7054 TCGv_i32 fp32
= tcg_temp_new_i32();
7055 TCGv_i64 fp64
= tcg_temp_new_i64();
7057 gen_load_fpr32(fp32
, fs
);
7058 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
7059 tcg_temp_free_i32(fp32
);
7060 gen_store_fpr64(ctx
, fp64
, fd
);
7061 tcg_temp_free_i64(fp64
);
7066 check_cp1_64bitmode(ctx
);
7068 TCGv_i32 fp32
= tcg_temp_new_i32();
7069 TCGv_i64 fp64
= tcg_temp_new_i64();
7071 gen_load_fpr32(fp32
, fs
);
7072 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
7073 tcg_temp_free_i32(fp32
);
7074 gen_store_fpr64(ctx
, fp64
, fd
);
7075 tcg_temp_free_i64(fp64
);
7080 check_cp1_64bitmode(ctx
);
7082 TCGv_i32 fp32
= tcg_temp_new_i32();
7083 TCGv_i64 fp64
= tcg_temp_new_i64();
7085 gen_load_fpr32(fp32
, fs
);
7086 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
7087 tcg_temp_free_i32(fp32
);
7088 gen_store_fpr64(ctx
, fp64
, fd
);
7089 tcg_temp_free_i64(fp64
);
7095 TCGv_i32 fp0
= tcg_temp_new_i32();
7097 gen_load_fpr32(fp0
, fs
);
7098 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
7099 gen_store_fpr32(fp0
, fd
);
7100 tcg_temp_free_i32(fp0
);
7106 TCGv_i32 fp0
= tcg_temp_new_i32();
7108 gen_load_fpr32(fp0
, fs
);
7109 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
7110 gen_store_fpr32(fp0
, fd
);
7111 tcg_temp_free_i32(fp0
);
7117 TCGv_i32 fp0
= tcg_temp_new_i32();
7119 gen_load_fpr32(fp0
, fs
);
7120 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
7121 gen_store_fpr32(fp0
, fd
);
7122 tcg_temp_free_i32(fp0
);
7128 TCGv_i32 fp0
= tcg_temp_new_i32();
7130 gen_load_fpr32(fp0
, fs
);
7131 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
7132 gen_store_fpr32(fp0
, fd
);
7133 tcg_temp_free_i32(fp0
);
7138 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7143 int l1
= gen_new_label();
7147 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7149 fp0
= tcg_temp_new_i32();
7150 gen_load_fpr32(fp0
, fs
);
7151 gen_store_fpr32(fp0
, fd
);
7152 tcg_temp_free_i32(fp0
);
7159 int l1
= gen_new_label();
7163 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7164 fp0
= tcg_temp_new_i32();
7165 gen_load_fpr32(fp0
, fs
);
7166 gen_store_fpr32(fp0
, fd
);
7167 tcg_temp_free_i32(fp0
);
7176 TCGv_i32 fp0
= tcg_temp_new_i32();
7178 gen_load_fpr32(fp0
, fs
);
7179 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
7180 gen_store_fpr32(fp0
, fd
);
7181 tcg_temp_free_i32(fp0
);
7188 TCGv_i32 fp0
= tcg_temp_new_i32();
7190 gen_load_fpr32(fp0
, fs
);
7191 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
7192 gen_store_fpr32(fp0
, fd
);
7193 tcg_temp_free_i32(fp0
);
7198 check_cp1_64bitmode(ctx
);
7200 TCGv_i32 fp0
= tcg_temp_new_i32();
7201 TCGv_i32 fp1
= tcg_temp_new_i32();
7203 gen_load_fpr32(fp0
, fs
);
7204 gen_load_fpr32(fp1
, ft
);
7205 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
7206 tcg_temp_free_i32(fp1
);
7207 gen_store_fpr32(fp0
, fd
);
7208 tcg_temp_free_i32(fp0
);
7213 check_cp1_64bitmode(ctx
);
7215 TCGv_i32 fp0
= tcg_temp_new_i32();
7217 gen_load_fpr32(fp0
, fs
);
7218 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
7219 gen_store_fpr32(fp0
, fd
);
7220 tcg_temp_free_i32(fp0
);
7225 check_cp1_64bitmode(ctx
);
7227 TCGv_i32 fp0
= tcg_temp_new_i32();
7229 gen_load_fpr32(fp0
, fs
);
7230 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
7231 gen_store_fpr32(fp0
, fd
);
7232 tcg_temp_free_i32(fp0
);
7237 check_cp1_64bitmode(ctx
);
7239 TCGv_i32 fp0
= tcg_temp_new_i32();
7240 TCGv_i32 fp1
= tcg_temp_new_i32();
7242 gen_load_fpr32(fp0
, fs
);
7243 gen_load_fpr32(fp1
, ft
);
7244 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
7245 tcg_temp_free_i32(fp1
);
7246 gen_store_fpr32(fp0
, fd
);
7247 tcg_temp_free_i32(fp0
);
7252 check_cp1_registers(ctx
, fd
);
7254 TCGv_i32 fp32
= tcg_temp_new_i32();
7255 TCGv_i64 fp64
= tcg_temp_new_i64();
7257 gen_load_fpr32(fp32
, fs
);
7258 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
7259 tcg_temp_free_i32(fp32
);
7260 gen_store_fpr64(ctx
, fp64
, fd
);
7261 tcg_temp_free_i64(fp64
);
7267 TCGv_i32 fp0
= tcg_temp_new_i32();
7269 gen_load_fpr32(fp0
, fs
);
7270 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
7271 gen_store_fpr32(fp0
, fd
);
7272 tcg_temp_free_i32(fp0
);
7277 check_cp1_64bitmode(ctx
);
7279 TCGv_i32 fp32
= tcg_temp_new_i32();
7280 TCGv_i64 fp64
= tcg_temp_new_i64();
7282 gen_load_fpr32(fp32
, fs
);
7283 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
7284 tcg_temp_free_i32(fp32
);
7285 gen_store_fpr64(ctx
, fp64
, fd
);
7286 tcg_temp_free_i64(fp64
);
7291 check_cp1_64bitmode(ctx
);
7293 TCGv_i64 fp64
= tcg_temp_new_i64();
7294 TCGv_i32 fp32_0
= tcg_temp_new_i32();
7295 TCGv_i32 fp32_1
= tcg_temp_new_i32();
7297 gen_load_fpr32(fp32_0
, fs
);
7298 gen_load_fpr32(fp32_1
, ft
);
7299 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
7300 tcg_temp_free_i32(fp32_1
);
7301 tcg_temp_free_i32(fp32_0
);
7302 gen_store_fpr64(ctx
, fp64
, fd
);
7303 tcg_temp_free_i64(fp64
);
7316 case OPC_CMP_NGLE_S
:
7323 if (ctx
->opcode
& (1 << 6)) {
7324 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
7325 opn
= condnames_abs
[func
-48];
7327 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
7328 opn
= condnames
[func
-48];
7332 check_cp1_registers(ctx
, fs
| ft
| fd
);
7334 TCGv_i64 fp0
= tcg_temp_new_i64();
7335 TCGv_i64 fp1
= tcg_temp_new_i64();
7337 gen_load_fpr64(ctx
, fp0
, fs
);
7338 gen_load_fpr64(ctx
, fp1
, ft
);
7339 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
7340 tcg_temp_free_i64(fp1
);
7341 gen_store_fpr64(ctx
, fp0
, fd
);
7342 tcg_temp_free_i64(fp0
);
7348 check_cp1_registers(ctx
, fs
| ft
| fd
);
7350 TCGv_i64 fp0
= tcg_temp_new_i64();
7351 TCGv_i64 fp1
= tcg_temp_new_i64();
7353 gen_load_fpr64(ctx
, fp0
, fs
);
7354 gen_load_fpr64(ctx
, fp1
, ft
);
7355 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
7356 tcg_temp_free_i64(fp1
);
7357 gen_store_fpr64(ctx
, fp0
, fd
);
7358 tcg_temp_free_i64(fp0
);
7364 check_cp1_registers(ctx
, fs
| ft
| fd
);
7366 TCGv_i64 fp0
= tcg_temp_new_i64();
7367 TCGv_i64 fp1
= tcg_temp_new_i64();
7369 gen_load_fpr64(ctx
, fp0
, fs
);
7370 gen_load_fpr64(ctx
, fp1
, ft
);
7371 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
7372 tcg_temp_free_i64(fp1
);
7373 gen_store_fpr64(ctx
, fp0
, fd
);
7374 tcg_temp_free_i64(fp0
);
7380 check_cp1_registers(ctx
, fs
| ft
| fd
);
7382 TCGv_i64 fp0
= tcg_temp_new_i64();
7383 TCGv_i64 fp1
= tcg_temp_new_i64();
7385 gen_load_fpr64(ctx
, fp0
, fs
);
7386 gen_load_fpr64(ctx
, fp1
, ft
);
7387 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
7388 tcg_temp_free_i64(fp1
);
7389 gen_store_fpr64(ctx
, fp0
, fd
);
7390 tcg_temp_free_i64(fp0
);
7396 check_cp1_registers(ctx
, fs
| fd
);
7398 TCGv_i64 fp0
= tcg_temp_new_i64();
7400 gen_load_fpr64(ctx
, fp0
, fs
);
7401 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
7402 gen_store_fpr64(ctx
, fp0
, fd
);
7403 tcg_temp_free_i64(fp0
);
7408 check_cp1_registers(ctx
, fs
| fd
);
7410 TCGv_i64 fp0
= tcg_temp_new_i64();
7412 gen_load_fpr64(ctx
, fp0
, fs
);
7413 gen_helper_float_abs_d(fp0
, fp0
);
7414 gen_store_fpr64(ctx
, fp0
, fd
);
7415 tcg_temp_free_i64(fp0
);
7420 check_cp1_registers(ctx
, fs
| fd
);
7422 TCGv_i64 fp0
= tcg_temp_new_i64();
7424 gen_load_fpr64(ctx
, fp0
, fs
);
7425 gen_store_fpr64(ctx
, fp0
, fd
);
7426 tcg_temp_free_i64(fp0
);
7431 check_cp1_registers(ctx
, fs
| fd
);
7433 TCGv_i64 fp0
= tcg_temp_new_i64();
7435 gen_load_fpr64(ctx
, fp0
, fs
);
7436 gen_helper_float_chs_d(fp0
, fp0
);
7437 gen_store_fpr64(ctx
, fp0
, fd
);
7438 tcg_temp_free_i64(fp0
);
7443 check_cp1_64bitmode(ctx
);
7445 TCGv_i64 fp0
= tcg_temp_new_i64();
7447 gen_load_fpr64(ctx
, fp0
, fs
);
7448 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
7449 gen_store_fpr64(ctx
, fp0
, fd
);
7450 tcg_temp_free_i64(fp0
);
7455 check_cp1_64bitmode(ctx
);
7457 TCGv_i64 fp0
= tcg_temp_new_i64();
7459 gen_load_fpr64(ctx
, fp0
, fs
);
7460 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
7461 gen_store_fpr64(ctx
, fp0
, fd
);
7462 tcg_temp_free_i64(fp0
);
7467 check_cp1_64bitmode(ctx
);
7469 TCGv_i64 fp0
= tcg_temp_new_i64();
7471 gen_load_fpr64(ctx
, fp0
, fs
);
7472 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
7473 gen_store_fpr64(ctx
, fp0
, fd
);
7474 tcg_temp_free_i64(fp0
);
7479 check_cp1_64bitmode(ctx
);
7481 TCGv_i64 fp0
= tcg_temp_new_i64();
7483 gen_load_fpr64(ctx
, fp0
, fs
);
7484 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
7485 gen_store_fpr64(ctx
, fp0
, fd
);
7486 tcg_temp_free_i64(fp0
);
7491 check_cp1_registers(ctx
, fs
);
7493 TCGv_i32 fp32
= tcg_temp_new_i32();
7494 TCGv_i64 fp64
= tcg_temp_new_i64();
7496 gen_load_fpr64(ctx
, fp64
, fs
);
7497 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
7498 tcg_temp_free_i64(fp64
);
7499 gen_store_fpr32(fp32
, fd
);
7500 tcg_temp_free_i32(fp32
);
7505 check_cp1_registers(ctx
, fs
);
7507 TCGv_i32 fp32
= tcg_temp_new_i32();
7508 TCGv_i64 fp64
= tcg_temp_new_i64();
7510 gen_load_fpr64(ctx
, fp64
, fs
);
7511 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
7512 tcg_temp_free_i64(fp64
);
7513 gen_store_fpr32(fp32
, fd
);
7514 tcg_temp_free_i32(fp32
);
7519 check_cp1_registers(ctx
, fs
);
7521 TCGv_i32 fp32
= tcg_temp_new_i32();
7522 TCGv_i64 fp64
= tcg_temp_new_i64();
7524 gen_load_fpr64(ctx
, fp64
, fs
);
7525 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
7526 tcg_temp_free_i64(fp64
);
7527 gen_store_fpr32(fp32
, fd
);
7528 tcg_temp_free_i32(fp32
);
7533 check_cp1_registers(ctx
, fs
);
7535 TCGv_i32 fp32
= tcg_temp_new_i32();
7536 TCGv_i64 fp64
= tcg_temp_new_i64();
7538 gen_load_fpr64(ctx
, fp64
, fs
);
7539 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
7540 tcg_temp_free_i64(fp64
);
7541 gen_store_fpr32(fp32
, fd
);
7542 tcg_temp_free_i32(fp32
);
7547 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7552 int l1
= gen_new_label();
7556 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7558 fp0
= tcg_temp_new_i64();
7559 gen_load_fpr64(ctx
, fp0
, fs
);
7560 gen_store_fpr64(ctx
, fp0
, fd
);
7561 tcg_temp_free_i64(fp0
);
7568 int l1
= gen_new_label();
7572 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7573 fp0
= tcg_temp_new_i64();
7574 gen_load_fpr64(ctx
, fp0
, fs
);
7575 gen_store_fpr64(ctx
, fp0
, fd
);
7576 tcg_temp_free_i64(fp0
);
7583 check_cp1_64bitmode(ctx
);
7585 TCGv_i64 fp0
= tcg_temp_new_i64();
7587 gen_load_fpr64(ctx
, fp0
, fs
);
7588 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
7589 gen_store_fpr64(ctx
, fp0
, fd
);
7590 tcg_temp_free_i64(fp0
);
7595 check_cp1_64bitmode(ctx
);
7597 TCGv_i64 fp0
= tcg_temp_new_i64();
7599 gen_load_fpr64(ctx
, fp0
, fs
);
7600 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
7601 gen_store_fpr64(ctx
, fp0
, fd
);
7602 tcg_temp_free_i64(fp0
);
7607 check_cp1_64bitmode(ctx
);
7609 TCGv_i64 fp0
= tcg_temp_new_i64();
7610 TCGv_i64 fp1
= tcg_temp_new_i64();
7612 gen_load_fpr64(ctx
, fp0
, fs
);
7613 gen_load_fpr64(ctx
, fp1
, ft
);
7614 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
7615 tcg_temp_free_i64(fp1
);
7616 gen_store_fpr64(ctx
, fp0
, fd
);
7617 tcg_temp_free_i64(fp0
);
7622 check_cp1_64bitmode(ctx
);
7624 TCGv_i64 fp0
= tcg_temp_new_i64();
7626 gen_load_fpr64(ctx
, fp0
, fs
);
7627 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
7628 gen_store_fpr64(ctx
, fp0
, fd
);
7629 tcg_temp_free_i64(fp0
);
7634 check_cp1_64bitmode(ctx
);
7636 TCGv_i64 fp0
= tcg_temp_new_i64();
7638 gen_load_fpr64(ctx
, fp0
, fs
);
7639 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
7640 gen_store_fpr64(ctx
, fp0
, fd
);
7641 tcg_temp_free_i64(fp0
);
7646 check_cp1_64bitmode(ctx
);
7648 TCGv_i64 fp0
= tcg_temp_new_i64();
7649 TCGv_i64 fp1
= tcg_temp_new_i64();
7651 gen_load_fpr64(ctx
, fp0
, fs
);
7652 gen_load_fpr64(ctx
, fp1
, ft
);
7653 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
7654 tcg_temp_free_i64(fp1
);
7655 gen_store_fpr64(ctx
, fp0
, fd
);
7656 tcg_temp_free_i64(fp0
);
7669 case OPC_CMP_NGLE_D
:
7676 if (ctx
->opcode
& (1 << 6)) {
7677 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
7678 opn
= condnames_abs
[func
-48];
7680 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
7681 opn
= condnames
[func
-48];
7685 check_cp1_registers(ctx
, fs
);
7687 TCGv_i32 fp32
= tcg_temp_new_i32();
7688 TCGv_i64 fp64
= tcg_temp_new_i64();
7690 gen_load_fpr64(ctx
, fp64
, fs
);
7691 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
7692 tcg_temp_free_i64(fp64
);
7693 gen_store_fpr32(fp32
, fd
);
7694 tcg_temp_free_i32(fp32
);
7699 check_cp1_registers(ctx
, fs
);
7701 TCGv_i32 fp32
= tcg_temp_new_i32();
7702 TCGv_i64 fp64
= tcg_temp_new_i64();
7704 gen_load_fpr64(ctx
, fp64
, fs
);
7705 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
7706 tcg_temp_free_i64(fp64
);
7707 gen_store_fpr32(fp32
, fd
);
7708 tcg_temp_free_i32(fp32
);
7713 check_cp1_64bitmode(ctx
);
7715 TCGv_i64 fp0
= tcg_temp_new_i64();
7717 gen_load_fpr64(ctx
, fp0
, fs
);
7718 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
7719 gen_store_fpr64(ctx
, fp0
, fd
);
7720 tcg_temp_free_i64(fp0
);
7726 TCGv_i32 fp0
= tcg_temp_new_i32();
7728 gen_load_fpr32(fp0
, fs
);
7729 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
7730 gen_store_fpr32(fp0
, fd
);
7731 tcg_temp_free_i32(fp0
);
7736 check_cp1_registers(ctx
, fd
);
7738 TCGv_i32 fp32
= tcg_temp_new_i32();
7739 TCGv_i64 fp64
= tcg_temp_new_i64();
7741 gen_load_fpr32(fp32
, fs
);
7742 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
7743 tcg_temp_free_i32(fp32
);
7744 gen_store_fpr64(ctx
, fp64
, fd
);
7745 tcg_temp_free_i64(fp64
);
7750 check_cp1_64bitmode(ctx
);
7752 TCGv_i32 fp32
= tcg_temp_new_i32();
7753 TCGv_i64 fp64
= tcg_temp_new_i64();
7755 gen_load_fpr64(ctx
, fp64
, fs
);
7756 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
7757 tcg_temp_free_i64(fp64
);
7758 gen_store_fpr32(fp32
, fd
);
7759 tcg_temp_free_i32(fp32
);
7764 check_cp1_64bitmode(ctx
);
7766 TCGv_i64 fp0
= tcg_temp_new_i64();
7768 gen_load_fpr64(ctx
, fp0
, fs
);
7769 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
7770 gen_store_fpr64(ctx
, fp0
, fd
);
7771 tcg_temp_free_i64(fp0
);
7776 check_cp1_64bitmode(ctx
);
7778 TCGv_i64 fp0
= tcg_temp_new_i64();
7780 gen_load_fpr64(ctx
, fp0
, fs
);
7781 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
7782 gen_store_fpr64(ctx
, fp0
, fd
);
7783 tcg_temp_free_i64(fp0
);
7788 check_cp1_64bitmode(ctx
);
7790 TCGv_i64 fp0
= tcg_temp_new_i64();
7791 TCGv_i64 fp1
= tcg_temp_new_i64();
7793 gen_load_fpr64(ctx
, fp0
, fs
);
7794 gen_load_fpr64(ctx
, fp1
, ft
);
7795 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
7796 tcg_temp_free_i64(fp1
);
7797 gen_store_fpr64(ctx
, fp0
, fd
);
7798 tcg_temp_free_i64(fp0
);
7803 check_cp1_64bitmode(ctx
);
7805 TCGv_i64 fp0
= tcg_temp_new_i64();
7806 TCGv_i64 fp1
= tcg_temp_new_i64();
7808 gen_load_fpr64(ctx
, fp0
, fs
);
7809 gen_load_fpr64(ctx
, fp1
, ft
);
7810 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
7811 tcg_temp_free_i64(fp1
);
7812 gen_store_fpr64(ctx
, fp0
, fd
);
7813 tcg_temp_free_i64(fp0
);
7818 check_cp1_64bitmode(ctx
);
7820 TCGv_i64 fp0
= tcg_temp_new_i64();
7821 TCGv_i64 fp1
= tcg_temp_new_i64();
7823 gen_load_fpr64(ctx
, fp0
, fs
);
7824 gen_load_fpr64(ctx
, fp1
, ft
);
7825 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
7826 tcg_temp_free_i64(fp1
);
7827 gen_store_fpr64(ctx
, fp0
, fd
);
7828 tcg_temp_free_i64(fp0
);
7833 check_cp1_64bitmode(ctx
);
7835 TCGv_i64 fp0
= tcg_temp_new_i64();
7837 gen_load_fpr64(ctx
, fp0
, fs
);
7838 gen_helper_float_abs_ps(fp0
, fp0
);
7839 gen_store_fpr64(ctx
, fp0
, fd
);
7840 tcg_temp_free_i64(fp0
);
7845 check_cp1_64bitmode(ctx
);
7847 TCGv_i64 fp0
= tcg_temp_new_i64();
7849 gen_load_fpr64(ctx
, fp0
, fs
);
7850 gen_store_fpr64(ctx
, fp0
, fd
);
7851 tcg_temp_free_i64(fp0
);
7856 check_cp1_64bitmode(ctx
);
7858 TCGv_i64 fp0
= tcg_temp_new_i64();
7860 gen_load_fpr64(ctx
, fp0
, fs
);
7861 gen_helper_float_chs_ps(fp0
, fp0
);
7862 gen_store_fpr64(ctx
, fp0
, fd
);
7863 tcg_temp_free_i64(fp0
);
7868 check_cp1_64bitmode(ctx
);
7869 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7873 check_cp1_64bitmode(ctx
);
7875 int l1
= gen_new_label();
7879 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7880 fp0
= tcg_temp_new_i64();
7881 gen_load_fpr64(ctx
, fp0
, fs
);
7882 gen_store_fpr64(ctx
, fp0
, fd
);
7883 tcg_temp_free_i64(fp0
);
7889 check_cp1_64bitmode(ctx
);
7891 int l1
= gen_new_label();
7895 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7896 fp0
= tcg_temp_new_i64();
7897 gen_load_fpr64(ctx
, fp0
, fs
);
7898 gen_store_fpr64(ctx
, fp0
, fd
);
7899 tcg_temp_free_i64(fp0
);
7906 check_cp1_64bitmode(ctx
);
7908 TCGv_i64 fp0
= tcg_temp_new_i64();
7909 TCGv_i64 fp1
= tcg_temp_new_i64();
7911 gen_load_fpr64(ctx
, fp0
, ft
);
7912 gen_load_fpr64(ctx
, fp1
, fs
);
7913 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
7914 tcg_temp_free_i64(fp1
);
7915 gen_store_fpr64(ctx
, fp0
, fd
);
7916 tcg_temp_free_i64(fp0
);
7921 check_cp1_64bitmode(ctx
);
7923 TCGv_i64 fp0
= tcg_temp_new_i64();
7924 TCGv_i64 fp1
= tcg_temp_new_i64();
7926 gen_load_fpr64(ctx
, fp0
, ft
);
7927 gen_load_fpr64(ctx
, fp1
, fs
);
7928 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
7929 tcg_temp_free_i64(fp1
);
7930 gen_store_fpr64(ctx
, fp0
, fd
);
7931 tcg_temp_free_i64(fp0
);
7936 check_cp1_64bitmode(ctx
);
7938 TCGv_i64 fp0
= tcg_temp_new_i64();
7939 TCGv_i64 fp1
= tcg_temp_new_i64();
7941 gen_load_fpr64(ctx
, fp0
, fs
);
7942 gen_load_fpr64(ctx
, fp1
, ft
);
7943 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
7944 tcg_temp_free_i64(fp1
);
7945 gen_store_fpr64(ctx
, fp0
, fd
);
7946 tcg_temp_free_i64(fp0
);
7951 check_cp1_64bitmode(ctx
);
7953 TCGv_i64 fp0
= tcg_temp_new_i64();
7955 gen_load_fpr64(ctx
, fp0
, fs
);
7956 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
7957 gen_store_fpr64(ctx
, fp0
, fd
);
7958 tcg_temp_free_i64(fp0
);
7963 check_cp1_64bitmode(ctx
);
7965 TCGv_i64 fp0
= tcg_temp_new_i64();
7967 gen_load_fpr64(ctx
, fp0
, fs
);
7968 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
7969 gen_store_fpr64(ctx
, fp0
, fd
);
7970 tcg_temp_free_i64(fp0
);
7975 check_cp1_64bitmode(ctx
);
7977 TCGv_i64 fp0
= tcg_temp_new_i64();
7978 TCGv_i64 fp1
= tcg_temp_new_i64();
7980 gen_load_fpr64(ctx
, fp0
, fs
);
7981 gen_load_fpr64(ctx
, fp1
, ft
);
7982 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
7983 tcg_temp_free_i64(fp1
);
7984 gen_store_fpr64(ctx
, fp0
, fd
);
7985 tcg_temp_free_i64(fp0
);
7990 check_cp1_64bitmode(ctx
);
7992 TCGv_i32 fp0
= tcg_temp_new_i32();
7994 gen_load_fpr32h(fp0
, fs
);
7995 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
7996 gen_store_fpr32(fp0
, fd
);
7997 tcg_temp_free_i32(fp0
);
8002 check_cp1_64bitmode(ctx
);
8004 TCGv_i64 fp0
= tcg_temp_new_i64();
8006 gen_load_fpr64(ctx
, fp0
, fs
);
8007 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
8008 gen_store_fpr64(ctx
, fp0
, fd
);
8009 tcg_temp_free_i64(fp0
);
8014 check_cp1_64bitmode(ctx
);
8016 TCGv_i32 fp0
= tcg_temp_new_i32();
8018 gen_load_fpr32(fp0
, fs
);
8019 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
8020 gen_store_fpr32(fp0
, fd
);
8021 tcg_temp_free_i32(fp0
);
8026 check_cp1_64bitmode(ctx
);
8028 TCGv_i32 fp0
= tcg_temp_new_i32();
8029 TCGv_i32 fp1
= tcg_temp_new_i32();
8031 gen_load_fpr32(fp0
, fs
);
8032 gen_load_fpr32(fp1
, ft
);
8033 gen_store_fpr32h(fp0
, fd
);
8034 gen_store_fpr32(fp1
, fd
);
8035 tcg_temp_free_i32(fp0
);
8036 tcg_temp_free_i32(fp1
);
8041 check_cp1_64bitmode(ctx
);
8043 TCGv_i32 fp0
= tcg_temp_new_i32();
8044 TCGv_i32 fp1
= tcg_temp_new_i32();
8046 gen_load_fpr32(fp0
, fs
);
8047 gen_load_fpr32h(fp1
, ft
);
8048 gen_store_fpr32(fp1
, fd
);
8049 gen_store_fpr32h(fp0
, fd
);
8050 tcg_temp_free_i32(fp0
);
8051 tcg_temp_free_i32(fp1
);
8056 check_cp1_64bitmode(ctx
);
8058 TCGv_i32 fp0
= tcg_temp_new_i32();
8059 TCGv_i32 fp1
= tcg_temp_new_i32();
8061 gen_load_fpr32h(fp0
, fs
);
8062 gen_load_fpr32(fp1
, ft
);
8063 gen_store_fpr32(fp1
, fd
);
8064 gen_store_fpr32h(fp0
, fd
);
8065 tcg_temp_free_i32(fp0
);
8066 tcg_temp_free_i32(fp1
);
8071 check_cp1_64bitmode(ctx
);
8073 TCGv_i32 fp0
= tcg_temp_new_i32();
8074 TCGv_i32 fp1
= tcg_temp_new_i32();
8076 gen_load_fpr32h(fp0
, fs
);
8077 gen_load_fpr32h(fp1
, ft
);
8078 gen_store_fpr32(fp1
, fd
);
8079 gen_store_fpr32h(fp0
, fd
);
8080 tcg_temp_free_i32(fp0
);
8081 tcg_temp_free_i32(fp1
);
8088 case OPC_CMP_UEQ_PS
:
8089 case OPC_CMP_OLT_PS
:
8090 case OPC_CMP_ULT_PS
:
8091 case OPC_CMP_OLE_PS
:
8092 case OPC_CMP_ULE_PS
:
8094 case OPC_CMP_NGLE_PS
:
8095 case OPC_CMP_SEQ_PS
:
8096 case OPC_CMP_NGL_PS
:
8098 case OPC_CMP_NGE_PS
:
8100 case OPC_CMP_NGT_PS
:
8101 if (ctx
->opcode
& (1 << 6)) {
8102 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
8103 opn
= condnames_abs
[func
-48];
8105 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
8106 opn
= condnames
[func
-48];
8111 generate_exception (ctx
, EXCP_RI
);
8114 (void)opn
; /* avoid a compiler warning */
8117 MIPS_DEBUG("%s %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fs
], fregnames
[ft
]);
8120 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fs
], fregnames
[ft
]);
8123 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fd
], fregnames
[fs
]);
8128 /* Coprocessor 3 (FPU) */
8129 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
8130 int fd
, int fs
, int base
, int index
)
8132 const char *opn
= "extended float load/store";
8134 TCGv t0
= tcg_temp_new();
8137 gen_load_gpr(t0
, index
);
8138 } else if (index
== 0) {
8139 gen_load_gpr(t0
, base
);
8141 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    save_cpu_state(ctx, 0);
8150 TCGv_i32 fp0
= tcg_temp_new_i32();
8152 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
8153 tcg_gen_trunc_tl_i32(fp0
, t0
);
8154 gen_store_fpr32(fp0
, fd
);
8155 tcg_temp_free_i32(fp0
);
8161 check_cp1_registers(ctx
, fd
);
8163 TCGv_i64 fp0
= tcg_temp_new_i64();
8165 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8166 gen_store_fpr64(ctx
, fp0
, fd
);
8167 tcg_temp_free_i64(fp0
);
8172 check_cp1_64bitmode(ctx
);
8173 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8175 TCGv_i64 fp0
= tcg_temp_new_i64();
8177 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8178 gen_store_fpr64(ctx
, fp0
, fd
);
8179 tcg_temp_free_i64(fp0
);
8186 TCGv_i32 fp0
= tcg_temp_new_i32();
8187 TCGv t1
= tcg_temp_new();
8189 gen_load_fpr32(fp0
, fs
);
8190 tcg_gen_extu_i32_tl(t1
, fp0
);
8191 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
8192 tcg_temp_free_i32(fp0
);
8200 check_cp1_registers(ctx
, fs
);
8202 TCGv_i64 fp0
= tcg_temp_new_i64();
8204 gen_load_fpr64(ctx
, fp0
, fs
);
8205 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8206 tcg_temp_free_i64(fp0
);
8212 check_cp1_64bitmode(ctx
);
8213 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8215 TCGv_i64 fp0
= tcg_temp_new_i64();
8217 gen_load_fpr64(ctx
, fp0
, fs
);
8218 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8219 tcg_temp_free_i64(fp0
);
8226 (void)opn
; (void)store
; /* avoid compiler warnings */
8227 MIPS_DEBUG("%s %s, %s(%s)", opn
, fregnames
[store
? fs
: fd
],
8228 regnames
[index
], regnames
[base
]);
8231 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
8232 int fd
, int fr
, int fs
, int ft
)
8234 const char *opn
= "flt3_arith";
8238 check_cp1_64bitmode(ctx
);
8240 TCGv t0
= tcg_temp_local_new();
8241 TCGv_i32 fp
= tcg_temp_new_i32();
8242 TCGv_i32 fph
= tcg_temp_new_i32();
8243 int l1
= gen_new_label();
8244 int l2
= gen_new_label();
8246 gen_load_gpr(t0
, fr
);
8247 tcg_gen_andi_tl(t0
, t0
, 0x7);
8249 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
8250 gen_load_fpr32(fp
, fs
);
8251 gen_load_fpr32h(fph
, fs
);
8252 gen_store_fpr32(fp
, fd
);
8253 gen_store_fpr32h(fph
, fd
);
8256 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
8258 #ifdef TARGET_WORDS_BIGENDIAN
8259 gen_load_fpr32(fp
, fs
);
8260 gen_load_fpr32h(fph
, ft
);
8261 gen_store_fpr32h(fp
, fd
);
8262 gen_store_fpr32(fph
, fd
);
8264 gen_load_fpr32h(fph
, fs
);
8265 gen_load_fpr32(fp
, ft
);
8266 gen_store_fpr32(fph
, fd
);
8267 gen_store_fpr32h(fp
, fd
);
8270 tcg_temp_free_i32(fp
);
8271 tcg_temp_free_i32(fph
);
8278 TCGv_i32 fp0
= tcg_temp_new_i32();
8279 TCGv_i32 fp1
= tcg_temp_new_i32();
8280 TCGv_i32 fp2
= tcg_temp_new_i32();
8282 gen_load_fpr32(fp0
, fs
);
8283 gen_load_fpr32(fp1
, ft
);
8284 gen_load_fpr32(fp2
, fr
);
8285 gen_helper_float_muladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8286 tcg_temp_free_i32(fp0
);
8287 tcg_temp_free_i32(fp1
);
8288 gen_store_fpr32(fp2
, fd
);
8289 tcg_temp_free_i32(fp2
);
8295 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8297 TCGv_i64 fp0
= tcg_temp_new_i64();
8298 TCGv_i64 fp1
= tcg_temp_new_i64();
8299 TCGv_i64 fp2
= tcg_temp_new_i64();
8301 gen_load_fpr64(ctx
, fp0
, fs
);
8302 gen_load_fpr64(ctx
, fp1
, ft
);
8303 gen_load_fpr64(ctx
, fp2
, fr
);
8304 gen_helper_float_muladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8305 tcg_temp_free_i64(fp0
);
8306 tcg_temp_free_i64(fp1
);
8307 gen_store_fpr64(ctx
, fp2
, fd
);
8308 tcg_temp_free_i64(fp2
);
8313 check_cp1_64bitmode(ctx
);
8315 TCGv_i64 fp0
= tcg_temp_new_i64();
8316 TCGv_i64 fp1
= tcg_temp_new_i64();
8317 TCGv_i64 fp2
= tcg_temp_new_i64();
8319 gen_load_fpr64(ctx
, fp0
, fs
);
8320 gen_load_fpr64(ctx
, fp1
, ft
);
8321 gen_load_fpr64(ctx
, fp2
, fr
);
8322 gen_helper_float_muladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8323 tcg_temp_free_i64(fp0
);
8324 tcg_temp_free_i64(fp1
);
8325 gen_store_fpr64(ctx
, fp2
, fd
);
8326 tcg_temp_free_i64(fp2
);
8333 TCGv_i32 fp0
= tcg_temp_new_i32();
8334 TCGv_i32 fp1
= tcg_temp_new_i32();
8335 TCGv_i32 fp2
= tcg_temp_new_i32();
8337 gen_load_fpr32(fp0
, fs
);
8338 gen_load_fpr32(fp1
, ft
);
8339 gen_load_fpr32(fp2
, fr
);
8340 gen_helper_float_mulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8341 tcg_temp_free_i32(fp0
);
8342 tcg_temp_free_i32(fp1
);
8343 gen_store_fpr32(fp2
, fd
);
8344 tcg_temp_free_i32(fp2
);
8350 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8352 TCGv_i64 fp0
= tcg_temp_new_i64();
8353 TCGv_i64 fp1
= tcg_temp_new_i64();
8354 TCGv_i64 fp2
= tcg_temp_new_i64();
8356 gen_load_fpr64(ctx
, fp0
, fs
);
8357 gen_load_fpr64(ctx
, fp1
, ft
);
8358 gen_load_fpr64(ctx
, fp2
, fr
);
8359 gen_helper_float_mulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8360 tcg_temp_free_i64(fp0
);
8361 tcg_temp_free_i64(fp1
);
8362 gen_store_fpr64(ctx
, fp2
, fd
);
8363 tcg_temp_free_i64(fp2
);
8368 check_cp1_64bitmode(ctx
);
8370 TCGv_i64 fp0
= tcg_temp_new_i64();
8371 TCGv_i64 fp1
= tcg_temp_new_i64();
8372 TCGv_i64 fp2
= tcg_temp_new_i64();
8374 gen_load_fpr64(ctx
, fp0
, fs
);
8375 gen_load_fpr64(ctx
, fp1
, ft
);
8376 gen_load_fpr64(ctx
, fp2
, fr
);
8377 gen_helper_float_mulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8378 tcg_temp_free_i64(fp0
);
8379 tcg_temp_free_i64(fp1
);
8380 gen_store_fpr64(ctx
, fp2
, fd
);
8381 tcg_temp_free_i64(fp2
);
8388 TCGv_i32 fp0
= tcg_temp_new_i32();
8389 TCGv_i32 fp1
= tcg_temp_new_i32();
8390 TCGv_i32 fp2
= tcg_temp_new_i32();
8392 gen_load_fpr32(fp0
, fs
);
8393 gen_load_fpr32(fp1
, ft
);
8394 gen_load_fpr32(fp2
, fr
);
8395 gen_helper_float_nmuladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8396 tcg_temp_free_i32(fp0
);
8397 tcg_temp_free_i32(fp1
);
8398 gen_store_fpr32(fp2
, fd
);
8399 tcg_temp_free_i32(fp2
);
8405 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8407 TCGv_i64 fp0
= tcg_temp_new_i64();
8408 TCGv_i64 fp1
= tcg_temp_new_i64();
8409 TCGv_i64 fp2
= tcg_temp_new_i64();
8411 gen_load_fpr64(ctx
, fp0
, fs
);
8412 gen_load_fpr64(ctx
, fp1
, ft
);
8413 gen_load_fpr64(ctx
, fp2
, fr
);
8414 gen_helper_float_nmuladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8415 tcg_temp_free_i64(fp0
);
8416 tcg_temp_free_i64(fp1
);
8417 gen_store_fpr64(ctx
, fp2
, fd
);
8418 tcg_temp_free_i64(fp2
);
8423 check_cp1_64bitmode(ctx
);
8425 TCGv_i64 fp0
= tcg_temp_new_i64();
8426 TCGv_i64 fp1
= tcg_temp_new_i64();
8427 TCGv_i64 fp2
= tcg_temp_new_i64();
8429 gen_load_fpr64(ctx
, fp0
, fs
);
8430 gen_load_fpr64(ctx
, fp1
, ft
);
8431 gen_load_fpr64(ctx
, fp2
, fr
);
8432 gen_helper_float_nmuladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8433 tcg_temp_free_i64(fp0
);
8434 tcg_temp_free_i64(fp1
);
8435 gen_store_fpr64(ctx
, fp2
, fd
);
8436 tcg_temp_free_i64(fp2
);
8443 TCGv_i32 fp0
= tcg_temp_new_i32();
8444 TCGv_i32 fp1
= tcg_temp_new_i32();
8445 TCGv_i32 fp2
= tcg_temp_new_i32();
8447 gen_load_fpr32(fp0
, fs
);
8448 gen_load_fpr32(fp1
, ft
);
8449 gen_load_fpr32(fp2
, fr
);
8450 gen_helper_float_nmulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8451 tcg_temp_free_i32(fp0
);
8452 tcg_temp_free_i32(fp1
);
8453 gen_store_fpr32(fp2
, fd
);
8454 tcg_temp_free_i32(fp2
);
8460 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8462 TCGv_i64 fp0
= tcg_temp_new_i64();
8463 TCGv_i64 fp1
= tcg_temp_new_i64();
8464 TCGv_i64 fp2
= tcg_temp_new_i64();
8466 gen_load_fpr64(ctx
, fp0
, fs
);
8467 gen_load_fpr64(ctx
, fp1
, ft
);
8468 gen_load_fpr64(ctx
, fp2
, fr
);
8469 gen_helper_float_nmulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8470 tcg_temp_free_i64(fp0
);
8471 tcg_temp_free_i64(fp1
);
8472 gen_store_fpr64(ctx
, fp2
, fd
);
8473 tcg_temp_free_i64(fp2
);
8478 check_cp1_64bitmode(ctx
);
8480 TCGv_i64 fp0
= tcg_temp_new_i64();
8481 TCGv_i64 fp1
= tcg_temp_new_i64();
8482 TCGv_i64 fp2
= tcg_temp_new_i64();
8484 gen_load_fpr64(ctx
, fp0
, fs
);
8485 gen_load_fpr64(ctx
, fp1
, ft
);
8486 gen_load_fpr64(ctx
, fp2
, fr
);
8487 gen_helper_float_nmulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8488 tcg_temp_free_i64(fp0
);
8489 tcg_temp_free_i64(fp1
);
8490 gen_store_fpr64(ctx
, fp2
, fd
);
8491 tcg_temp_free_i64(fp2
);
8497 generate_exception (ctx
, EXCP_RI
);
8500 (void)opn
; /* avoid a compiler warning */
8501 MIPS_DEBUG("%s %s, %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fr
],
8502 fregnames
[fs
], fregnames
[ft
]);
static void gen_rdhwr (CPUMIPSState *env, DisasContext *ctx, int rt, int rd)
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(env, ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, tls_value));
        gen_store_gpr(t0, rt);
        /* XXX: Some CPUs implement this in hardware.
           Not supported yet. */
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
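
/*
 * Illustrative note (not from the original source): the cases above map to
 * the architected RDHWR register numbers -- 0 = CPUNum, 1 = SYNCI_Step,
 * 2 = CC, 3 = CCRes -- plus the user-mode TLS pointer that Linux exposes
 * through hardware register 29.  A typical guest sequence is assumed to be:
 *
 *   rdhwr   $3, $29        # fetch the thread pointer; trapped and emulated
 *                          # by the kernel when the CPU lacks RDHWR
 */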
static void handle_delay_slot (CPUMIPSState *env, DisasContext *ctx,
                               int insn_bytes)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
            int l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
            gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            if (env->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
                tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_0e0i(raise_exception, EXCP_DEBUG);
            MIPS_DEBUG("unknown branch");
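
/*
 * Illustrative note (assumption, not from the original source): in the
 * branch-to-register case above, bit 0 of the target address selects the
 * ISA mode when MIPS16/microMIPS is available.  It is copied into the
 * MIPS_HFLAG_M16 hflag and masked off the PC, so e.g. "jr $t9" with an odd
 * address in $t9 resumes execution in the 16-bit instruction set.
 */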
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LWPC = 0x16,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,

static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
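
/*
 * Note (descriptive, not from the original source): MIPS16 instructions only
 * carry 3-bit register fields; xlat() widens them to architectural register
 * numbers, so field values 0..7 select $16, $17, $2, $3, $4, $5, $6 and $7
 * (s0, s1, v0, v1, a0..a3) respectively.
 */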
8744 static void gen_mips16_save (DisasContext
*ctx
,
8745 int xsregs
, int aregs
,
8746 int do_ra
, int do_s0
, int do_s1
,
8749 TCGv t0
= tcg_temp_new();
8750 TCGv t1
= tcg_temp_new();
8780 generate_exception(ctx
, EXCP_RI
);
8786 gen_base_offset_addr(ctx
, t0
, 29, 12);
8787 gen_load_gpr(t1
, 7);
8788 op_st_sw(t1
, t0
, ctx
);
8791 gen_base_offset_addr(ctx
, t0
, 29, 8);
8792 gen_load_gpr(t1
, 6);
8793 op_st_sw(t1
, t0
, ctx
);
8796 gen_base_offset_addr(ctx
, t0
, 29, 4);
8797 gen_load_gpr(t1
, 5);
8798 op_st_sw(t1
, t0
, ctx
);
8801 gen_base_offset_addr(ctx
, t0
, 29, 0);
8802 gen_load_gpr(t1
, 4);
8803 op_st_sw(t1
, t0
, ctx
);
8806 gen_load_gpr(t0
, 29);
8808 #define DECR_AND_STORE(reg) do { \
8809 tcg_gen_subi_tl(t0, t0, 4); \
8810 gen_load_gpr(t1, reg); \
8811 op_st_sw(t1, t0, ctx); \
8875 generate_exception(ctx
, EXCP_RI
);
8891 #undef DECR_AND_STORE
8893 tcg_gen_subi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8898 static void gen_mips16_restore (DisasContext
*ctx
,
8899 int xsregs
, int aregs
,
8900 int do_ra
, int do_s0
, int do_s1
,
8904 TCGv t0
= tcg_temp_new();
8905 TCGv t1
= tcg_temp_new();
8907 tcg_gen_addi_tl(t0
, cpu_gpr
[29], framesize
);
8909 #define DECR_AND_LOAD(reg) do { \
8910 tcg_gen_subi_tl(t0, t0, 4); \
8911 op_ld_lw(t1, t0, ctx); \
8912 gen_store_gpr(t1, reg); \
8976 generate_exception(ctx
, EXCP_RI
);
8992 #undef DECR_AND_LOAD
8994 tcg_gen_addi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8999 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
9000 int is_64_bit
, int extended
)
9004 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9005 generate_exception(ctx
, EXCP_RI
);
9009 t0
= tcg_temp_new();
9011 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
9012 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
9014 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9020 #if defined(TARGET_MIPS64)
9021 static void decode_i64_mips16 (CPUMIPSState
*env
, DisasContext
*ctx
,
9022 int ry
, int funct
, int16_t offset
,
9028 offset
= extended
? offset
: offset
<< 3;
9029 gen_ld(env
, ctx
, OPC_LD
, ry
, 29, offset
);
9033 offset
= extended
? offset
: offset
<< 3;
9034 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
9038 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
9039 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
9043 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
9044 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
9047 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9048 generate_exception(ctx
, EXCP_RI
);
9050 offset
= extended
? offset
: offset
<< 3;
9051 gen_ld(env
, ctx
, OPC_LDPC
, ry
, 0, offset
);
9056 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
9057 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
9061 offset
= extended
? offset
: offset
<< 2;
9062 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
9066 offset
= extended
? offset
: offset
<< 2;
9067 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx,
                                       int *is_branch)
{
    int extend = cpu_lduw_code(env, ctx->pc + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));
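    /*
     * Illustrative note (added): an EXTEND prefix supplies the upper
     * immediate bits, so the full 16-bit immediate is reassembled from
     * three fields of the combined 32-bit opcode: bits 20..16 become
     * imm[15..11], bits 26..21 become imm[10..5], and bits 4..0 stay as
     * imm[4..0].
     */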
9090 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
9093 case M16_OPC_ADDIUSP
:
9094 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9096 case M16_OPC_ADDIUPC
:
9097 gen_addiupc(ctx
, rx
, imm
, 0, 1);
9100 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
9101 /* No delay slot, so just process as a normal instruction */
9104 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
9105 /* No delay slot, so just process as a normal instruction */
9108 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
9109 /* No delay slot, so just process as a normal instruction */
9112 switch (ctx
->opcode
& 0x3) {
9114 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9117 #if defined(TARGET_MIPS64)
9119 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9121 generate_exception(ctx
, EXCP_RI
);
9125 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9128 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9132 #if defined(TARGET_MIPS64)
9135 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
);
9139 imm
= ctx
->opcode
& 0xf;
9140 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
9141 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
9142 imm
= (int16_t) (imm
<< 1) >> 1;
9143 if ((ctx
->opcode
>> 4) & 0x1) {
9144 #if defined(TARGET_MIPS64)
9146 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9148 generate_exception(ctx
, EXCP_RI
);
9151 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9154 case M16_OPC_ADDIU8
:
9155 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9158 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9161 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9166 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
9169 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
9172 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
9175 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
            int xsregs = (ctx->opcode >> 24) & 0x7;
            int aregs = (ctx->opcode >> 16) & 0xf;
            int do_ra = (ctx->opcode >> 6) & 0x1;
            int do_s0 = (ctx->opcode >> 5) & 0x1;
            int do_s1 = (ctx->opcode >> 4) & 0x1;
            int framesize = (((ctx->opcode >> 20) & 0xf) << 4
                             | (ctx->opcode & 0xf)) << 3;
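            /*
             * Illustrative note (added): the extended SAVE/RESTORE frame
             * size is an 8-bit count of 8-byte units split across the
             * opcode; e.g. with opcode bits 23..20 == 0x1 and bits
             * 3..0 == 0x0 the expression above yields
             * framesize = ((0x1 << 4) | 0x0) << 3 = 128 bytes.
             */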
            if (ctx->opcode & (1 << 7)) {
                gen_mips16_save(ctx, xsregs, aregs,
                                do_ra, do_s0, do_s1, framesize);
            } else {
                gen_mips16_restore(ctx, xsregs, aregs,
                                   do_ra, do_s0, do_s1, framesize);
            }
9199 generate_exception(ctx
, EXCP_RI
);
9204 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
9207 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
9209 #if defined(TARGET_MIPS64)
9211 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
9215 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9218 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
);
9221 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, offset
);
9224 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
);
9227 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9230 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
);
9233 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, offset
);
9235 #if defined(TARGET_MIPS64)
9237 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
);
9241 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9244 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
9247 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
9250 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
9252 #if defined(TARGET_MIPS64)
9254 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
9258 generate_exception(ctx
, EXCP_RI
);
static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx,
                              int *is_branch)
{
    int op, cnvt_op, op1, offset;

    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;
9285 case M16_OPC_ADDIUSP
:
9287 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
9289 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9292 case M16_OPC_ADDIUPC
:
9293 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
9296 offset
= (ctx
->opcode
& 0x7ff) << 1;
9297 offset
= (int16_t)(offset
<< 4) >> 4;
9298 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
9299 /* No delay slot, so just process as a normal instruction */
9302 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
9303 offset
= (((ctx
->opcode
& 0x1f) << 21)
9304 | ((ctx
->opcode
>> 5) & 0x1f) << 16
9306 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
9307 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
9312 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9313 /* No delay slot, so just process as a normal instruction */
9316 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9317 /* No delay slot, so just process as a normal instruction */
9320 switch (ctx
->opcode
& 0x3) {
9322 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9325 #if defined(TARGET_MIPS64)
9327 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9329 generate_exception(ctx
, EXCP_RI
);
9333 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9336 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9340 #if defined(TARGET_MIPS64)
9343 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
9348 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
9350 if ((ctx
->opcode
>> 4) & 1) {
9351 #if defined(TARGET_MIPS64)
9353 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9355 generate_exception(ctx
, EXCP_RI
);
9358 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9362 case M16_OPC_ADDIU8
:
9364 int16_t imm
= (int8_t) ctx
->opcode
;
9366 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9371 int16_t imm
= (uint8_t) ctx
->opcode
;
9372 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9377 int16_t imm
= (uint8_t) ctx
->opcode
;
9378 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9385 funct
= (ctx
->opcode
>> 8) & 0x7;
9388 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
9389 ((int8_t)ctx
->opcode
) << 1);
9392 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
9393 ((int8_t)ctx
->opcode
) << 1);
9396 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
9399 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
9400 ((int8_t)ctx
->opcode
) << 3);
9404 int do_ra
= ctx
->opcode
& (1 << 6);
9405 int do_s0
= ctx
->opcode
& (1 << 5);
9406 int do_s1
= ctx
->opcode
& (1 << 4);
9407 int framesize
= ctx
->opcode
& 0xf;
9409 if (framesize
== 0) {
9412 framesize
= framesize
<< 3;
9415 if (ctx
->opcode
& (1 << 7)) {
9416 gen_mips16_save(ctx
, 0, 0,
9417 do_ra
, do_s0
, do_s1
, framesize
);
9419 gen_mips16_restore(ctx
, 0, 0,
9420 do_ra
, do_s0
, do_s1
, framesize
);
9426 int rz
= xlat(ctx
->opcode
& 0x7);
9428 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
9429 ((ctx
->opcode
>> 5) & 0x7);
9430 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
9434 reg32
= ctx
->opcode
& 0x1f;
9435 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
9438 generate_exception(ctx
, EXCP_RI
);
9445 int16_t imm
= (uint8_t) ctx
->opcode
;
9447 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
9452 int16_t imm
= (uint8_t) ctx
->opcode
;
9453 gen_logic_imm(env
, ctx
, OPC_XORI
, 24, rx
, imm
);
9456 #if defined(TARGET_MIPS64)
9459 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
9463 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9466 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
9469 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9472 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
9475 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9478 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
9481 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
9483 #if defined (TARGET_MIPS64)
9486 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
9490 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9493 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
9496 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9499 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
9503 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
9506 switch (ctx
->opcode
& 0x3) {
9508 mips32_op
= OPC_ADDU
;
9511 mips32_op
= OPC_SUBU
;
9513 #if defined(TARGET_MIPS64)
9515 mips32_op
= OPC_DADDU
;
9519 mips32_op
= OPC_DSUBU
;
9524 generate_exception(ctx
, EXCP_RI
);
9528 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
9537 int nd
= (ctx
->opcode
>> 7) & 0x1;
9538 int link
= (ctx
->opcode
>> 6) & 0x1;
9539 int ra
= (ctx
->opcode
>> 5) & 0x1;
9542 op
= nd
? OPC_JALRC
: OPC_JALRS
;
9547 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
9554 /* XXX: not clear which exception should be raised
9555 * when in debug mode...
9557 check_insn(env
, ctx
, ISA_MIPS32
);
9558 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9559 generate_exception(ctx
, EXCP_DBp
);
9561 generate_exception(ctx
, EXCP_DBp
);
9565 gen_slt(env
, ctx
, OPC_SLT
, 24, rx
, ry
);
9568 gen_slt(env
, ctx
, OPC_SLTU
, 24, rx
, ry
);
9571 generate_exception(ctx
, EXCP_BREAK
);
9574 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
9577 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
9580 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
9582 #if defined (TARGET_MIPS64)
9585 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
9589 gen_logic(env
, ctx
, OPC_XOR
, 24, rx
, ry
);
9592 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
9595 gen_logic(env
, ctx
, OPC_AND
, rx
, rx
, ry
);
9598 gen_logic(env
, ctx
, OPC_OR
, rx
, rx
, ry
);
9601 gen_logic(env
, ctx
, OPC_XOR
, rx
, rx
, ry
);
9604 gen_logic(env
, ctx
, OPC_NOR
, rx
, ry
, 0);
9607 gen_HILO(ctx
, OPC_MFHI
, rx
);
9611 case RR_RY_CNVT_ZEB
:
9612 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9614 case RR_RY_CNVT_ZEH
:
9615 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9617 case RR_RY_CNVT_SEB
:
9618 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9620 case RR_RY_CNVT_SEH
:
9621 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9623 #if defined (TARGET_MIPS64)
9624 case RR_RY_CNVT_ZEW
:
9626 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9628 case RR_RY_CNVT_SEW
:
9630 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9634 generate_exception(ctx
, EXCP_RI
);
9639 gen_HILO(ctx
, OPC_MFLO
, rx
);
9641 #if defined (TARGET_MIPS64)
9644 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
9648 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
9652 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
9656 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
9660 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
9663 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
9666 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
9669 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
9671 #if defined (TARGET_MIPS64)
9674 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
9678 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
9682 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
9686 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
9690 generate_exception(ctx
, EXCP_RI
);
9694 case M16_OPC_EXTEND
:
9695 decode_extended_mips16_opc(env
, ctx
, is_branch
);
9698 #if defined(TARGET_MIPS64)
9700 funct
= (ctx
->opcode
>> 8) & 0x7;
9701 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
9705 generate_exception(ctx
, EXCP_RI
);
9712 /* microMIPS extension to MIPS32 */
9714 /* microMIPS32 major opcodes */
9753 /* 0x20 is reserved */
9763 /* 0x28 and 0x29 are reserved */
9773 /* 0x30 and 0x31 are reserved */
9783 /* 0x38 and 0x39 are reserved */
9794 /* POOL32A encoding of minor opcode field */
9797 /* These opcodes are distinguished only by bits 9..6; those bits are
9798 * what are recorded below. */
9824 /* The following can be distinguished by their lower 6 bits. */
9830 /* POOL32AXF encoding of minor opcode field extension */
9844 /* bits 13..12 for 0x01 */
9850 /* bits 13..12 for 0x2a */
9856 /* bits 13..12 for 0x32 */
9860 /* bits 15..12 for 0x2c */
9876 /* bits 15..12 for 0x34 */
9884 /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */
9891 /* bits 15..12 for 0x05 */
9895 /* bits 15..12 for 0x0d */
9905 /* bits 15..12 for 0x15 */
9911 /* bits 15..12 for 0x1d */
9915 /* bits 15..12 for 0x2d */
9920 /* bits 15..12 for 0x35 */
9927 /* POOL32B encoding of minor opcode field (bits 15..12) */
9943 /* POOL32C encoding of minor opcode field (bits 15..12) */
9951 /* 0xa is reserved */
9958 /* 0x6 is reserved */
9964 /* POOL32F encoding of minor opcode field (bits 5..0) */
9967 /* These are the bit 7..6 values */
9978 /* These are the bit 8..6 values */
    CABS_COND_FMT = 0x1c, /* MIPS3D */
10026 /* POOL32Fxf encoding of minor opcode extension field */
10064 /* POOL32I encoding of minor opcode field (bits 25..21) */
10089 /* These overlap and are distinguished by bit16 of the instruction */
10098 /* POOL16A encoding of minor opcode field */
10105 /* POOL16B encoding of minor opcode field */
10112 /* POOL16C encoding of minor opcode field */
10132 /* POOL16D encoding of minor opcode field */
10139 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
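/*
 * Illustrative note (added): mmreg() and mmreg2() expand the 3-bit
 * microMIPS register fields and differ only in what encoding 0 means:
 * mmreg(0) == 16 ($s0), while mmreg2(0) == 0 ($zero), which is why the
 * second table is the one used for the 16-bit store formats.
 */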
10161 #define uMIPS_RD(op) ((op >> 7) & 0x7)
10162 #define uMIPS_RS(op) ((op >> 4) & 0x7)
10163 #define uMIPS_RS2(op) uMIPS_RS(op)
10164 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
10165 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
10166 #define uMIPS_RS5(op) (op & 0x1f)
/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
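/*
 * Illustrative note (added): assuming SIMM() sign-extends through the
 * shift-left/arithmetic-shift-right idiom reconstructed above, a 4-bit
 * field starting at bit 0 decodes as
 *
 *     ZIMM(0xf, 0, 4) == 15
 *     SIMM(0xf, 0, 4) == -1
 */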
static void gen_addiur1sp (CPUMIPSState *env, DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
static void gen_addiur2 (CPUMIPSState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
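/*
 * Illustrative note (added): ADDIUR2 carries a 3-bit encoded immediate,
 * ZIMM(opcode, 1, 3), that indexes decoded_imm[] above, so the only
 * immediates this form can produce are 1, 4, 8, 12, 16, 20, 24 and -1.
 */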
static void gen_addiusp (CPUMIPSState *env, DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
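/*
 * Illustrative note (added, assuming the middle branch above keeps the
 * encoding unchanged as reconstructed): the 9-bit ADDIUSP field becomes a
 * signed word count scaled by 4.  Encodings 2..255 map to themselves,
 * 0 and 1 map to 256 and 257, 256..509 map to -256..-3 and 510..511 map
 * to -258..-257; e.g. encoded == 0 adjusts SP by +1024 bytes.
 */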
static void gen_addius5 (CPUMIPSState *env, DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rd, imm);
}
static void gen_andi16 (CPUMIPSState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(env, ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
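/*
 * Illustrative note (added): ANDI16 cannot encode an arbitrary mask; its
 * 4-bit immediate selects one of the 16 common masks in decoded_imm[].
 * For example, encoded 0x0 yields 128 and encoded 0xd yields 255.
 */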
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    const char *opn = "ldst_multiple";
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
10251 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
10255 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
10258 #ifdef TARGET_MIPS64
10260 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
10264 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
10270 MIPS_DEBUG("%s, %x, %d(%s)", opn
, reglist
, offset
, regnames
[base
]);
10273 tcg_temp_free_i32(t2
);
static void gen_pool16c_insn (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
10288 gen_logic(env
, ctx
, OPC_NOR
, rd
, rs
, 0);
10294 gen_logic(env
, ctx
, OPC_XOR
, rd
, rd
, rs
);
10300 gen_logic(env
, ctx
, OPC_AND
, rd
, rd
, rs
);
10306 gen_logic(env
, ctx
, OPC_OR
, rd
, rd
, rs
);
10313 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10314 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10316 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
10325 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10326 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10328 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
10335 int reg
= ctx
->opcode
& 0x1f;
10337 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10344 int reg
= ctx
->opcode
& 0x1f;
10346 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10347 /* Let normal delay slot handling in our caller take us
10348 to the branch target. */
10360 int reg
= ctx
->opcode
& 0x1f;
10362 gen_compute_branch(ctx
, opc
, 2, reg
, 31, 0);
10368 gen_HILO(ctx
, OPC_MFHI
, uMIPS_RS5(ctx
->opcode
));
10372 gen_HILO(ctx
, OPC_MFLO
, uMIPS_RS5(ctx
->opcode
));
10375 generate_exception(ctx
, EXCP_BREAK
);
10378 /* XXX: not clear which exception should be raised
10379 * when in debug mode...
10381 check_insn(env
, ctx
, ISA_MIPS32
);
10382 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10383 generate_exception(ctx
, EXCP_DBp
);
10385 generate_exception(ctx
, EXCP_DBp
);
10388 case JRADDIUSP
+ 0:
10389 case JRADDIUSP
+ 1:
10391 int imm
= ZIMM(ctx
->opcode
, 0, 5);
10393 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0);
10394 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
10395 /* Let normal delay slot handling in our caller take us
10396 to the branch target. */
10400 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    save_cpu_state(ctx, 0);
    op_ld_lw(t1, t0, ctx);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    const char *opn = "ldst_pair";
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
10445 generate_exception(ctx
, EXCP_RI
);
10448 save_cpu_state(ctx
, 0);
10449 op_ld_lw(t1
, t0
, ctx
);
10450 gen_store_gpr(t1
, rd
);
10451 tcg_gen_movi_tl(t1
, 4);
10452 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10453 op_ld_lw(t1
, t0
, ctx
);
10454 gen_store_gpr(t1
, rd
+1);
10458 save_cpu_state(ctx
, 0);
10459 gen_load_gpr(t1
, rd
);
10460 op_st_sw(t1
, t0
, ctx
);
10461 tcg_gen_movi_tl(t1
, 4);
10462 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10463 gen_load_gpr(t1
, rd
+1);
10464 op_st_sw(t1
, t0
, ctx
);
10467 #ifdef TARGET_MIPS64
10470 generate_exception(ctx
, EXCP_RI
);
10473 save_cpu_state(ctx
, 0);
10474 op_ld_ld(t1
, t0
, ctx
);
10475 gen_store_gpr(t1
, rd
);
10476 tcg_gen_movi_tl(t1
, 8);
10477 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10478 op_ld_ld(t1
, t0
, ctx
);
10479 gen_store_gpr(t1
, rd
+1);
10483 save_cpu_state(ctx
, 0);
10484 gen_load_gpr(t1
, rd
);
10485 op_st_sd(t1
, t0
, ctx
);
10486 tcg_gen_movi_tl(t1
, 8);
10487 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10488 gen_load_gpr(t1
, rd
+1);
10489 op_st_sd(t1
, t0
, ctx
);
10494 (void)opn
; /* avoid a compiler warning */
10495 MIPS_DEBUG("%s, %s, %d(%s)", opn
, regnames
[rd
], offset
, regnames
[base
]);
static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs,
                           int *is_branch)
{
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
10509 mips32_op
= OPC_TEQ
;
10512 mips32_op
= OPC_TGE
;
10515 mips32_op
= OPC_TGEU
;
10518 mips32_op
= OPC_TLT
;
10521 mips32_op
= OPC_TLTU
;
10524 mips32_op
= OPC_TNE
;
10526 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
10528 #ifndef CONFIG_USER_ONLY
10531 check_cp0_enabled(ctx
);
10533 /* Treat as NOP. */
10536 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
10540 check_cp0_enabled(ctx
);
10542 TCGv t0
= tcg_temp_new();
10544 gen_load_gpr(t0
, rt
);
10545 gen_mtc0(env
, ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
10553 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
10556 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
10559 mips32_op
= OPC_CLO
;
10562 mips32_op
= OPC_CLZ
;
10564 check_insn(env
, ctx
, ISA_MIPS32
);
10565 gen_cl(ctx
, mips32_op
, rt
, rs
);
10568 gen_rdhwr(env
, ctx
, rt
, rs
);
10571 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
10574 mips32_op
= OPC_MULT
;
10577 mips32_op
= OPC_MULTU
;
10580 mips32_op
= OPC_DIV
;
10583 mips32_op
= OPC_DIVU
;
10586 mips32_op
= OPC_MADD
;
10589 mips32_op
= OPC_MADDU
;
10592 mips32_op
= OPC_MSUB
;
10595 mips32_op
= OPC_MSUBU
;
10597 check_insn(env
, ctx
, ISA_MIPS32
);
10598 gen_muldiv(ctx
, mips32_op
, rs
, rt
);
10601 goto pool32axf_invalid
;
10612 generate_exception_err(ctx
, EXCP_CpU
, 2);
10615 goto pool32axf_invalid
;
10622 gen_compute_branch (ctx
, OPC_JALR
, 4, rs
, rt
, 0);
10627 gen_compute_branch (ctx
, OPC_JALRS
, 4, rs
, rt
, 0);
10631 goto pool32axf_invalid
;
10637 check_cp0_enabled(ctx
);
10638 check_insn(env
, ctx
, ISA_MIPS32R2
);
10639 gen_load_srsgpr(rt
, rs
);
10642 check_cp0_enabled(ctx
);
10643 check_insn(env
, ctx
, ISA_MIPS32R2
);
10644 gen_store_srsgpr(rt
, rs
);
10647 goto pool32axf_invalid
;
10650 #ifndef CONFIG_USER_ONLY
10654 mips32_op
= OPC_TLBP
;
10657 mips32_op
= OPC_TLBR
;
10660 mips32_op
= OPC_TLBWI
;
10663 mips32_op
= OPC_TLBWR
;
10666 mips32_op
= OPC_WAIT
;
10669 mips32_op
= OPC_DERET
;
10672 mips32_op
= OPC_ERET
;
10674 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
10677 goto pool32axf_invalid
;
10683 check_cp0_enabled(ctx
);
10685 TCGv t0
= tcg_temp_new();
10687 save_cpu_state(ctx
, 1);
10688 gen_helper_di(t0
, cpu_env
);
10689 gen_store_gpr(t0
, rs
);
10690 /* Stop translation as we may have switched the execution mode */
10691 ctx
->bstate
= BS_STOP
;
10696 check_cp0_enabled(ctx
);
10698 TCGv t0
= tcg_temp_new();
10700 save_cpu_state(ctx
, 1);
10701 gen_helper_ei(t0
, cpu_env
);
10702 gen_store_gpr(t0
, rs
);
10703 /* Stop translation as we may have switched the execution mode */
10704 ctx
->bstate
= BS_STOP
;
10709 goto pool32axf_invalid
;
10719 generate_exception(ctx
, EXCP_SYSCALL
);
10720 ctx
->bstate
= BS_STOP
;
10723 check_insn(env
, ctx
, ISA_MIPS32
);
10724 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10725 generate_exception(ctx
, EXCP_DBp
);
10727 generate_exception(ctx
, EXCP_DBp
);
10731 goto pool32axf_invalid
;
10737 gen_HILO(ctx
, OPC_MFHI
, rs
);
10740 gen_HILO(ctx
, OPC_MFLO
, rs
);
10743 gen_HILO(ctx
, OPC_MTHI
, rs
);
10746 gen_HILO(ctx
, OPC_MTLO
, rs
);
10749 goto pool32axf_invalid
;
10754 MIPS_INVAL("pool32axf");
10755 generate_exception(ctx
, EXCP_RI
);
10760 /* Values for microMIPS fmt field. Variable-width, depending on which
10761 formats the instruction supports. */
static void gen_pool32fxf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
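/*
 * Illustrative note (added): these helper macros build the switch keys
 * used below by packing the format selector above the minor opcode.
 * FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D), for instance, is simply
 * (FMT_SD_D << 8) | RSQRT_FMT, so the single- and double-precision
 * variants of one operation land on distinct case labels.
 */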
    switch (extension) {
10790 case FLOAT_1BIT_FMT(CFC1
, 0):
10791 mips32_op
= OPC_CFC1
;
10793 case FLOAT_1BIT_FMT(CTC1
, 0):
10794 mips32_op
= OPC_CTC1
;
10796 case FLOAT_1BIT_FMT(MFC1
, 0):
10797 mips32_op
= OPC_MFC1
;
10799 case FLOAT_1BIT_FMT(MTC1
, 0):
10800 mips32_op
= OPC_MTC1
;
10802 case FLOAT_1BIT_FMT(MFHC1
, 0):
10803 mips32_op
= OPC_MFHC1
;
10805 case FLOAT_1BIT_FMT(MTHC1
, 0):
10806 mips32_op
= OPC_MTHC1
;
10808 gen_cp1(ctx
, mips32_op
, rt
, rs
);
10811 /* Reciprocal square root */
10812 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
10813 mips32_op
= OPC_RSQRT_S
;
10815 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
10816 mips32_op
= OPC_RSQRT_D
;
10820 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
10821 mips32_op
= OPC_SQRT_S
;
10823 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
10824 mips32_op
= OPC_SQRT_D
;
10828 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
10829 mips32_op
= OPC_RECIP_S
;
10831 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
10832 mips32_op
= OPC_RECIP_D
;
10836 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
10837 mips32_op
= OPC_FLOOR_L_S
;
10839 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
10840 mips32_op
= OPC_FLOOR_L_D
;
10842 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
10843 mips32_op
= OPC_FLOOR_W_S
;
10845 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
10846 mips32_op
= OPC_FLOOR_W_D
;
10850 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
10851 mips32_op
= OPC_CEIL_L_S
;
10853 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
10854 mips32_op
= OPC_CEIL_L_D
;
10856 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
10857 mips32_op
= OPC_CEIL_W_S
;
10859 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
10860 mips32_op
= OPC_CEIL_W_D
;
10864 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
10865 mips32_op
= OPC_TRUNC_L_S
;
10867 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
10868 mips32_op
= OPC_TRUNC_L_D
;
10870 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
10871 mips32_op
= OPC_TRUNC_W_S
;
10873 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
10874 mips32_op
= OPC_TRUNC_W_D
;
10878 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
10879 mips32_op
= OPC_ROUND_L_S
;
10881 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
10882 mips32_op
= OPC_ROUND_L_D
;
10884 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
10885 mips32_op
= OPC_ROUND_W_S
;
10887 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
10888 mips32_op
= OPC_ROUND_W_D
;
10891 /* Integer to floating-point conversion */
10892 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
10893 mips32_op
= OPC_CVT_L_S
;
10895 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
10896 mips32_op
= OPC_CVT_L_D
;
10898 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
10899 mips32_op
= OPC_CVT_W_S
;
10901 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
10902 mips32_op
= OPC_CVT_W_D
;
10905 /* Paired-foo conversions */
10906 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
10907 mips32_op
= OPC_CVT_S_PL
;
10909 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
10910 mips32_op
= OPC_CVT_S_PU
;
10912 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
10913 mips32_op
= OPC_CVT_PW_PS
;
10915 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
10916 mips32_op
= OPC_CVT_PS_PW
;
10919 /* Floating-point moves */
10920 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
10921 mips32_op
= OPC_MOV_S
;
10923 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
10924 mips32_op
= OPC_MOV_D
;
10926 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
10927 mips32_op
= OPC_MOV_PS
;
10930 /* Absolute value */
10931 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
10932 mips32_op
= OPC_ABS_S
;
10934 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
10935 mips32_op
= OPC_ABS_D
;
10937 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
10938 mips32_op
= OPC_ABS_PS
;
10942 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
10943 mips32_op
= OPC_NEG_S
;
10945 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
10946 mips32_op
= OPC_NEG_D
;
10948 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
10949 mips32_op
= OPC_NEG_PS
;
10952 /* Reciprocal square root step */
10953 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
10954 mips32_op
= OPC_RSQRT1_S
;
10956 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
10957 mips32_op
= OPC_RSQRT1_D
;
10959 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
10960 mips32_op
= OPC_RSQRT1_PS
;
10963 /* Reciprocal step */
10964 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
10965 mips32_op
= OPC_RECIP1_S
;
10967 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
10968 mips32_op
= OPC_RECIP1_S
;
10970 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
10971 mips32_op
= OPC_RECIP1_PS
;
10974 /* Conversions from double */
10975 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
10976 mips32_op
= OPC_CVT_D_S
;
10978 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
10979 mips32_op
= OPC_CVT_D_W
;
10981 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
10982 mips32_op
= OPC_CVT_D_L
;
10985 /* Conversions from single */
10986 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
10987 mips32_op
= OPC_CVT_S_D
;
10989 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
10990 mips32_op
= OPC_CVT_S_W
;
10992 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
10993 mips32_op
= OPC_CVT_S_L
;
10995 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
10998 /* Conditional moves on floating-point codes */
10999 case COND_FLOAT_MOV(MOVT
, 0):
11000 case COND_FLOAT_MOV(MOVT
, 1):
11001 case COND_FLOAT_MOV(MOVT
, 2):
11002 case COND_FLOAT_MOV(MOVT
, 3):
11003 case COND_FLOAT_MOV(MOVT
, 4):
11004 case COND_FLOAT_MOV(MOVT
, 5):
11005 case COND_FLOAT_MOV(MOVT
, 6):
11006 case COND_FLOAT_MOV(MOVT
, 7):
11007 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
11009 case COND_FLOAT_MOV(MOVF
, 0):
11010 case COND_FLOAT_MOV(MOVF
, 1):
11011 case COND_FLOAT_MOV(MOVF
, 2):
11012 case COND_FLOAT_MOV(MOVF
, 3):
11013 case COND_FLOAT_MOV(MOVF
, 4):
11014 case COND_FLOAT_MOV(MOVF
, 5):
11015 case COND_FLOAT_MOV(MOVF
, 6):
11016 case COND_FLOAT_MOV(MOVF
, 7):
11017 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
11020 MIPS_INVAL("pool32fxf");
11021 generate_exception(ctx
, EXCP_RI
);
static void decode_micromips32_opc (CPUMIPSState *env, DisasContext *ctx,
                                    uint16_t insn_hw1, int *is_branch)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
= ctx
->opcode
& 0x3f;
11051 minor
= (ctx
->opcode
>> 6) & 0xf;
11054 mips32_op
= OPC_SLL
;
11057 mips32_op
= OPC_SRA
;
11060 mips32_op
= OPC_SRL
;
11063 mips32_op
= OPC_ROTR
;
11065 gen_shift_imm(env
, ctx
, mips32_op
, rt
, rs
, rd
);
11068 goto pool32a_invalid
;
11072 minor
= (ctx
->opcode
>> 6) & 0xf;
11076 mips32_op
= OPC_ADD
;
11079 mips32_op
= OPC_ADDU
;
11082 mips32_op
= OPC_SUB
;
11085 mips32_op
= OPC_SUBU
;
11088 mips32_op
= OPC_MUL
;
11090 gen_arith(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11094 mips32_op
= OPC_SLLV
;
11097 mips32_op
= OPC_SRLV
;
11100 mips32_op
= OPC_SRAV
;
11103 mips32_op
= OPC_ROTRV
;
11105 gen_shift(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11107 /* Logical operations */
11109 mips32_op
= OPC_AND
;
11112 mips32_op
= OPC_OR
;
11115 mips32_op
= OPC_NOR
;
11118 mips32_op
= OPC_XOR
;
11120 gen_logic(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11122 /* Set less than */
11124 mips32_op
= OPC_SLT
;
11127 mips32_op
= OPC_SLTU
;
11129 gen_slt(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11132 goto pool32a_invalid
;
11136 minor
= (ctx
->opcode
>> 6) & 0xf;
11138 /* Conditional moves */
11140 mips32_op
= OPC_MOVN
;
11143 mips32_op
= OPC_MOVZ
;
11145 gen_cond_move(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11148 gen_ldxs(ctx
, rs
, rt
, rd
);
11151 goto pool32a_invalid
;
11155 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
11158 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
11161 gen_pool32axf(env
, ctx
, rt
, rs
, is_branch
);
11164 generate_exception(ctx
, EXCP_BREAK
);
11168 MIPS_INVAL("pool32a");
11169 generate_exception(ctx
, EXCP_RI
);
11174 minor
= (ctx
->opcode
>> 12) & 0xf;
11177 check_cp0_enabled(ctx
);
11178 /* Treat as no-op. */
11182 /* COP2: Not implemented. */
11183 generate_exception_err(ctx
, EXCP_CpU
, 2);
11187 #ifdef TARGET_MIPS64
11191 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11195 #ifdef TARGET_MIPS64
11199 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11202 MIPS_INVAL("pool32b");
11203 generate_exception(ctx
, EXCP_RI
);
11208 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
11209 minor
= ctx
->opcode
& 0x3f;
11210 check_cp1_enabled(ctx
);
11213 mips32_op
= OPC_ALNV_PS
;
11216 mips32_op
= OPC_MADD_S
;
11219 mips32_op
= OPC_MADD_D
;
11222 mips32_op
= OPC_MADD_PS
;
11225 mips32_op
= OPC_MSUB_S
;
11228 mips32_op
= OPC_MSUB_D
;
11231 mips32_op
= OPC_MSUB_PS
;
11234 mips32_op
= OPC_NMADD_S
;
11237 mips32_op
= OPC_NMADD_D
;
11240 mips32_op
= OPC_NMADD_PS
;
11243 mips32_op
= OPC_NMSUB_S
;
11246 mips32_op
= OPC_NMSUB_D
;
11249 mips32_op
= OPC_NMSUB_PS
;
11251 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
11253 case CABS_COND_FMT
:
11254 cond
= (ctx
->opcode
>> 6) & 0xf;
11255 cc
= (ctx
->opcode
>> 13) & 0x7;
11256 fmt
= (ctx
->opcode
>> 10) & 0x3;
11259 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
11262 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
11265 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
11268 goto pool32f_invalid
;
11272 cond
= (ctx
->opcode
>> 6) & 0xf;
11273 cc
= (ctx
->opcode
>> 13) & 0x7;
11274 fmt
= (ctx
->opcode
>> 10) & 0x3;
11277 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
11280 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
11283 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
11286 goto pool32f_invalid
;
11290 gen_pool32fxf(env
, ctx
, rt
, rs
);
11294 switch ((ctx
->opcode
>> 6) & 0x7) {
11296 mips32_op
= OPC_PLL_PS
;
11299 mips32_op
= OPC_PLU_PS
;
11302 mips32_op
= OPC_PUL_PS
;
11305 mips32_op
= OPC_PUU_PS
;
11308 mips32_op
= OPC_CVT_PS_S
;
11310 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11313 goto pool32f_invalid
;
11318 switch ((ctx
->opcode
>> 6) & 0x7) {
11320 mips32_op
= OPC_LWXC1
;
11323 mips32_op
= OPC_SWXC1
;
11326 mips32_op
= OPC_LDXC1
;
11329 mips32_op
= OPC_SDXC1
;
11332 mips32_op
= OPC_LUXC1
;
11335 mips32_op
= OPC_SUXC1
;
11337 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
11340 goto pool32f_invalid
;
11345 fmt
= (ctx
->opcode
>> 9) & 0x3;
11346 switch ((ctx
->opcode
>> 6) & 0x7) {
11350 mips32_op
= OPC_RSQRT2_S
;
11353 mips32_op
= OPC_RSQRT2_D
;
11356 mips32_op
= OPC_RSQRT2_PS
;
11359 goto pool32f_invalid
;
11365 mips32_op
= OPC_RECIP2_S
;
11368 mips32_op
= OPC_RECIP2_D
;
11371 mips32_op
= OPC_RECIP2_PS
;
11374 goto pool32f_invalid
;
11378 mips32_op
= OPC_ADDR_PS
;
11381 mips32_op
= OPC_MULR_PS
;
11383 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11386 goto pool32f_invalid
;
11390 /* MOV[FT].fmt and PREFX */
11391 cc
= (ctx
->opcode
>> 13) & 0x7;
11392 fmt
= (ctx
->opcode
>> 9) & 0x3;
11393 switch ((ctx
->opcode
>> 6) & 0x7) {
11397 gen_movcf_s(rs
, rt
, cc
, 0);
11400 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
11403 gen_movcf_ps(rs
, rt
, cc
, 0);
11406 goto pool32f_invalid
;
11412 gen_movcf_s(rs
, rt
, cc
, 1);
11415 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
11418 gen_movcf_ps(rs
, rt
, cc
, 1);
11421 goto pool32f_invalid
;
11427 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                               \
    switch ((ctx->opcode >> 8) & 0x3) {                     \
    case FMT_SDPS_S:                                        \
        mips32_op = OPC_##prfx##_S;                         \
        break;                                              \
    case FMT_SDPS_D:                                        \
        mips32_op = OPC_##prfx##_D;                         \
        break;                                              \
    case FMT_SDPS_PS:                                       \
        mips32_op = OPC_##prfx##_PS;                        \
        break;                                              \
    default:                                                \
        goto pool32f_invalid;                               \
    }
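/* Illustrative note (added): FINSN_3ARG_SDPS(ADD) expands to a switch on
 * the 2-bit format field in opcode bits 9..8 that selects OPC_ADD_S,
 * OPC_ADD_D or OPC_ADD_PS (assuming the case labels reconstructed above)
 * and jumps to pool32f_invalid for any other format value. */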
11445 /* regular FP ops */
11446 switch ((ctx
->opcode
>> 6) & 0x3) {
11448 FINSN_3ARG_SDPS(ADD
);
11451 FINSN_3ARG_SDPS(SUB
);
11454 FINSN_3ARG_SDPS(MUL
);
11457 fmt
= (ctx
->opcode
>> 8) & 0x3;
11459 mips32_op
= OPC_DIV_D
;
11460 } else if (fmt
== 0) {
11461 mips32_op
= OPC_DIV_S
;
11463 goto pool32f_invalid
;
11467 goto pool32f_invalid
;
11472 switch ((ctx
->opcode
>> 6) & 0x3) {
11474 FINSN_3ARG_SDPS(MOVN
);
11477 FINSN_3ARG_SDPS(MOVZ
);
11480 goto pool32f_invalid
;
11484 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11488 MIPS_INVAL("pool32f");
11489 generate_exception(ctx
, EXCP_RI
);
11493 generate_exception_err(ctx
, EXCP_CpU
, 1);
11497 minor
= (ctx
->opcode
>> 21) & 0x1f;
11500 mips32_op
= OPC_BLTZ
;
11503 mips32_op
= OPC_BLTZAL
;
11506 mips32_op
= OPC_BLTZALS
;
11509 mips32_op
= OPC_BGEZ
;
11512 mips32_op
= OPC_BGEZAL
;
11515 mips32_op
= OPC_BGEZALS
;
11518 mips32_op
= OPC_BLEZ
;
11521 mips32_op
= OPC_BGTZ
;
11523 gen_compute_branch(ctx
, mips32_op
, 4, rs
, -1, imm
<< 1);
11529 mips32_op
= OPC_TLTI
;
11532 mips32_op
= OPC_TGEI
;
11535 mips32_op
= OPC_TLTIU
;
11538 mips32_op
= OPC_TGEIU
;
11541 mips32_op
= OPC_TNEI
;
11544 mips32_op
= OPC_TEQI
;
11546 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
11551 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
11552 4, rs
, 0, imm
<< 1);
11553 /* Compact branches don't have a delay slot, so just let
11554 the normal delay slot handling take us to the branch
11558 gen_logic_imm(env
, ctx
, OPC_LUI
, rs
, -1, imm
);
11564 /* COP2: Not implemented. */
11565 generate_exception_err(ctx
, EXCP_CpU
, 2);
11568 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
11571 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
11574 mips32_op
= OPC_BC1FANY4
;
11577 mips32_op
= OPC_BC1TANY4
;
11580 check_insn(env
, ctx
, ASE_MIPS3D
);
11583 gen_compute_branch1(env
, ctx
, mips32_op
,
11584 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
11589 /* MIPS DSP: not implemented */
11592 MIPS_INVAL("pool32i");
11593 generate_exception(ctx
, EXCP_RI
);
11598 minor
= (ctx
->opcode
>> 12) & 0xf;
11601 mips32_op
= OPC_LWL
;
11604 mips32_op
= OPC_SWL
;
11607 mips32_op
= OPC_LWR
;
11610 mips32_op
= OPC_SWR
;
11612 #if defined(TARGET_MIPS64)
11614 mips32_op
= OPC_LDL
;
11617 mips32_op
= OPC_SDL
;
11620 mips32_op
= OPC_LDR
;
11623 mips32_op
= OPC_SDR
;
11626 mips32_op
= OPC_LWU
;
11629 mips32_op
= OPC_LLD
;
11633 mips32_op
= OPC_LL
;
11636 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11639 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11642 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11644 #if defined(TARGET_MIPS64)
11646 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11650 /* Treat as no-op */
11653 MIPS_INVAL("pool32c");
11654 generate_exception(ctx
, EXCP_RI
);
11659 mips32_op
= OPC_ADDI
;
11662 mips32_op
= OPC_ADDIU
;
11664 gen_arith_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11667 /* Logical operations */
11669 mips32_op
= OPC_ORI
;
11672 mips32_op
= OPC_XORI
;
11675 mips32_op
= OPC_ANDI
;
11677 gen_logic_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11680 /* Set less than immediate */
11682 mips32_op
= OPC_SLTI
;
11685 mips32_op
= OPC_SLTIU
;
11687 gen_slt_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11690 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
11691 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
);
11695 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
11696 gen_compute_branch(ctx
, OPC_JALS
, 4, rt
, rs
, offset
);
11700 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1);
11704 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1);
11708 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
11709 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11713 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
11714 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11717 /* Floating point (COP1) */
11719 mips32_op
= OPC_LWC1
;
11722 mips32_op
= OPC_LDC1
;
11725 mips32_op
= OPC_SWC1
;
11728 mips32_op
= OPC_SDC1
;
11730 gen_cop1_ldst(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11734 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
11735 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
11737 gen_addiupc(ctx
, reg
, offset
, 0, 0);
11740 /* Loads and stores */
11742 mips32_op
= OPC_LB
;
11745 mips32_op
= OPC_LBU
;
11748 mips32_op
= OPC_LH
;
11751 mips32_op
= OPC_LHU
;
11754 mips32_op
= OPC_LW
;
11756 #ifdef TARGET_MIPS64
11758 mips32_op
= OPC_LD
;
11761 mips32_op
= OPC_SD
;
11765 mips32_op
= OPC_SB
;
11768 mips32_op
= OPC_SH
;
11771 mips32_op
= OPC_SW
;
11774 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11777 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
11780 generate_exception(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);
        ctx->bstate = BS_STOP;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int bits = ctx->hflags & MIPS_HFLAG_BMASK_EXT;
11834 case POOL48A
: /* ??? */
11839 if (bits
& MIPS_HFLAG_BDS16
) {
11840 generate_exception(ctx
, EXCP_RI
);
11841 /* Just stop translation; the user is confused. */
11842 ctx
->bstate
= BS_STOP
;
11867 if (bits
& MIPS_HFLAG_BDS32
) {
11868 generate_exception(ctx
, EXCP_RI
);
11869 /* Just stop translation; the user is confused. */
11870 ctx
->bstate
= BS_STOP
;
11881 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11882 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
11883 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
11886 switch (ctx
->opcode
& 0x1) {
11895 gen_arith(env
, ctx
, opc
, rd
, rs1
, rs2
);
11900 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11901 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
11902 int amount
= (ctx
->opcode
>> 1) & 0x7;
11904 amount
= amount
== 0 ? 8 : amount
;
11906 switch (ctx
->opcode
& 0x1) {
11915 gen_shift_imm(env
, ctx
, opc
, rd
, rs
, amount
);
11919 gen_pool16c_insn(env
, ctx
, is_branch
);
11923 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11924 int rb
= 28; /* GP */
11925 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
11927 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11931 if (ctx
->opcode
& 1) {
11932 generate_exception(ctx
, EXCP_RI
);
11935 int enc_dest
= uMIPS_RD(ctx
->opcode
);
11936 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
11937 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
11938 int rd
, rs
, re
, rt
;
11939 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
11940 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
11941 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
11943 rd
= rd_enc
[enc_dest
];
11944 re
= re_enc
[enc_dest
];
11945 rs
= rs_rt_enc
[enc_rs
];
11946 rt
= rs_rt_enc
[enc_rt
];
11948 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
11949 gen_arith_imm(env
, ctx
, OPC_ADDIU
, re
, rt
, 0);
11954 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11955 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11956 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11957 offset
= (offset
== 0xf ? -1 : offset
);
11959 gen_ld(env
, ctx
, OPC_LBU
, rd
, rb
, offset
);
11964 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11965 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11966 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
11968 gen_ld(env
, ctx
, OPC_LHU
, rd
, rb
, offset
);
11973 int rd
= (ctx
->opcode
>> 5) & 0x1f;
11974 int rb
= 29; /* SP */
11975 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
11977 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11982 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11983 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11984 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
11986 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11991 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
11992 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11993 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11995 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
12000 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12001 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12002 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
12004 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
12009 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12010 int rb
= 29; /* SP */
12011 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
12013 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12018 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12019 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12020 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
12022 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12027 int rd
= uMIPS_RD5(ctx
->opcode
);
12028 int rs
= uMIPS_RS5(ctx
->opcode
);
12030 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
12034 gen_andi16(env
, ctx
);
12037 switch (ctx
->opcode
& 0x1) {
12039 gen_addius5(env
, ctx
);
12042 gen_addiusp(env
, ctx
);
12047 switch (ctx
->opcode
& 0x1) {
12049 gen_addiur2(env
, ctx
);
12052 gen_addiur1sp(env
, ctx
);
12057 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
12058 SIMM(ctx
->opcode
, 0, 10) << 1);
12063 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
12064 mmreg(uMIPS_RD(ctx
->opcode
)),
12065 0, SIMM(ctx
->opcode
, 0, 7) << 1);
12070 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
12071 int imm
= ZIMM(ctx
->opcode
, 0, 7);
12073 imm
= (imm
== 0x7f ? -1 : imm
);
12074 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
12084 generate_exception(ctx
, EXCP_RI
);
12087 decode_micromips32_opc (env
, ctx
, op
, is_branch
);
12094 /* SmartMIPS extension to MIPS32 */
12096 #if defined(TARGET_MIPS64)
12098 /* MDMX extension to MIPS64 */
static void decode_opc (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        int l1 = gen_new_label();

        MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
    }

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
        tcg_gen_debug_insn_start(ctx->pc);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
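    /*
     * Illustrative note (added): this mirrors the standard MIPS32
     * instruction layout, so the same field extraction serves the R-, I-
     * and J-type forms: bits 31..26 major opcode, 25..21 rs, 20..16 rt,
     * 15..11 rd, 10..6 sa, with the low 16 bits doubling as the
     * sign-extended immediate.
     */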
12139 op1
= MASK_SPECIAL(ctx
->opcode
);
12141 case OPC_SLL
: /* Shift with immediate */
12143 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12146 switch ((ctx
->opcode
>> 21) & 0x1f) {
12148 /* rotr is decoded as srl on non-R2 CPUs */
12149 if (env
->insn_flags
& ISA_MIPS32R2
) {
12154 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12157 generate_exception(ctx
, EXCP_RI
);
12161 case OPC_MOVN
: /* Conditional move */
12163 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
|
12164 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
12165 gen_cond_move(env
, ctx
, op1
, rd
, rs
, rt
);
12167 case OPC_ADD
... OPC_SUBU
:
12168 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12170 case OPC_SLLV
: /* Shifts */
12172 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12175 switch ((ctx
->opcode
>> 6) & 0x1f) {
12177 /* rotrv is decoded as srlv on non-R2 CPUs */
12178 if (env
->insn_flags
& ISA_MIPS32R2
) {
12183 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12186 generate_exception(ctx
, EXCP_RI
);
12190 case OPC_SLT
: /* Set on less than */
12192 gen_slt(env
, ctx
, op1
, rd
, rs
, rt
);
12194 case OPC_AND
: /* Logic*/
12198 gen_logic(env
, ctx
, op1
, rd
, rs
, rt
);
12200 case OPC_MULT
... OPC_DIVU
:
12202 check_insn(env
, ctx
, INSN_VR54XX
);
12203 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
12204 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
12206 gen_muldiv(ctx
, op1
, rs
, rt
);
12208 case OPC_JR
... OPC_JALR
:
12209 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
);
12212 case OPC_TGE
... OPC_TEQ
: /* Traps */
12214 gen_trap(ctx
, op1
, rs
, rt
, -1);
12216 case OPC_MFHI
: /* Move from HI/LO */
12218 gen_HILO(ctx
, op1
, rd
);
12221 case OPC_MTLO
: /* Move to HI/LO */
12222 gen_HILO(ctx
, op1
, rs
);
12224 case OPC_PMON
: /* Pmon entry point, also R4010 selsl */
12225 #ifdef MIPS_STRICT_STANDARD
12226 MIPS_INVAL("PMON / selsl");
12227 generate_exception(ctx
, EXCP_RI
);
12229 gen_helper_0e0i(pmon
, sa
);
12233 generate_exception(ctx
, EXCP_SYSCALL
);
12234 ctx
->bstate
= BS_STOP
;
12237 generate_exception(ctx
, EXCP_BREAK
);
12240 #ifdef MIPS_STRICT_STANDARD
12241 MIPS_INVAL("SPIM");
12242 generate_exception(ctx
, EXCP_RI
);
12244 /* Implemented as RI exception for now. */
12245 MIPS_INVAL("spim (unofficial)");
12246 generate_exception(ctx
, EXCP_RI
);
12250 /* Treat as NOP. */
12254 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12255 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12256 check_cp1_enabled(ctx
);
12257 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
12258 (ctx
->opcode
>> 16) & 1);
12260 generate_exception_err(ctx
, EXCP_CpU
, 1);
12264 #if defined(TARGET_MIPS64)
12265 /* MIPS64 specific opcodes */
12270 check_insn(env
, ctx
, ISA_MIPS3
);
12271 check_mips_64(ctx
);
12272 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12275 switch ((ctx
->opcode
>> 21) & 0x1f) {
12277 /* drotr is decoded as dsrl on non-R2 CPUs */
12278 if (env
->insn_flags
& ISA_MIPS32R2
) {
12283 check_insn(env
, ctx
, ISA_MIPS3
);
12284 check_mips_64(ctx
);
12285 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12288 generate_exception(ctx
, EXCP_RI
);
12293 switch ((ctx
->opcode
>> 21) & 0x1f) {
12295 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
12296 if (env
->insn_flags
& ISA_MIPS32R2
) {
12301 check_insn(env
, ctx
, ISA_MIPS3
);
12302 check_mips_64(ctx
);
12303 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12306 generate_exception(ctx
, EXCP_RI
);
12310 case OPC_DADD
... OPC_DSUBU
:
12311 check_insn(env
, ctx
, ISA_MIPS3
);
12312 check_mips_64(ctx
);
12313 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12317 check_insn(env
, ctx
, ISA_MIPS3
);
12318 check_mips_64(ctx
);
12319 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12322 switch ((ctx
->opcode
>> 6) & 0x1f) {
12324 /* drotrv is decoded as dsrlv on non-R2 CPUs */
12325 if (env
->insn_flags
& ISA_MIPS32R2
) {
12330 check_insn(env
, ctx
, ISA_MIPS3
);
12331 check_mips_64(ctx
);
12332 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12335 generate_exception(ctx
, EXCP_RI
);
12339 case OPC_DMULT
... OPC_DDIVU
:
12340 check_insn(env
, ctx
, ISA_MIPS3
);
12341 check_mips_64(ctx
);
12342 gen_muldiv(ctx
, op1
, rs
, rt
);
12345 default: /* Invalid */
12346 MIPS_INVAL("special");
12347 generate_exception(ctx
, EXCP_RI
);
12352 op1
= MASK_SPECIAL2(ctx
->opcode
);
12354 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
12355 case OPC_MSUB
... OPC_MSUBU
:
12356 check_insn(env
, ctx
, ISA_MIPS32
);
12357 gen_muldiv(ctx
, op1
, rs
, rt
);
12360 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12364 check_insn(env
, ctx
, ISA_MIPS32
);
12365 gen_cl(ctx
, op1
, rd
, rs
);
12368 /* XXX: not clear which exception should be raised
12369 * when in debug mode...
12371 check_insn(env
, ctx
, ISA_MIPS32
);
12372 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
12373 generate_exception(ctx
, EXCP_DBp
);
12375 generate_exception(ctx
, EXCP_DBp
);
12377 /* Treat as NOP. */
12380 case OPC_DIVU_G_2F
:
12381 case OPC_MULT_G_2F
:
12382 case OPC_MULTU_G_2F
:
12384 case OPC_MODU_G_2F
:
12385 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12386 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12388 #if defined(TARGET_MIPS64)
12391 check_insn(env
, ctx
, ISA_MIPS64
);
12392 check_mips_64(ctx
);
12393 gen_cl(ctx
, op1
, rd
, rs
);
12395 case OPC_DMULT_G_2F
:
12396 case OPC_DMULTU_G_2F
:
12397 case OPC_DDIV_G_2F
:
12398 case OPC_DDIVU_G_2F
:
12399 case OPC_DMOD_G_2F
:
12400 case OPC_DMODU_G_2F
:
12401 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12402 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12405 default: /* Invalid */
12406 MIPS_INVAL("special2");
12407 generate_exception(ctx
, EXCP_RI
);
12412 op1
= MASK_SPECIAL3(ctx
->opcode
);
12416 check_insn(env
, ctx
, ISA_MIPS32R2
);
12417 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12420 check_insn(env
, ctx
, ISA_MIPS32R2
);
12421 op2
= MASK_BSHFL(ctx
->opcode
);
12422 gen_bshfl(ctx
, op2
, rt
, rd
);
12425 gen_rdhwr(env
, ctx
, rt
, rd
);
12428 check_insn(env
, ctx
, ASE_MT
);
12430 TCGv t0
= tcg_temp_new();
12431 TCGv t1
= tcg_temp_new();
12433 gen_load_gpr(t0
, rt
);
12434 gen_load_gpr(t1
, rs
);
12435 gen_helper_fork(t0
, t1
);
12441 check_insn(env
, ctx
, ASE_MT
);
12443 TCGv t0
= tcg_temp_new();
12445 save_cpu_state(ctx
, 1);
12446 gen_load_gpr(t0
, rs
);
12447 gen_helper_yield(t0
, cpu_env
, t0
);
12448 gen_store_gpr(t0
, rd
);
12452 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
12453 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
12454 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
12455 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12456 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12458 #if defined(TARGET_MIPS64)
12459 case OPC_DEXTM
... OPC_DEXT
:
12460 case OPC_DINSM
... OPC_DINS
:
12461 check_insn(env
, ctx
, ISA_MIPS64R2
);
12462 check_mips_64(ctx
);
12463 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12466 check_insn(env
, ctx
, ISA_MIPS64R2
);
12467 check_mips_64(ctx
);
12468 op2
= MASK_DBSHFL(ctx
->opcode
);
12469 gen_bshfl(ctx
, op2
, rt
, rd
);
12471 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
12472 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
12473 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
12474 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12475 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12478 default: /* Invalid */
12479 MIPS_INVAL("special3");
12480 generate_exception(ctx
, EXCP_RI
);
12485 op1
= MASK_REGIMM(ctx
->opcode
);
12487 case OPC_BLTZ
... OPC_BGEZL
: /* REGIMM branches */
12488 case OPC_BLTZAL
... OPC_BGEZALL
:
12489 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2);
12492 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
12494 gen_trap(ctx
, op1
, rs
, -1, imm
);
12497 check_insn(env
, ctx
, ISA_MIPS32R2
);
12498 /* Treat as NOP. */
12500 default: /* Invalid */
12501 MIPS_INVAL("regimm");
12502 generate_exception(ctx
, EXCP_RI
);
12507 check_cp0_enabled(ctx
);
12508 op1
= MASK_CP0(ctx
->opcode
);
12514 #if defined(TARGET_MIPS64)
12518 #ifndef CONFIG_USER_ONLY
12519 gen_cp0(env
, ctx
, op1
, rt
, rd
);
12520 #endif /* !CONFIG_USER_ONLY */
12522 case OPC_C0_FIRST
... OPC_C0_LAST
:
12523 #ifndef CONFIG_USER_ONLY
12524 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
12525 #endif /* !CONFIG_USER_ONLY */
12528 #ifndef CONFIG_USER_ONLY
12530 TCGv t0
= tcg_temp_new();
12532 op2
= MASK_MFMC0(ctx
->opcode
);
12535 check_insn(env
, ctx
, ASE_MT
);
12536 gen_helper_dmt(t0
);
12537 gen_store_gpr(t0
, rt
);
12540 check_insn(env
, ctx
, ASE_MT
);
12541 gen_helper_emt(t0
);
12542 gen_store_gpr(t0
, rt
);
12545 check_insn(env
, ctx
, ASE_MT
);
12546 gen_helper_dvpe(t0
, cpu_env
);
12547 gen_store_gpr(t0
, rt
);
12550 check_insn(env
, ctx
, ASE_MT
);
12551 gen_helper_evpe(t0
, cpu_env
);
12552 gen_store_gpr(t0
, rt
);
12555 check_insn(env
, ctx
, ISA_MIPS32R2
);
12556 save_cpu_state(ctx
, 1);
12557 gen_helper_di(t0
, cpu_env
);
12558 gen_store_gpr(t0
, rt
);
12559 /* Stop translation as we may have switched the execution mode */
12560 ctx
->bstate
= BS_STOP
;
12563 check_insn(env
, ctx
, ISA_MIPS32R2
);
12564 save_cpu_state(ctx
, 1);
12565 gen_helper_ei(t0
, cpu_env
);
12566 gen_store_gpr(t0
, rt
);
12567 /* Stop translation as we may have switched the execution mode */
12568 ctx
->bstate
= BS_STOP
;
12570 default: /* Invalid */
12571 MIPS_INVAL("mfmc0");
12572 generate_exception(ctx
, EXCP_RI
);
12577 #endif /* !CONFIG_USER_ONLY */
12580 check_insn(env
, ctx
, ISA_MIPS32R2
);
12581 gen_load_srsgpr(rt
, rd
);
12584 check_insn(env
, ctx
, ISA_MIPS32R2
);
12585 gen_store_srsgpr(rt
, rd
);
12589 generate_exception(ctx
, EXCP_RI
);
12593 case OPC_ADDI
: /* Arithmetic with immediate opcode */
12595 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12597 case OPC_SLTI
: /* Set on less than with immediate opcode */
12599 gen_slt_imm(env
, ctx
, op
, rt
, rs
, imm
);
12601 case OPC_ANDI
: /* Arithmetic with immediate opcode */
12605 gen_logic_imm(env
, ctx
, op
, rt
, rs
, imm
);
12607 case OPC_J
... OPC_JAL
: /* Jump */
12608 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12609 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12612 case OPC_BEQ
... OPC_BGTZ
: /* Branch */
12613 case OPC_BEQL
... OPC_BGTZL
:
12614 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2);
12617 case OPC_LB
... OPC_LWR
: /* Load and stores */
12619 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12621 case OPC_SB
... OPC_SW
:
12623 gen_st(ctx
, op
, rt
, rs
, imm
);
12626 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12629 check_cp0_enabled(ctx
);
12630 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
12631 /* Treat as NOP. */
12634 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12635 /* Treat as NOP. */
    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(env, ctx, op, rt, rs, imm);
        break;

    case OPC_CP1:
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP1(ctx->opcode);
            switch (op1) {
            case OPC_MFHC1:
            case OPC_MTHC1:
                check_insn(env, ctx, ISA_MIPS32R2);
            case OPC_MFC1:
            case OPC_CFC1:
            case OPC_MTC1:
            case OPC_CTC1:
                gen_cp1(ctx, op1, rt, rd);
                break;
#if defined(TARGET_MIPS64)
            case OPC_DMFC1:
            case OPC_DMTC1:
                check_insn(env, ctx, ISA_MIPS3);
                gen_cp1(ctx, op1, rt, rd);
                break;
#endif
            case OPC_BC1ANY2:
            case OPC_BC1ANY4:
                check_cop1x(ctx);
                check_insn(env, ctx, ASE_MIPS3D);
                /* fall through */
            case OPC_BC1:
                gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
                *is_branch = 1;
                break;
            case OPC_S_FMT:
            case OPC_D_FMT:
            case OPC_W_FMT:
            case OPC_L_FMT:
            case OPC_PS_FMT:
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
                break;
            default:
                MIPS_INVAL("cp1");
                generate_exception (ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_LWC2:
    case OPC_LDC2:
    case OPC_SWC2:
    case OPC_SDC2:
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
        break;
    case OPC_CP2:
        check_insn(env, ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;
    case OPC_CP3:
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_LUXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
            case OPC_SUXC1:
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception (ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
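    /* The 64-bit loads/stores and doubleword immediate arithmetic below are
       only decoded on TARGET_MIPS64 builds.  check_insn() guards for
       ISA_MIPS3 availability and check_mips_64() additionally verifies that
       64-bit operations are enabled in the current mode, raising a reserved
       instruction exception otherwise. */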
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LWU:
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
    case OPC_LD:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(env, ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
    case OPC_SD:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_DADDI:
    case OPC_DADDIU:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(env, ctx, op, rt, rs, imm);
        break;
#endif
    case OPC_JALX:
        check_insn(env, ctx, ASE_MIPS16 | ASE_MICROMIPS);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
        *is_branch = 1;
        break;
    case OPC_MDMX:
        check_insn(env, ctx, ASE_MDMX);
        /* MDMX: Not implemented. */
        break;
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception(ctx, EXCP_RI);
        break;
    }
}
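/* Translate a block of guest instructions into TCG ops, starting at tb->pc
   and stopping at the first branch, page boundary, opcode-buffer limit or
   per-TB instruction limit.  With search_pc set, the gen_opc_* side tables
   are also filled in so that a point inside the generated code can later be
   mapped back to a guest PC (see restore_state_to_opc() below). */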
static inline void
gen_intermediate_code_internal (CPUMIPSState *env, TranslationBlock *tb,
                                int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_branch;

    if (search_pc)
        qemu_log("search pc %d\n", search_pc);

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    /* Restore delay slot state from the tb context.  */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_icount_start();
    while (ctx.bstate == BS_NONE) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    save_cpu_state(&ctx, 1);
                    ctx.bstate = BS_BRANCH;
                    gen_helper_0e0i(raise_exception, EXCP_DEBUG);
                    /* Include the breakpoint location or the tb won't
                     * be flushed when it must be.  */
                    ctx.pc += 4;
                    goto done_generating;
                }
            }
        }

        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        is_branch = 0;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx, &is_branch);
        } else {
            generate_exception(&ctx, EXCP_RI);
            ctx.bstate = BS_STOP;
            break;
        }
        if (!is_branch) {
            handle_delay_slot(env, &ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        num_insns++;
        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch).  */
        if (env->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0)
            break;

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate == BS_NONE);
        gen_helper_0e0i(raise_exception, EXCP_DEBUG);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    LOG_DISAS("\n");
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUMIPSState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUMIPSState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
/* Debug help: The architecture requires 32bit code to maintain proper
   sign-extended values on 64bit machines.  */

#define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))

static void
cpu_mips_check_sign_extensions (CPUMIPSState *env, FILE *f,
                                fprintf_function cpu_fprintf,
                                int flags)
{
    int i;

    if (!SIGN_EXT_P(env->active_tc.PC))
        cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
    if (!SIGN_EXT_P(env->active_tc.HI[0]))
        cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
    if (!SIGN_EXT_P(env->active_tc.LO[0]))
        cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
    if (!SIGN_EXT_P(env->btarget))
        cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);

    for (i = 0; i < 32; i++) {
        if (!SIGN_EXT_P(env->active_tc.gpr[i]))
            cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
    }

    if (!SIGN_EXT_P(env->CP0_EPC))
        cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
    if (!SIGN_EXT_P(env->lladdr))
        cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->lladdr);
}
#endif
void cpu_dump_state (CPUMIPSState *env, FILE *f, fprintf_function cpu_fprintf,
                     int flags)
{
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, "    Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
    cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
#endif
}
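/* Create the TCG globals that back the architectural state: one per GPR
   (the hard-wired zero register stays unused), the PC, the HI/LO/ACX
   accumulators, DSPControl, the branch condition and target, hflags and the
   FPU control registers.  Declaring them as globals is what lets the code
   generator track these CPUMIPSState fields across the generated code. */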
static void mips_tcg_init(void)
{
    int i;
    static int inited;

    /* Initialize various static tables. */
    if (inited)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);
    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
        cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.ACX[i]),
                                        regnames_ACX[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    inited = 1;
}

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;
    env->cpu_model_str = cpu_model;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);
    mips_tcg_init();
    cpu_reset(CPU(cpu));
    qemu_init_vcpu(env);
    return cpu;
}
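/* Reset the architectural state.  Note that the memset() below deliberately
   clears only the fields up to 'breakpoints', so host-side bookkeeping such
   as the breakpoint and watchpoint lists survives a CPU reset. */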
void cpu_state_reset(CPUMIPSState *env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

    memset(env, 0, offsetof(CPUMIPSState, breakpoints));

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->PAMask = (target_ulong)((1ULL << env->cpu_model->PABITS) - 1);
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->insn_flags = env->cpu_model->insn_flags;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = env->active_tc.PC - 4;
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = 0x80000000 | (env->cpu_index & 0x3FF);
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active.  */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = env->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;

        if (!env->cpu_index) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#endif
    compute_hflags(env);
    env->exception_index = EXCP_NONE;
}
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb, int pc_pos)
{
    env->active_tc.PC = gen_opc_pc[pc_pos];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= gen_opc_hflags[pc_pos];
}