/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "cpu.h"
#include "disas/disas.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#define MIPS_DEBUG_DISAS 0
//#define MIPS_DEBUG_SIGN_EXTENSIONS

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
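/*
 * Example: the word 0x8C430004 (lw v1, 4(v0)) has 0x23 in bits 31..26, so
 * MASK_OP_MAJOR() reduces it to (0x23 << 26), i.e. OPC_LW below; the
 * remaining fields (base, rt, offset) are decoded separately.
 */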
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_JALS     = OPC_JAL | 0x5,
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    OPC_JALXS    = OPC_JALX | 0x5,
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_JALRC    = OPC_JALR | (0x5 << 6),
    OPC_JALRS    = 0x10 | OPC_SPECIAL | (0x5 << 6),
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
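/*
 * Example: addu a0, a1, a2 encodes as 0x00A62021; the major opcode is 0x00
 * (OPC_SPECIAL) and the function field (bits 5..0) is 0x21, so
 * MASK_SPECIAL() yields OPC_ADDU above.
 */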
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALS  = OPC_BLTZAL | 0x5, /* microMIPS */
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALS  = OPC_BGEZAL | 0x5, /* microMIPS */
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E.  */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3,  */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP.  */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
379 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
381 /* MIPS DSP Arithmetic Sub-class */
382 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
383 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
384 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
385 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
386 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
387 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
388 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
389 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
390 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
391 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
392 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
393 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
394 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
395 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
396 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
397 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
398 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
399 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
400 /* MIPS DSP Multiply Sub-class insns */
401 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
402 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
403 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
404 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
405 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
406 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
409 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
410 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
412 /* MIPS DSP Arithmetic Sub-class */
413 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
414 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
415 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
416 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
417 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
418 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
419 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
420 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
421 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
422 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
423 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
424 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
425 /* MIPS DSP Multiply Sub-class insns */
426 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
427 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
428 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
429 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
432 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
434 /* MIPS DSP Arithmetic Sub-class */
435 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
436 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
437 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
438 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
439 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
440 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
441 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
442 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
443 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
444 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
445 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
446 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
447 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
448 /* DSP Bit/Manipulation Sub-class */
449 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
450 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
451 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
452 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
453 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
456 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
458 /* MIPS DSP Arithmetic Sub-class */
459 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
460 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
461 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
462 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
463 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
464 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
465 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
466 /* DSP Compare-Pick Sub-class */
467 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
468 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
469 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
470 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
471 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
472 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
473 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
474 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
475 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
476 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
477 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
478 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
479 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
480 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
481 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
484 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
486 /* MIPS DSP GPR-Based Shift Sub-class */
487 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
488 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
489 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
490 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
491 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
492 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
493 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
494 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
495 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
496 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
497 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
498 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
499 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
500 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
501 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
502 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
503 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
504 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
505 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
506 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
507 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
508 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
511 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
513 /* MIPS DSP Multiply Sub-class insns */
514 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
515 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
516 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
517 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
518 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
519 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
520 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
521 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
522 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
523 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
524 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
525 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
526 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
527 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
528 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
529 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
530 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
531 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
532 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
533 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
534 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
535 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
538 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
540 /* DSP Bit/Manipulation Sub-class */
541 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
544 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
546 /* MIPS DSP Append Sub-class */
547 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
548 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
549 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
552 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
554 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
555 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
556 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
557 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
558 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
559 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
560 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
561 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
562 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
563 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
564 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
565 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
566 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
567 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
568 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
569 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
570 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
571 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
574 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
578 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
579 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
580 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
581 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
582 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
583 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
584 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
585 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
586 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
587 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
588 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
589 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
590 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
591 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
592 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
593 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
594 /* DSP Bit/Manipulation Sub-class */
595 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
596 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
597 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
598 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
599 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
600 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
603 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
605 /* MIPS DSP Multiply Sub-class insns */
606 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
607 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
608 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
609 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
610 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
611 /* MIPS DSP Arithmetic Sub-class */
612 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
613 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
614 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
615 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
616 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
617 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
618 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
619 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
620 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
621 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
622 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
623 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
624 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
625 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
626 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
627 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
628 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
629 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
630 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
631 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
632 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
635 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
637 /* DSP Compare-Pick Sub-class */
638 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
639 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
640 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
641 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
642 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
643 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
644 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
645 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
646 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
647 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
648 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
649 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
650 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
651 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
652 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
653 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
654 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
655 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
656 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
657 /* MIPS DSP Arithmetic Sub-class */
658 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
659 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
660 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
661 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
662 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
663 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
664 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
665 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
668 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
670 /* DSP Append Sub-class */
671 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
672 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
673 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
674 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
677 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
679 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
680 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
681 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
682 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
683 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
684 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
685 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
686 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
687 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
688 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
689 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
690 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
691 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
692 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
693 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
694 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
695 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
696 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
697 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
698 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
699 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
700 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
703 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
705 /* DSP Bit/Manipulation Sub-class */
706 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
709 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
711 /* MIPS DSP Multiply Sub-class insns */
712 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
713 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
714 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
715 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
716 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
717 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
718 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
719 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
720 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
721 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
722 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
723 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
724 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
725 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
726 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
727 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
728 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
729 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
730 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
731 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
732 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
733 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
734 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
735 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
736 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
737 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
740 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
742 /* MIPS DSP GPR-Based Shift Sub-class */
743 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
744 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
745 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
746 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
747 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
748 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
749 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
750 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
751 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
752 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
753 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
754 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
755 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
756 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
757 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
758 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
759 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
760 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
761 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
762 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
763 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
764 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
765 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
766 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
767 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
768 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,         /* single fp */
    FMT_D  = 17,         /* double fp */
    FMT_E  = 18,         /* extended fp */
    FMT_Q  = 19,         /* quad fp */
    FMT_W  = 20,         /* 32-bit fixed */
    FMT_L  = 21,         /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
};
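/*
 * Example: add.s $f0, $f1, $f2 encodes as 0x46020840, i.e. OPC_CP1 with
 * FMT_S (16) in the rs field, which MASK_CP1() reduces to OPC_S_FMT.
 */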
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2    = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2   = (0x01 << 21) | OPC_CP2,
    OPC_CFC2    = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2   = (0x03 << 21) | OPC_CP2,
    OPC_MTC2    = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2   = (0x05 << 21) | OPC_CP2,
    OPC_CTC2    = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2   = (0x07 << 21) | OPC_CP2,
    OPC_BC2     = (0x08 << 21) | OPC_CP2,
};
886 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
889 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
890 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
891 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
892 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
893 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
894 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
895 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
896 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
898 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
899 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
900 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
901 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
902 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
903 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
904 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
905 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
907 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
908 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
909 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
910 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
911 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
912 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
913 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
914 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
916 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
917 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
918 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
919 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
920 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
921 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
922 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
923 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
925 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
926 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
927 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
928 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
929 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
930 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
932 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
933 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
934 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
935 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
936 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
937 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
939 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
940 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
941 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
942 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
943 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
944 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
946 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
947 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
948 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
949 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
950 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
951 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
953 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
954 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
955 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
956 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
957 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
958 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
960 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
961 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
962 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
963 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
964 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
965 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
967 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
968 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
969 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
970 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
971 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
972 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
974 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
975 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
976 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
977 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
978 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
979 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
static target_ulong gen_opc_btarget[OPC_BUF_SIZE];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
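/*
 * These wrappers box a constant immediate into a TCGv_i32 temporary and pass
 * cpu_env as the first helper argument; e.g. gen_helper_0e0i(raise_exception,
 * excp) becomes a call to gen_helper_raise_exception(cpu_env, tmp) with tmp
 * holding excp, followed by freeing the temporary.
 */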
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int insn_flags;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition     */
    BS_EXCP     = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const regnames_ACX[] = {
    "ACX0", "ACX1", "ACX2", "ACX3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
#define MIPS_DEBUG(fmt, ...)                                                  \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x " fmt "\n",                   \
                          ctx->pc, ctx->opcode , ## __VA_ARGS__);             \
        }                                                                     \
    } while (0)

#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,            \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F))
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from ACX register.  */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
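/*
 * Both helpers above locate the shadow register set the same way: the CP0
 * SRSCtl.PSS field (previous shadow set number) is scaled by
 * 32 * sizeof(target_ulong), i.e. the size of one register file, and added
 * to cpu_env; the GPR number then selects the slot within that set.
 */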
/* Floating point register moves. */
static void gen_load_fpr32(TCGv_i32 t, int reg)
{
    tcg_gen_trunc_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(TCGv_i32 t, int reg)
{
    TCGv_i64 t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t64, fpu_f64[reg], 32);
        tcg_gen_trunc_i64_i32(t, t64);
        tcg_temp_free_i64(t64);
    } else {
        gen_load_fpr32(t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
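/*
 * With Status.FR = 0 (no MIPS_HFLAG_F64) a 64-bit FP value is split across
 * an even/odd register pair: the even register holds bits 31..0 and the odd
 * register bits 63..32, hence the reg & ~1 / reg | 1 indexing above.
 */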
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state (CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
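/*
 * PC and hflags are tracked in DisasContext during translation and only
 * flushed to the CPU state by save_cpu_state() right before operations that
 * may raise an exception (see generate_exception*() below and the ll/sc and
 * unaligned load/store paths).
 */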
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0e0i(raise_exception, excp);
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, a data reference in user mode
       with Status_UX = 0 should be cast to 32 bits and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled.  */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception(ctx, EXCP_DSPDIS);
        } else {
            generate_exception(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception(ctx, EXCP_DSPDIS);
        } else {
            generate_exception(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_cp1_64bitmode(ctx);                                             \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
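/*
 * Each FOP_CONDS() instantiation above defines one comparison emitter; e.g.
 * FOP_CONDS(abs, 1, d, FMT_D, 64) produces gen_cmpabs_d(), which dispatches
 * on the 4-bit condition code n to the matching cmpabs_d_* helper.
 */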
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    int l1 = gen_new_label();                                                \
    int l2 = gen_new_label();                                                \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    gen_helper_0e0i(raise_exception, EXCP_SC);                               \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
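/*
 * In the user-only build SC cannot trap on a competing store, so the load
 * records lladdr/llval and the store sequence above compares the address
 * against that reservation: a mismatch simply writes 0 to rt, while a match
 * stashes the operands in llreg/llnewval and raises EXCP_SC so the store can
 * be completed outside of generated code.
 */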
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}

static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
static void gen_ld(DisasContext *ctx, uint32_t opc,
                   int rt, int base, int16_t offset)
{
    const char *opn = "ld";
    TCGv t0, t1, t2;

    if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPU we must perform the
           actual memory access. */
        MIPS_DEBUG("NOP");
        return;
    }

    t0 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_LWU:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL);
        gen_store_gpr(t0, rt);
        opn = "lwu";
        break;
    case OPC_LD:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rt);
        opn = "ld";
        break;
    case OPC_LLD:
        save_cpu_state(ctx, 1);
        op_ld_lld(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lld";
        break;
    case OPC_LDL:
        t1 = tcg_temp_new();
        tcg_gen_andi_tl(t1, t0, 7);
#ifndef TARGET_WORDS_BIGENDIAN
        tcg_gen_xori_tl(t1, t1, 7);
#endif
        tcg_gen_shli_tl(t1, t1, 3);
        tcg_gen_andi_tl(t0, t0, ~7);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_shl_tl(t0, t0, t1);
        tcg_gen_xori_tl(t1, t1, 63);
        t2 = tcg_const_tl(0x7fffffffffffffffull);
        tcg_gen_shr_tl(t2, t2, t1);
        gen_load_gpr(t1, rt);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_temp_free(t2);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_temp_free(t1);
        gen_store_gpr(t0, rt);
        opn = "ldl";
        break;
    case OPC_LDR:
        t1 = tcg_temp_new();
        tcg_gen_andi_tl(t1, t0, 7);
#ifdef TARGET_WORDS_BIGENDIAN
        tcg_gen_xori_tl(t1, t1, 7);
#endif
        tcg_gen_shli_tl(t1, t1, 3);
        tcg_gen_andi_tl(t0, t0, ~7);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_shr_tl(t0, t0, t1);
        tcg_gen_xori_tl(t1, t1, 63);
        t2 = tcg_const_tl(0xfffffffffffffffeull);
        tcg_gen_shl_tl(t2, t2, t1);
        gen_load_gpr(t1, rt);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_temp_free(t2);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_temp_free(t1);
        gen_store_gpr(t0, rt);
        opn = "ldr";
        break;
    case OPC_LDPC:
        t1 = tcg_const_tl(pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_temp_free(t1);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rt);
        opn = "ldpc";
        break;
#endif
    case OPC_LWPC:
        t1 = tcg_const_tl(pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_temp_free(t1);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rt);
        opn = "lwpc";
        break;
    case OPC_LW:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rt);
        opn = "lw";
        break;
    case OPC_LH:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rt);
        opn = "lh";
        break;
    case OPC_LHU:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUW);
        gen_store_gpr(t0, rt);
        opn = "lhu";
        break;
    case OPC_LB:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_SB);
        gen_store_gpr(t0, rt);
        opn = "lb";
        break;
    case OPC_LBU:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rt);
        opn = "lbu";
        break;
    case OPC_LWL:
        t1 = tcg_temp_new();
        tcg_gen_andi_tl(t1, t0, 3);
#ifndef TARGET_WORDS_BIGENDIAN
        tcg_gen_xori_tl(t1, t1, 3);
#endif
        tcg_gen_shli_tl(t1, t1, 3);
        tcg_gen_andi_tl(t0, t0, ~3);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_shl_tl(t0, t0, t1);
        tcg_gen_xori_tl(t1, t1, 31);
        t2 = tcg_const_tl(0x7fffffffull);
        tcg_gen_shr_tl(t2, t2, t1);
        gen_load_gpr(t1, rt);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_temp_free(t2);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_temp_free(t1);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, rt);
        opn = "lwl";
        break;
    case OPC_LWR:
        t1 = tcg_temp_new();
        tcg_gen_andi_tl(t1, t0, 3);
#ifdef TARGET_WORDS_BIGENDIAN
        tcg_gen_xori_tl(t1, t1, 3);
#endif
        tcg_gen_shli_tl(t1, t1, 3);
        tcg_gen_andi_tl(t0, t0, ~3);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_shr_tl(t0, t0, t1);
        tcg_gen_xori_tl(t1, t1, 31);
        t2 = tcg_const_tl(0xfffffffeull);
        tcg_gen_shl_tl(t2, t2, t1);
        gen_load_gpr(t1, rt);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_temp_free(t2);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_temp_free(t1);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, rt);
        opn = "lwr";
        break;
    case OPC_LL:
        save_cpu_state(ctx, 1);
        op_ld_ll(t0, t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "ll";
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
}
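
/* Store: aligned SB/SH/SW (and SD on 64-bit targets) are emitted inline
   with tcg_gen_qemu_st_tl, while the unaligned pairs SWL/SWR and SDL/SDR
   are handled by helpers, so the CPU state is saved first in case the
   helper has to raise an exception. */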
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    const char *opn = "st";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SD:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        opn = "sd";
        break;
    case OPC_SDL:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(sdl, t1, t0, ctx->mem_idx);
        opn = "sdl";
        break;
    case OPC_SDR:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(sdr, t1, t0, ctx->mem_idx);
        opn = "sdr";
        break;
#endif
    case OPC_SW:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        opn = "sw";
        break;
    case OPC_SH:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW);
        opn = "sh";
        break;
    case OPC_SB:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_8);
        opn = "sb";
        break;
    case OPC_SWL:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(swl, t1, t0, ctx->mem_idx);
        opn = "swl";
        break;
    case OPC_SWR:
        save_cpu_state(ctx, 1);
        gen_helper_0e2i(swr, t1, t0, ctx->mem_idx);
        opn = "swr";
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    const char *opn = "st_cond";
    TCGv t0, t1;

#ifdef CONFIG_USER_ONLY
    /* The user-mode op_st_* expansion branches over labels, so the address
       and data must live in local temps that stay valid across the branch. */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
#else
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
#endif
    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SCD:
        save_cpu_state(ctx, 1);
        op_st_scd(t1, t0, rt, ctx);
        opn = "scd";
        break;
#endif
    case OPC_SC:
        save_cpu_state(ctx, 1);
        op_st_sc(t1, t0, rt, ctx);
        opn = "sc";
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    const char *opn = "flt_ldst";
    TCGv t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_qemu_ld_i32(fp0, t0, ctx->mem_idx, MO_TESL);
            gen_store_fpr32(fp0, ft);
            tcg_temp_free_i32(fp0);
        }
        opn = "lwc1";
        break;
    case OPC_SWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(fp0, ft);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        }
        opn = "swc1";
        break;
    case OPC_LDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, ft);
            tcg_temp_free_i64(fp0);
        }
        opn = "ldc1";
        break;
    case OPC_SDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, ft);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        opn = "sdc1";
        break;
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
 out:
    tcg_temp_free(t0);
}
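
/* Coprocessor 1 (FPU) loads and stores are only legal when the CPU
   actually has an FPU (Config1.FP set) and CP1 access is enabled;
   otherwise a coprocessor-unusable exception for CP1 is raised. */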
static void gen_cop1_ldst(CPUMIPSState *env, DisasContext *ctx,
                          uint32_t op, int rt, int rs, int16_t imm)
{
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        gen_flt_ldst(ctx, op, rt, rs, imm);
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
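
/* The signed ADDI/DADDI below (like ADD/SUB further down) detect overflow
   without comparing the operands directly: for an addition, overflow
   occurred iff both operands have the same sign and the result's sign
   differs, which is computed with two XORs and an AND on the sign bits
   before a single conditional branch to the overflow exception. */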
/* Arithmetic with immediate operand */
static void gen_arith_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        return;
    }
    switch (opc) {
    case OPC_ADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);
            tcg_gen_ext32s_tl(t0, t0);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_gen_ext32s_tl(t0, t0);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        opn = "addi";
        break;
    case OPC_ADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        opn = "addiu";
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADDI:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            int l1 = gen_new_label();

            gen_load_gpr(t1, rs);
            tcg_gen_addi_tl(t0, t1, uimm);

            tcg_gen_xori_tl(t1, t1, ~uimm);
            tcg_gen_xori_tl(t2, t0, uimm);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_temp_free(t2);
            tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
            tcg_temp_free(t1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            gen_store_gpr(t0, rt);
            tcg_temp_free(t0);
        }
        opn = "daddi";
        break;
    case OPC_DADDIU:
        if (rs != 0) {
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        } else {
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        }
        opn = "daddiu";
        break;
#endif
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
}
/* Logic with immediate operand */
static void gen_logic_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }
    uimm = (uint16_t)imm;
    switch (opc) {
    case OPC_ANDI:
        if (likely(rs != 0))
            tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], 0);
        MIPS_DEBUG("andi %s, %s, " TARGET_FMT_lx, regnames[rt],
                   regnames[rs], uimm);
        break;
    case OPC_ORI:
        if (rs != 0)
            tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        MIPS_DEBUG("ori %s, %s, " TARGET_FMT_lx, regnames[rt],
                   regnames[rs], uimm);
        break;
    case OPC_XORI:
        if (likely(rs != 0))
            tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
        else
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
        MIPS_DEBUG("xori %s, %s, " TARGET_FMT_lx, regnames[rt],
                   regnames[rs], uimm);
        break;
    case OPC_LUI:
        tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
        MIPS_DEBUG("lui %s, " TARGET_FMT_lx, regnames[rt], uimm);
        break;

    default:
        MIPS_DEBUG("Unknown logical immediate opcode %08x", opc);
        break;
    }
}
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        MIPS_DEBUG("NOP");
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        opn = "slti";
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        opn = "sltiu";
        break;
    }
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
    tcg_temp_free(t0);
}
2100 /* Shifts with immediate operand */
2101 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2102 int rt
, int rs
, int16_t imm
)
2104 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2105 const char *opn
= "imm shift";
2109 /* If no destination, treat it as a NOP. */
2114 t0
= tcg_temp_new();
2115 gen_load_gpr(t0
, rs
);
2118 tcg_gen_shli_tl(t0
, t0
, uimm
);
2119 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2123 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2128 tcg_gen_ext32u_tl(t0
, t0
);
2129 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2131 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2137 TCGv_i32 t1
= tcg_temp_new_i32();
2139 tcg_gen_trunc_tl_i32(t1
, t0
);
2140 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2141 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2142 tcg_temp_free_i32(t1
);
2144 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2148 #if defined(TARGET_MIPS64)
2150 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2154 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2158 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2163 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2165 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2170 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2174 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2178 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2182 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2187 (void)opn
; /* avoid a compiler warning */
2188 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
2193 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2194 int rd
, int rs
, int rt
)
2196 const char *opn
= "arith";
2198 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2199 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2200 /* If no destination, treat it as a NOP.
2201 For add & sub, we must generate the overflow exception when needed. */
2209 TCGv t0
= tcg_temp_local_new();
2210 TCGv t1
= tcg_temp_new();
2211 TCGv t2
= tcg_temp_new();
2212 int l1
= gen_new_label();
2214 gen_load_gpr(t1
, rs
);
2215 gen_load_gpr(t2
, rt
);
2216 tcg_gen_add_tl(t0
, t1
, t2
);
2217 tcg_gen_ext32s_tl(t0
, t0
);
2218 tcg_gen_xor_tl(t1
, t1
, t2
);
2219 tcg_gen_xor_tl(t2
, t0
, t2
);
2220 tcg_gen_andc_tl(t1
, t2
, t1
);
2222 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2224 /* operands of same sign, result different sign */
2225 generate_exception(ctx
, EXCP_OVERFLOW
);
2227 gen_store_gpr(t0
, rd
);
2233 if (rs
!= 0 && rt
!= 0) {
2234 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2235 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2236 } else if (rs
== 0 && rt
!= 0) {
2237 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2238 } else if (rs
!= 0 && rt
== 0) {
2239 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2241 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2247 TCGv t0
= tcg_temp_local_new();
2248 TCGv t1
= tcg_temp_new();
2249 TCGv t2
= tcg_temp_new();
2250 int l1
= gen_new_label();
2252 gen_load_gpr(t1
, rs
);
2253 gen_load_gpr(t2
, rt
);
2254 tcg_gen_sub_tl(t0
, t1
, t2
);
2255 tcg_gen_ext32s_tl(t0
, t0
);
2256 tcg_gen_xor_tl(t2
, t1
, t2
);
2257 tcg_gen_xor_tl(t1
, t0
, t1
);
2258 tcg_gen_and_tl(t1
, t1
, t2
);
2260 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2262 /* operands of different sign, first operand and result different sign */
2263 generate_exception(ctx
, EXCP_OVERFLOW
);
2265 gen_store_gpr(t0
, rd
);
2271 if (rs
!= 0 && rt
!= 0) {
2272 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2273 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2274 } else if (rs
== 0 && rt
!= 0) {
2275 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2276 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2277 } else if (rs
!= 0 && rt
== 0) {
2278 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2280 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2284 #if defined(TARGET_MIPS64)
2287 TCGv t0
= tcg_temp_local_new();
2288 TCGv t1
= tcg_temp_new();
2289 TCGv t2
= tcg_temp_new();
2290 int l1
= gen_new_label();
2292 gen_load_gpr(t1
, rs
);
2293 gen_load_gpr(t2
, rt
);
2294 tcg_gen_add_tl(t0
, t1
, t2
);
2295 tcg_gen_xor_tl(t1
, t1
, t2
);
2296 tcg_gen_xor_tl(t2
, t0
, t2
);
2297 tcg_gen_andc_tl(t1
, t2
, t1
);
2299 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2301 /* operands of same sign, result different sign */
2302 generate_exception(ctx
, EXCP_OVERFLOW
);
2304 gen_store_gpr(t0
, rd
);
2310 if (rs
!= 0 && rt
!= 0) {
2311 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2312 } else if (rs
== 0 && rt
!= 0) {
2313 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2314 } else if (rs
!= 0 && rt
== 0) {
2315 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2317 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2323 TCGv t0
= tcg_temp_local_new();
2324 TCGv t1
= tcg_temp_new();
2325 TCGv t2
= tcg_temp_new();
2326 int l1
= gen_new_label();
2328 gen_load_gpr(t1
, rs
);
2329 gen_load_gpr(t2
, rt
);
2330 tcg_gen_sub_tl(t0
, t1
, t2
);
2331 tcg_gen_xor_tl(t2
, t1
, t2
);
2332 tcg_gen_xor_tl(t1
, t0
, t1
);
2333 tcg_gen_and_tl(t1
, t1
, t2
);
2335 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2337 /* operands of different sign, first operand and result different sign */
2338 generate_exception(ctx
, EXCP_OVERFLOW
);
2340 gen_store_gpr(t0
, rd
);
2346 if (rs
!= 0 && rt
!= 0) {
2347 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2348 } else if (rs
== 0 && rt
!= 0) {
2349 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2350 } else if (rs
!= 0 && rt
== 0) {
2351 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2353 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2359 if (likely(rs
!= 0 && rt
!= 0)) {
2360 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2361 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2363 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2368 (void)opn
; /* avoid a compiler warning */
2369 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2372 /* Conditional move */
2373 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2374 int rd
, int rs
, int rt
)
2376 const char *opn
= "cond move";
2380 /* If no destination, treat it as a NOP. */
2385 t0
= tcg_temp_new();
2386 gen_load_gpr(t0
, rt
);
2387 t1
= tcg_const_tl(0);
2388 t2
= tcg_temp_new();
2389 gen_load_gpr(t2
, rs
);
2392 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2396 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2404 (void)opn
; /* avoid a compiler warning */
2405 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2409 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2410 int rd
, int rs
, int rt
)
2412 const char *opn
= "logic";
2415 /* If no destination, treat it as a NOP. */
2422 if (likely(rs
!= 0 && rt
!= 0)) {
2423 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2425 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2430 if (rs
!= 0 && rt
!= 0) {
2431 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2432 } else if (rs
== 0 && rt
!= 0) {
2433 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2434 } else if (rs
!= 0 && rt
== 0) {
2435 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2437 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2442 if (likely(rs
!= 0 && rt
!= 0)) {
2443 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2444 } else if (rs
== 0 && rt
!= 0) {
2445 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2446 } else if (rs
!= 0 && rt
== 0) {
2447 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2449 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2454 if (likely(rs
!= 0 && rt
!= 0)) {
2455 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2456 } else if (rs
== 0 && rt
!= 0) {
2457 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2458 } else if (rs
!= 0 && rt
== 0) {
2459 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2461 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2466 (void)opn
; /* avoid a compiler warning */
2467 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2470 /* Set on lower than */
2471 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2472 int rd
, int rs
, int rt
)
2474 const char *opn
= "slt";
2478 /* If no destination, treat it as a NOP. */
2483 t0
= tcg_temp_new();
2484 t1
= tcg_temp_new();
2485 gen_load_gpr(t0
, rs
);
2486 gen_load_gpr(t1
, rt
);
2489 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2493 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2497 (void)opn
; /* avoid a compiler warning */
2498 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2504 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2505 int rd
, int rs
, int rt
)
2507 const char *opn
= "shifts";
2511 /* If no destination, treat it as a NOP.
2512 For add & sub, we must generate the overflow exception when needed. */
2517 t0
= tcg_temp_new();
2518 t1
= tcg_temp_new();
2519 gen_load_gpr(t0
, rs
);
2520 gen_load_gpr(t1
, rt
);
2523 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2524 tcg_gen_shl_tl(t0
, t1
, t0
);
2525 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2529 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2530 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2534 tcg_gen_ext32u_tl(t1
, t1
);
2535 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2536 tcg_gen_shr_tl(t0
, t1
, t0
);
2537 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2542 TCGv_i32 t2
= tcg_temp_new_i32();
2543 TCGv_i32 t3
= tcg_temp_new_i32();
2545 tcg_gen_trunc_tl_i32(t2
, t0
);
2546 tcg_gen_trunc_tl_i32(t3
, t1
);
2547 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2548 tcg_gen_rotr_i32(t2
, t3
, t2
);
2549 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2550 tcg_temp_free_i32(t2
);
2551 tcg_temp_free_i32(t3
);
2555 #if defined(TARGET_MIPS64)
2557 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2558 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2562 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2563 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2567 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2568 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2572 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2573 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2578 (void)opn
; /* avoid a compiler warning */
2579 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2584 /* Arithmetic on HI/LO registers */
2585 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
2587 const char *opn
= "hilo";
2589 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2601 #if defined(TARGET_MIPS64)
2603 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
2607 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
2612 #if defined(TARGET_MIPS64)
2614 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
2618 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
2624 #if defined(TARGET_MIPS64)
2626 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
2630 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
2633 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
2639 #if defined(TARGET_MIPS64)
2641 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
2645 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
2648 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
2653 (void)opn
; /* avoid a compiler warning */
2654 MIPS_DEBUG("%s %s", opn
, regnames
[reg
]);
2657 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
2658 int acc
, int rs
, int rt
)
2660 const char *opn
= "mul/div";
2663 t0
= tcg_temp_new();
2664 t1
= tcg_temp_new();
2666 gen_load_gpr(t0
, rs
);
2667 gen_load_gpr(t1
, rt
);
2676 TCGv t2
= tcg_temp_new();
2677 TCGv t3
= tcg_temp_new();
2678 tcg_gen_ext32s_tl(t0
, t0
);
2679 tcg_gen_ext32s_tl(t1
, t1
);
2680 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
2681 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
2682 tcg_gen_and_tl(t2
, t2
, t3
);
2683 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
2684 tcg_gen_or_tl(t2
, t2
, t3
);
2685 tcg_gen_movi_tl(t3
, 0);
2686 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
2687 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
2688 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
2689 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
2690 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
2698 TCGv t2
= tcg_const_tl(0);
2699 TCGv t3
= tcg_const_tl(1);
2700 tcg_gen_ext32u_tl(t0
, t0
);
2701 tcg_gen_ext32u_tl(t1
, t1
);
2702 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
2703 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
2704 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
2705 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
2706 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
2714 TCGv_i32 t2
= tcg_temp_new_i32();
2715 TCGv_i32 t3
= tcg_temp_new_i32();
2716 tcg_gen_trunc_tl_i32(t2
, t0
);
2717 tcg_gen_trunc_tl_i32(t3
, t1
);
2718 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
2719 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
2720 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
2721 tcg_temp_free_i32(t2
);
2722 tcg_temp_free_i32(t3
);
2728 TCGv_i32 t2
= tcg_temp_new_i32();
2729 TCGv_i32 t3
= tcg_temp_new_i32();
2730 tcg_gen_trunc_tl_i32(t2
, t0
);
2731 tcg_gen_trunc_tl_i32(t3
, t1
);
2732 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
2733 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
2734 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
2735 tcg_temp_free_i32(t2
);
2736 tcg_temp_free_i32(t3
);
2740 #if defined(TARGET_MIPS64)
2743 TCGv t2
= tcg_temp_new();
2744 TCGv t3
= tcg_temp_new();
2745 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
2746 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
2747 tcg_gen_and_tl(t2
, t2
, t3
);
2748 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
2749 tcg_gen_or_tl(t2
, t2
, t3
);
2750 tcg_gen_movi_tl(t3
, 0);
2751 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
2752 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
2753 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
2761 TCGv t2
= tcg_const_tl(0);
2762 TCGv t3
= tcg_const_tl(1);
2763 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
2764 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
2765 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
2772 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
2776 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
2782 TCGv_i64 t2
= tcg_temp_new_i64();
2783 TCGv_i64 t3
= tcg_temp_new_i64();
2785 tcg_gen_ext_tl_i64(t2
, t0
);
2786 tcg_gen_ext_tl_i64(t3
, t1
);
2787 tcg_gen_mul_i64(t2
, t2
, t3
);
2788 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
2789 tcg_gen_add_i64(t2
, t2
, t3
);
2790 tcg_temp_free_i64(t3
);
2791 tcg_gen_trunc_i64_tl(t0
, t2
);
2792 tcg_gen_shri_i64(t2
, t2
, 32);
2793 tcg_gen_trunc_i64_tl(t1
, t2
);
2794 tcg_temp_free_i64(t2
);
2795 tcg_gen_ext32s_tl(cpu_LO
[acc
], t0
);
2796 tcg_gen_ext32s_tl(cpu_HI
[acc
], t1
);
2802 TCGv_i64 t2
= tcg_temp_new_i64();
2803 TCGv_i64 t3
= tcg_temp_new_i64();
2805 tcg_gen_ext32u_tl(t0
, t0
);
2806 tcg_gen_ext32u_tl(t1
, t1
);
2807 tcg_gen_extu_tl_i64(t2
, t0
);
2808 tcg_gen_extu_tl_i64(t3
, t1
);
2809 tcg_gen_mul_i64(t2
, t2
, t3
);
2810 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
2811 tcg_gen_add_i64(t2
, t2
, t3
);
2812 tcg_temp_free_i64(t3
);
2813 tcg_gen_trunc_i64_tl(t0
, t2
);
2814 tcg_gen_shri_i64(t2
, t2
, 32);
2815 tcg_gen_trunc_i64_tl(t1
, t2
);
2816 tcg_temp_free_i64(t2
);
2817 tcg_gen_ext32s_tl(cpu_LO
[acc
], t0
);
2818 tcg_gen_ext32s_tl(cpu_HI
[acc
], t1
);
2824 TCGv_i64 t2
= tcg_temp_new_i64();
2825 TCGv_i64 t3
= tcg_temp_new_i64();
2827 tcg_gen_ext_tl_i64(t2
, t0
);
2828 tcg_gen_ext_tl_i64(t3
, t1
);
2829 tcg_gen_mul_i64(t2
, t2
, t3
);
2830 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
2831 tcg_gen_sub_i64(t2
, t3
, t2
);
2832 tcg_temp_free_i64(t3
);
2833 tcg_gen_trunc_i64_tl(t0
, t2
);
2834 tcg_gen_shri_i64(t2
, t2
, 32);
2835 tcg_gen_trunc_i64_tl(t1
, t2
);
2836 tcg_temp_free_i64(t2
);
2837 tcg_gen_ext32s_tl(cpu_LO
[acc
], t0
);
2838 tcg_gen_ext32s_tl(cpu_HI
[acc
], t1
);
2844 TCGv_i64 t2
= tcg_temp_new_i64();
2845 TCGv_i64 t3
= tcg_temp_new_i64();
2847 tcg_gen_ext32u_tl(t0
, t0
);
2848 tcg_gen_ext32u_tl(t1
, t1
);
2849 tcg_gen_extu_tl_i64(t2
, t0
);
2850 tcg_gen_extu_tl_i64(t3
, t1
);
2851 tcg_gen_mul_i64(t2
, t2
, t3
);
2852 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
2853 tcg_gen_sub_i64(t2
, t3
, t2
);
2854 tcg_temp_free_i64(t3
);
2855 tcg_gen_trunc_i64_tl(t0
, t2
);
2856 tcg_gen_shri_i64(t2
, t2
, 32);
2857 tcg_gen_trunc_i64_tl(t1
, t2
);
2858 tcg_temp_free_i64(t2
);
2859 tcg_gen_ext32s_tl(cpu_LO
[acc
], t0
);
2860 tcg_gen_ext32s_tl(cpu_HI
[acc
], t1
);
2866 generate_exception(ctx
, EXCP_RI
);
2869 (void)opn
; /* avoid a compiler warning */
2870 MIPS_DEBUG("%s %s %s", opn
, regnames
[rs
], regnames
[rt
]);
2876 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2877 int rd
, int rs
, int rt
)
2879 const char *opn
= "mul vr54xx";
2880 TCGv t0
= tcg_temp_new();
2881 TCGv t1
= tcg_temp_new();
2883 gen_load_gpr(t0
, rs
);
2884 gen_load_gpr(t1
, rt
);
2887 case OPC_VR54XX_MULS
:
2888 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
2891 case OPC_VR54XX_MULSU
:
2892 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
2895 case OPC_VR54XX_MACC
:
2896 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
2899 case OPC_VR54XX_MACCU
:
2900 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
2903 case OPC_VR54XX_MSAC
:
2904 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
2907 case OPC_VR54XX_MSACU
:
2908 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
2911 case OPC_VR54XX_MULHI
:
2912 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
2915 case OPC_VR54XX_MULHIU
:
2916 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
2919 case OPC_VR54XX_MULSHI
:
2920 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
2923 case OPC_VR54XX_MULSHIU
:
2924 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
2927 case OPC_VR54XX_MACCHI
:
2928 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
2931 case OPC_VR54XX_MACCHIU
:
2932 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
2935 case OPC_VR54XX_MSACHI
:
2936 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
2939 case OPC_VR54XX_MSACHIU
:
2940 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
2944 MIPS_INVAL("mul vr54xx");
2945 generate_exception(ctx
, EXCP_RI
);
2948 gen_store_gpr(t0
, rd
);
2949 (void)opn
; /* avoid a compiler warning */
2950 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2957 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
2960 const char *opn
= "CLx";
2968 t0
= tcg_temp_new();
2969 gen_load_gpr(t0
, rs
);
2972 gen_helper_clo(cpu_gpr
[rd
], t0
);
2976 gen_helper_clz(cpu_gpr
[rd
], t0
);
2979 #if defined(TARGET_MIPS64)
2981 gen_helper_dclo(cpu_gpr
[rd
], t0
);
2985 gen_helper_dclz(cpu_gpr
[rd
], t0
);
2990 (void)opn
; /* avoid a compiler warning */
2991 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2995 /* Godson integer instructions */
2996 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
2997 int rd
, int rs
, int rt
)
2999 const char *opn
= "loongson";
3011 case OPC_MULTU_G_2E
:
3012 case OPC_MULTU_G_2F
:
3013 #if defined(TARGET_MIPS64)
3014 case OPC_DMULT_G_2E
:
3015 case OPC_DMULT_G_2F
:
3016 case OPC_DMULTU_G_2E
:
3017 case OPC_DMULTU_G_2F
:
3019 t0
= tcg_temp_new();
3020 t1
= tcg_temp_new();
3023 t0
= tcg_temp_local_new();
3024 t1
= tcg_temp_local_new();
3028 gen_load_gpr(t0
, rs
);
3029 gen_load_gpr(t1
, rt
);
3034 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3035 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3038 case OPC_MULTU_G_2E
:
3039 case OPC_MULTU_G_2F
:
3040 tcg_gen_ext32u_tl(t0
, t0
);
3041 tcg_gen_ext32u_tl(t1
, t1
);
3042 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3043 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3049 int l1
= gen_new_label();
3050 int l2
= gen_new_label();
3051 int l3
= gen_new_label();
3052 tcg_gen_ext32s_tl(t0
, t0
);
3053 tcg_gen_ext32s_tl(t1
, t1
);
3054 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3055 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3058 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3059 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3060 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3063 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3064 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3072 int l1
= gen_new_label();
3073 int l2
= gen_new_label();
3074 tcg_gen_ext32u_tl(t0
, t0
);
3075 tcg_gen_ext32u_tl(t1
, t1
);
3076 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3077 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3080 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3081 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3089 int l1
= gen_new_label();
3090 int l2
= gen_new_label();
3091 int l3
= gen_new_label();
3092 tcg_gen_ext32u_tl(t0
, t0
);
3093 tcg_gen_ext32u_tl(t1
, t1
);
3094 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3095 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3096 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3098 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3101 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3102 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3110 int l1
= gen_new_label();
3111 int l2
= gen_new_label();
3112 tcg_gen_ext32u_tl(t0
, t0
);
3113 tcg_gen_ext32u_tl(t1
, t1
);
3114 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3115 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3118 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3119 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3124 #if defined(TARGET_MIPS64)
3125 case OPC_DMULT_G_2E
:
3126 case OPC_DMULT_G_2F
:
3127 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3130 case OPC_DMULTU_G_2E
:
3131 case OPC_DMULTU_G_2F
:
3132 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3138 int l1
= gen_new_label();
3139 int l2
= gen_new_label();
3140 int l3
= gen_new_label();
3141 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3142 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3145 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3146 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3147 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3150 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3155 case OPC_DDIVU_G_2E
:
3156 case OPC_DDIVU_G_2F
:
3158 int l1
= gen_new_label();
3159 int l2
= gen_new_label();
3160 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3161 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3164 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3172 int l1
= gen_new_label();
3173 int l2
= gen_new_label();
3174 int l3
= gen_new_label();
3175 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3176 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3177 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3179 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3182 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3187 case OPC_DMODU_G_2E
:
3188 case OPC_DMODU_G_2F
:
3190 int l1
= gen_new_label();
3191 int l2
= gen_new_label();
3192 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3193 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3196 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3204 (void)opn
; /* avoid a compiler warning */
3205 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
3210 /* Loongson multimedia instructions */
3211 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3213 const char *opn
= "loongson_cp2";
3214 uint32_t opc
, shift_max
;
3217 opc
= MASK_LMI(ctx
->opcode
);
3223 t0
= tcg_temp_local_new_i64();
3224 t1
= tcg_temp_local_new_i64();
3227 t0
= tcg_temp_new_i64();
3228 t1
= tcg_temp_new_i64();
3232 gen_load_fpr64(ctx
, t0
, rs
);
3233 gen_load_fpr64(ctx
, t1
, rt
);
3235 #define LMI_HELPER(UP, LO) \
3236 case OPC_##UP: gen_helper_##LO(t0, t0, t1); opn = #LO; break
3237 #define LMI_HELPER_1(UP, LO) \
3238 case OPC_##UP: gen_helper_##LO(t0, t0); opn = #LO; break
3239 #define LMI_DIRECT(UP, LO, OP) \
3240 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); opn = #LO; break
3243 LMI_HELPER(PADDSH
, paddsh
);
3244 LMI_HELPER(PADDUSH
, paddush
);
3245 LMI_HELPER(PADDH
, paddh
);
3246 LMI_HELPER(PADDW
, paddw
);
3247 LMI_HELPER(PADDSB
, paddsb
);
3248 LMI_HELPER(PADDUSB
, paddusb
);
3249 LMI_HELPER(PADDB
, paddb
);
3251 LMI_HELPER(PSUBSH
, psubsh
);
3252 LMI_HELPER(PSUBUSH
, psubush
);
3253 LMI_HELPER(PSUBH
, psubh
);
3254 LMI_HELPER(PSUBW
, psubw
);
3255 LMI_HELPER(PSUBSB
, psubsb
);
3256 LMI_HELPER(PSUBUSB
, psubusb
);
3257 LMI_HELPER(PSUBB
, psubb
);
3259 LMI_HELPER(PSHUFH
, pshufh
);
3260 LMI_HELPER(PACKSSWH
, packsswh
);
3261 LMI_HELPER(PACKSSHB
, packsshb
);
3262 LMI_HELPER(PACKUSHB
, packushb
);
3264 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3265 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3266 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3267 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3268 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3269 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3271 LMI_HELPER(PAVGH
, pavgh
);
3272 LMI_HELPER(PAVGB
, pavgb
);
3273 LMI_HELPER(PMAXSH
, pmaxsh
);
3274 LMI_HELPER(PMINSH
, pminsh
);
3275 LMI_HELPER(PMAXUB
, pmaxub
);
3276 LMI_HELPER(PMINUB
, pminub
);
3278 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3279 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3280 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3281 LMI_HELPER(PCMPGTH
, pcmpgth
);
3282 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3283 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3285 LMI_HELPER(PSLLW
, psllw
);
3286 LMI_HELPER(PSLLH
, psllh
);
3287 LMI_HELPER(PSRLW
, psrlw
);
3288 LMI_HELPER(PSRLH
, psrlh
);
3289 LMI_HELPER(PSRAW
, psraw
);
3290 LMI_HELPER(PSRAH
, psrah
);
3292 LMI_HELPER(PMULLH
, pmullh
);
3293 LMI_HELPER(PMULHH
, pmulhh
);
3294 LMI_HELPER(PMULHUH
, pmulhuh
);
3295 LMI_HELPER(PMADDHW
, pmaddhw
);
3297 LMI_HELPER(PASUBUB
, pasubub
);
3298 LMI_HELPER_1(BIADD
, biadd
);
3299 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3301 LMI_DIRECT(PADDD
, paddd
, add
);
3302 LMI_DIRECT(PSUBD
, psubd
, sub
);
3303 LMI_DIRECT(XOR_CP2
, xor, xor);
3304 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3305 LMI_DIRECT(AND_CP2
, and, and);
3306 LMI_DIRECT(PANDN
, pandn
, andc
);
3307 LMI_DIRECT(OR
, or, or);
3310 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3314 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3318 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3322 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3327 tcg_gen_andi_i64(t1
, t1
, 3);
3328 tcg_gen_shli_i64(t1
, t1
, 4);
3329 tcg_gen_shr_i64(t0
, t0
, t1
);
3330 tcg_gen_ext16u_i64(t0
, t0
);
3335 tcg_gen_add_i64(t0
, t0
, t1
);
3336 tcg_gen_ext32s_i64(t0
, t0
);
3340 tcg_gen_sub_i64(t0
, t0
, t1
);
3341 tcg_gen_ext32s_i64(t0
, t0
);
3370 /* Make sure shift count isn't TCG undefined behaviour. */
3371 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
3376 tcg_gen_shl_i64(t0
, t0
, t1
);
3380 /* Since SRA is UndefinedResult without sign-extended inputs,
3381 we can treat SRA and DSRA the same. */
3382 tcg_gen_sar_i64(t0
, t0
, t1
);
3385 /* We want to shift in zeros for SRL; zero-extend first. */
3386 tcg_gen_ext32u_i64(t0
, t0
);
3389 tcg_gen_shr_i64(t0
, t0
, t1
);
3393 if (shift_max
== 32) {
3394 tcg_gen_ext32s_i64(t0
, t0
);
3397 /* Shifts larger than MAX produce zero. */
3398 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
3399 tcg_gen_neg_i64(t1
, t1
);
3400 tcg_gen_and_i64(t0
, t0
, t1
);
3406 TCGv_i64 t2
= tcg_temp_new_i64();
3407 int lab
= gen_new_label();
3409 tcg_gen_mov_i64(t2
, t0
);
3410 tcg_gen_add_i64(t0
, t1
, t2
);
3411 if (opc
== OPC_ADD_CP2
) {
3412 tcg_gen_ext32s_i64(t0
, t0
);
3414 tcg_gen_xor_i64(t1
, t1
, t2
);
3415 tcg_gen_xor_i64(t2
, t2
, t0
);
3416 tcg_gen_andc_i64(t1
, t2
, t1
);
3417 tcg_temp_free_i64(t2
);
3418 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
3419 generate_exception(ctx
, EXCP_OVERFLOW
);
3422 opn
= (opc
== OPC_ADD_CP2
? "add" : "dadd");
3429 TCGv_i64 t2
= tcg_temp_new_i64();
3430 int lab
= gen_new_label();
3432 tcg_gen_mov_i64(t2
, t0
);
3433 tcg_gen_sub_i64(t0
, t1
, t2
);
3434 if (opc
== OPC_SUB_CP2
) {
3435 tcg_gen_ext32s_i64(t0
, t0
);
3437 tcg_gen_xor_i64(t1
, t1
, t2
);
3438 tcg_gen_xor_i64(t2
, t2
, t0
);
3439 tcg_gen_and_i64(t1
, t1
, t2
);
3440 tcg_temp_free_i64(t2
);
3441 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
3442 generate_exception(ctx
, EXCP_OVERFLOW
);
3445 opn
= (opc
== OPC_SUB_CP2
? "sub" : "dsub");
3450 tcg_gen_ext32u_i64(t0
, t0
);
3451 tcg_gen_ext32u_i64(t1
, t1
);
3452 tcg_gen_mul_i64(t0
, t0
, t1
);
3462 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
3463 FD field is the CC field? */
3466 generate_exception(ctx
, EXCP_RI
);
3473 gen_store_fpr64(ctx
, t0
, rd
);
3475 (void)opn
; /* avoid a compiler warning */
3476 MIPS_DEBUG("%s %s, %s, %s", opn
,
3477 fregnames
[rd
], fregnames
[rs
], fregnames
[rt
]);
3478 tcg_temp_free_i64(t0
);
3479 tcg_temp_free_i64(t1
);
3483 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
3484 int rs
, int rt
, int16_t imm
)
3487 TCGv t0
= tcg_temp_new();
3488 TCGv t1
= tcg_temp_new();
3491 /* Load needed operands */
3499 /* Compare two registers */
3501 gen_load_gpr(t0
, rs
);
3502 gen_load_gpr(t1
, rt
);
3512 /* Compare register to immediate */
3513 if (rs
!= 0 || imm
!= 0) {
3514 gen_load_gpr(t0
, rs
);
3515 tcg_gen_movi_tl(t1
, (int32_t)imm
);
3522 case OPC_TEQ
: /* rs == rs */
3523 case OPC_TEQI
: /* r0 == 0 */
3524 case OPC_TGE
: /* rs >= rs */
3525 case OPC_TGEI
: /* r0 >= 0 */
3526 case OPC_TGEU
: /* rs >= rs unsigned */
3527 case OPC_TGEIU
: /* r0 >= 0 unsigned */
3529 generate_exception(ctx
, EXCP_TRAP
);
3531 case OPC_TLT
: /* rs < rs */
3532 case OPC_TLTI
: /* r0 < 0 */
3533 case OPC_TLTU
: /* rs < rs unsigned */
3534 case OPC_TLTIU
: /* r0 < 0 unsigned */
3535 case OPC_TNE
: /* rs != rs */
3536 case OPC_TNEI
: /* r0 != 0 */
3537 /* Never trap: treat as NOP. */
3541 int l1
= gen_new_label();
3546 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
3550 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
3554 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
3558 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
3562 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
3566 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
3569 generate_exception(ctx
, EXCP_TRAP
);
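
/* Direct block chaining: a branch whose target lies in the same guest
   page as the current TB, and which is not being single-stepped, can be
   linked directly to the target TB via goto_tb/exit_tb.  Otherwise the PC
   is updated and control returns to the main loop, raising EXCP_DEBUG
   first when single-stepping. */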
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb((uintptr_t)tb + n);
    } else {
        gen_save_pc(dest);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_0e0i(raise_exception, EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
    }
}
3595 /* Branches (before delay slot) */
3596 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
3598 int rs
, int rt
, int32_t offset
)
3600 target_ulong btgt
= -1;
3602 int bcond_compute
= 0;
3603 TCGv t0
= tcg_temp_new();
3604 TCGv t1
= tcg_temp_new();
3606 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3607 #ifdef MIPS_DEBUG_DISAS
3608 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
3610 generate_exception(ctx
, EXCP_RI
);
3614 /* Load needed operands */
3620 /* Compare two registers */
3622 gen_load_gpr(t0
, rs
);
3623 gen_load_gpr(t1
, rt
);
3626 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3642 /* Compare to zero */
3644 gen_load_gpr(t0
, rs
);
3647 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3650 #if defined(TARGET_MIPS64)
3652 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
3654 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
3657 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3664 /* Jump to immediate */
3665 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
3671 /* Jump to register */
3672 if (offset
!= 0 && offset
!= 16) {
3673 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
3674 others are reserved. */
3675 MIPS_INVAL("jump hint");
3676 generate_exception(ctx
, EXCP_RI
);
3679 gen_load_gpr(btarget
, rs
);
3682 MIPS_INVAL("branch/jump");
3683 generate_exception(ctx
, EXCP_RI
);
3686 if (bcond_compute
== 0) {
3687 /* No condition to be computed */
3689 case OPC_BEQ
: /* rx == rx */
3690 case OPC_BEQL
: /* rx == rx likely */
3691 case OPC_BGEZ
: /* 0 >= 0 */
3692 case OPC_BGEZL
: /* 0 >= 0 likely */
3693 case OPC_BLEZ
: /* 0 <= 0 */
3694 case OPC_BLEZL
: /* 0 <= 0 likely */
3696 ctx
->hflags
|= MIPS_HFLAG_B
;
3697 MIPS_DEBUG("balways");
3700 case OPC_BGEZAL
: /* 0 >= 0 */
3701 case OPC_BGEZALL
: /* 0 >= 0 likely */
3702 ctx
->hflags
|= (opc
== OPC_BGEZALS
3704 : MIPS_HFLAG_BDS32
);
3705 /* Always take and link */
3707 ctx
->hflags
|= MIPS_HFLAG_B
;
3708 MIPS_DEBUG("balways and link");
3710 case OPC_BNE
: /* rx != rx */
3711 case OPC_BGTZ
: /* 0 > 0 */
3712 case OPC_BLTZ
: /* 0 < 0 */
3714 MIPS_DEBUG("bnever (NOP)");
3717 case OPC_BLTZAL
: /* 0 < 0 */
3718 ctx
->hflags
|= (opc
== OPC_BLTZALS
3720 : MIPS_HFLAG_BDS32
);
3721 /* Handle as an unconditional branch to get correct delay
3724 btgt
= ctx
->pc
+ (opc
== OPC_BLTZALS
? 6 : 8);
3725 ctx
->hflags
|= MIPS_HFLAG_B
;
3726 MIPS_DEBUG("bnever and link");
3728 case OPC_BLTZALL
: /* 0 < 0 likely */
3729 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
3730 /* Skip the instruction in the delay slot */
3731 MIPS_DEBUG("bnever, link and skip");
3734 case OPC_BNEL
: /* rx != rx likely */
3735 case OPC_BGTZL
: /* 0 > 0 likely */
3736 case OPC_BLTZL
: /* 0 < 0 likely */
3737 /* Skip the instruction in the delay slot */
3738 MIPS_DEBUG("bnever and skip");
3742 ctx
->hflags
|= MIPS_HFLAG_B
;
3743 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
3747 ctx
->hflags
|= MIPS_HFLAG_BX
;
3752 ctx
->hflags
|= MIPS_HFLAG_B
;
3753 ctx
->hflags
|= ((opc
== OPC_JALS
|| opc
== OPC_JALXS
)
3755 : MIPS_HFLAG_BDS32
);
3756 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
3759 ctx
->hflags
|= MIPS_HFLAG_BR
;
3760 if (insn_bytes
== 4)
3761 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
3762 MIPS_DEBUG("jr %s", regnames
[rs
]);
3768 ctx
->hflags
|= MIPS_HFLAG_BR
;
3769 ctx
->hflags
|= (opc
== OPC_JALRS
3771 : MIPS_HFLAG_BDS32
);
3772 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
3775 MIPS_INVAL("branch/jump");
3776 generate_exception(ctx
, EXCP_RI
);
3782 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3783 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
3784 regnames
[rs
], regnames
[rt
], btgt
);
3787 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3788 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
3789 regnames
[rs
], regnames
[rt
], btgt
);
3792 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3793 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
3794 regnames
[rs
], regnames
[rt
], btgt
);
3797 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3798 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
3799 regnames
[rs
], regnames
[rt
], btgt
);
3802 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3803 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3806 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3807 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3811 ctx
->hflags
|= (opc
== OPC_BGEZALS
3813 : MIPS_HFLAG_BDS32
);
3814 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3815 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3819 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3821 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3824 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3825 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3828 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3829 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3832 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3833 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3836 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3837 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3840 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3841 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3844 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3845 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3848 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
3849 MIPS_DEBUG("bposge32 " TARGET_FMT_lx
, btgt
);
3851 #if defined(TARGET_MIPS64)
3853 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
3854 MIPS_DEBUG("bposge64 " TARGET_FMT_lx
, btgt
);
3859 ctx
->hflags
|= (opc
== OPC_BLTZALS
3861 : MIPS_HFLAG_BDS32
);
3862 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3864 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3866 ctx
->hflags
|= MIPS_HFLAG_BC
;
3869 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3871 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3873 ctx
->hflags
|= MIPS_HFLAG_BL
;
3876 MIPS_INVAL("conditional branch/jump");
3877 generate_exception(ctx
, EXCP_RI
);
3881 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
3882 blink
, ctx
->hflags
, btgt
);
3884 ctx
->btarget
= btgt
;
3886 int post_delay
= insn_bytes
;
3887 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
3889 if (opc
!= OPC_JALRC
)
3890 post_delay
+= ((ctx
->hflags
& MIPS_HFLAG_BDS16
) ? 2 : 4);
3892 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
3896 if (insn_bytes
== 2)
3897 ctx
->hflags
|= MIPS_HFLAG_B16
;
3902 /* special3 bitfield operations */
3903 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
3904 int rs
, int lsb
, int msb
)
3906 TCGv t0
= tcg_temp_new();
3907 TCGv t1
= tcg_temp_new();
3909 gen_load_gpr(t1
, rs
);
3914 tcg_gen_shri_tl(t0
, t1
, lsb
);
3916 tcg_gen_andi_tl(t0
, t0
, (1 << (msb
+ 1)) - 1);
3918 tcg_gen_ext32s_tl(t0
, t0
);
3921 #if defined(TARGET_MIPS64)
3923 tcg_gen_shri_tl(t0
, t1
, lsb
);
3925 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1 + 32)) - 1);
3929 tcg_gen_shri_tl(t0
, t1
, lsb
+ 32);
3930 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3933 tcg_gen_shri_tl(t0
, t1
, lsb
);
3934 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3940 gen_load_gpr(t0
, rt
);
3941 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
3942 tcg_gen_ext32s_tl(t0
, t0
);
3944 #if defined(TARGET_MIPS64)
3946 gen_load_gpr(t0
, rt
);
3947 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
+ 32 - lsb
+ 1);
3950 gen_load_gpr(t0
, rt
);
3951 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
+ 32, msb
- lsb
+ 1);
3954 gen_load_gpr(t0
, rt
);
3955 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
3960 MIPS_INVAL("bitops");
3961 generate_exception(ctx
, EXCP_RI
);
3966 gen_store_gpr(t0
, rt
);
3971 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
3976 /* If no destination, treat it as a NOP. */
3981 t0
= tcg_temp_new();
3982 gen_load_gpr(t0
, rt
);
3986 TCGv t1
= tcg_temp_new();
3988 tcg_gen_shri_tl(t1
, t0
, 8);
3989 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
3990 tcg_gen_shli_tl(t0
, t0
, 8);
3991 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
3992 tcg_gen_or_tl(t0
, t0
, t1
);
3994 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3998 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4001 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4003 #if defined(TARGET_MIPS64)
4006 TCGv t1
= tcg_temp_new();
4008 tcg_gen_shri_tl(t1
, t0
, 8);
4009 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4010 tcg_gen_shli_tl(t0
, t0
, 8);
4011 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4012 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4018 TCGv t1
= tcg_temp_new();
4020 tcg_gen_shri_tl(t1
, t0
, 16);
4021 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4022 tcg_gen_shli_tl(t0
, t0
, 16);
4023 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4024 tcg_gen_or_tl(t0
, t0
, t1
);
4025 tcg_gen_shri_tl(t1
, t0
, 32);
4026 tcg_gen_shli_tl(t0
, t0
, 32);
4027 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4033 MIPS_INVAL("bsfhl");
4034 generate_exception(ctx
, EXCP_RI
);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}

static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
{
    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_st_tl(arg, cpu_env, off);
}
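
/* MFC0: the (reg, sel) pair selects a CP0 register.  Registers with no
   read side effects are loaded straight out of CPUMIPSState (32-bit
   values are sign-extended to the target register width by the helpers
   above), while registers that need extra work (Random, Count, the MT ASE
   TC* registers, LLAddr, WatchLo/WatchHi, Debug, ...) go through
   dedicated helpers. */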
4073 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4075 const char *rn
= "invalid";
4078 check_insn(ctx
, ISA_MIPS32
);
4084 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4088 check_insn(ctx
, ASE_MT
);
4089 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4093 check_insn(ctx
, ASE_MT
);
4094 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4098 check_insn(ctx
, ASE_MT
);
4099 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4109 gen_helper_mfc0_random(arg
, cpu_env
);
4113 check_insn(ctx
, ASE_MT
);
4114 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4118 check_insn(ctx
, ASE_MT
);
4119 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4123 check_insn(ctx
, ASE_MT
);
4124 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4128 check_insn(ctx
, ASE_MT
);
4129 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4133 check_insn(ctx
, ASE_MT
);
4134 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4138 check_insn(ctx
, ASE_MT
);
4139 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4140 rn
= "VPEScheFBack";
4143 check_insn(ctx
, ASE_MT
);
4144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4154 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4155 tcg_gen_ext32s_tl(arg
, arg
);
4159 check_insn(ctx
, ASE_MT
);
4160 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
4164 check_insn(ctx
, ASE_MT
);
4165 gen_helper_mfc0_tcbind(arg
, cpu_env
);
4169 check_insn(ctx
, ASE_MT
);
4170 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
4174 check_insn(ctx
, ASE_MT
);
4175 gen_helper_mfc0_tchalt(arg
, cpu_env
);
4179 check_insn(ctx
, ASE_MT
);
4180 gen_helper_mfc0_tccontext(arg
, cpu_env
);
4184 check_insn(ctx
, ASE_MT
);
4185 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
4189 check_insn(ctx
, ASE_MT
);
4190 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
4200 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4201 tcg_gen_ext32s_tl(arg
, arg
);
4211 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
4212 tcg_gen_ext32s_tl(arg
, arg
);
4216 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
4217 rn
= "ContextConfig";
4226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
4230 check_insn(ctx
, ISA_MIPS32R2
);
4231 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
4241 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
4245 check_insn(ctx
, ISA_MIPS32R2
);
4246 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
4250 check_insn(ctx
, ISA_MIPS32R2
);
4251 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
4255 check_insn(ctx
, ISA_MIPS32R2
);
4256 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
4260 check_insn(ctx
, ISA_MIPS32R2
);
4261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
4265 check_insn(ctx
, ISA_MIPS32R2
);
4266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
4276 check_insn(ctx
, ISA_MIPS32R2
);
4277 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
4287 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
4288 tcg_gen_ext32s_tl(arg
, arg
);
4298 /* Mark as an IO operation because we read the time. */
4301 gen_helper_mfc0_count(arg
, cpu_env
);
4305 /* Break the TB to be able to take timer interrupts immediately
4306 after reading count. */
4307 ctx
->bstate
= BS_STOP
;
4310 /* 6,7 are implementation dependent */
4318 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
4319 tcg_gen_ext32s_tl(arg
, arg
);
4329 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
4332 /* 6,7 are implementation dependent */
4340 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
4344 check_insn(ctx
, ISA_MIPS32R2
);
4345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
4349 check_insn(ctx
, ISA_MIPS32R2
);
4350 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
4354 check_insn(ctx
, ISA_MIPS32R2
);
4355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
4375 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
4376 tcg_gen_ext32s_tl(arg
, arg
);
4386 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
4390 check_insn(ctx
, ISA_MIPS32R2
);
4391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
4401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
4405 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
4409 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
4413 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
4417 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
4421 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
4424 /* 6,7 are implementation dependent */
4426 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
4430 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
4440 gen_helper_mfc0_lladdr(arg
, cpu_env
);
4450 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
4460 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
4470 #if defined(TARGET_MIPS64)
4471 check_insn(ctx
, ISA_MIPS3
);
4472 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
4473 tcg_gen_ext32s_tl(arg
, arg
);
4482 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4485 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
4493 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4494 rn
= "'Diagnostic"; /* implementation dependent */
4499 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
4503 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
4504 rn
= "TraceControl";
4507 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
4508 rn
= "TraceControl2";
4511 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
4512 rn
= "UserTraceData";
4515 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
4526 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
4527 tcg_gen_ext32s_tl(arg
, arg
);
4537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
4538 rn
= "Performance0";
4541 // gen_helper_mfc0_performance1(arg);
4542 rn
= "Performance1";
4545 // gen_helper_mfc0_performance2(arg);
4546 rn
= "Performance2";
4549 // gen_helper_mfc0_performance3(arg);
4550 rn
= "Performance3";
4553 // gen_helper_mfc0_performance4(arg);
4554 rn
= "Performance4";
4557 // gen_helper_mfc0_performance5(arg);
4558 rn
= "Performance5";
4561 // gen_helper_mfc0_performance6(arg);
4562 rn
= "Performance6";
4565 // gen_helper_mfc0_performance7(arg);
4566 rn
= "Performance7";
4573 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4579 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4592 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
4612 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
4619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
4629 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4630 tcg_gen_ext32s_tl(arg
, arg
);
4641 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4651 (void)rn
; /* avoid a compiler warning */
4652 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4656 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4657 generate_exception(ctx
, EXCP_RI
);
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
const char *rn = "invalid";
check_insn(ctx, ISA_MIPS32);
gen_helper_mtc0_index(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_mvpcontrol(cpu_env, arg);
check_insn(ctx, ASE_MT);
check_insn(ctx, ASE_MT);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpecontrol(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeconf0(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeconf1(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_yqmask(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_mtc0_store64(arg, offsetof(CPUMIPSState, CP0_VPESchedule));
check_insn(ctx, ASE_MT);
gen_mtc0_store64(arg, offsetof(CPUMIPSState, CP0_VPEScheFBack));
rn = "VPEScheFBack";
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeopt(cpu_env, arg);
gen_helper_mtc0_entrylo0(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcstatus(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcbind(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcrestart(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tchalt(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tccontext(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcschedule(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcschefback(cpu_env, arg);
gen_helper_mtc0_entrylo1(cpu_env, arg);
gen_helper_mtc0_context(cpu_env, arg);
// gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
rn = "ContextConfig";
gen_helper_mtc0_pagemask(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_pagegrain(cpu_env, arg);
gen_helper_mtc0_wired(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf0(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf1(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf2(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf3(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf4(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_hwrena(cpu_env, arg);
gen_helper_mtc0_count(cpu_env, arg);
/* 6,7 are implementation dependent */
gen_helper_mtc0_entryhi(cpu_env, arg);
gen_helper_mtc0_compare(cpu_env, arg);
/* 6,7 are implementation dependent */
save_cpu_state(ctx, 1);
gen_helper_mtc0_status(cpu_env, arg);
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_intctl(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsctl(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
check_insn(ctx, ISA_MIPS32R2);
gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
save_cpu_state(ctx, 1);
gen_helper_mtc0_cause(cpu_env, arg);
gen_mtc0_store64(arg, offsetof(CPUMIPSState, CP0_EPC));
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_ebase(cpu_env, arg);
gen_helper_mtc0_config0(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* ignored, read only */
gen_helper_mtc0_config2(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* ignored, read only */
gen_helper_mtc0_config4(cpu_env, arg);
ctx->bstate = BS_STOP;
gen_helper_mtc0_config5(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* 6,7 are implementation dependent */
rn = "Invalid config selector";
gen_helper_mtc0_lladdr(cpu_env, arg);
gen_helper_0e1i(mtc0_watchlo, arg, sel);
gen_helper_0e1i(mtc0_watchhi, arg, sel);
#if defined(TARGET_MIPS64)
check_insn(ctx, ISA_MIPS3);
gen_helper_mtc0_xcontext(cpu_env, arg);
/* Officially reserved, but sel 0 is used for R1x000 framemask */
gen_helper_mtc0_framemask(cpu_env, arg);
rn = "Diagnostic"; /* implementation dependent */
gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
// gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
rn = "TraceControl";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
// gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
rn = "TraceControl2";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
// gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
rn = "UserTraceData";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
// gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
gen_mtc0_store64(arg, offsetof(CPUMIPSState, CP0_DEPC));
gen_helper_mtc0_performance0(cpu_env, arg);
rn = "Performance0";
// gen_helper_mtc0_performance1(arg);
rn = "Performance1";
// gen_helper_mtc0_performance2(arg);
rn = "Performance2";
// gen_helper_mtc0_performance3(arg);
rn = "Performance3";
// gen_helper_mtc0_performance4(arg);
rn = "Performance4";
// gen_helper_mtc0_performance5(arg);
rn = "Performance5";
// gen_helper_mtc0_performance6(arg);
rn = "Performance6";
// gen_helper_mtc0_performance7(arg);
rn = "Performance7";
gen_helper_mtc0_taglo(cpu_env, arg);
gen_helper_mtc0_datalo(cpu_env, arg);
gen_helper_mtc0_taghi(cpu_env, arg);
gen_helper_mtc0_datahi(cpu_env, arg);
gen_mtc0_store64(arg, offsetof(CPUMIPSState, CP0_ErrorEPC));
gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
(void)rn; /* avoid a compiler warning */
LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
/* For simplicity assume that all writes can cause interrupts. */
ctx->bstate = BS_STOP;
LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
generate_exception(ctx, EXCP_RI);
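/*
 * Note: CP0 writes that can change hflags or pending interrupt state end
 * translation (BS_STOP, or BS_EXCP after saving the PC) so that the next TB
 * is looked up under the new execution mode.
 */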
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
const char *rn = "invalid";
check_insn(ctx, ISA_MIPS64);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
check_insn(ctx, ASE_MT);
gen_helper_mfc0_mvpcontrol(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_mfc0_mvpconf0(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_mfc0_mvpconf1(arg, cpu_env);
gen_helper_mfc0_random(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
check_insn(ctx, ASE_MT);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
check_insn(ctx, ASE_MT);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
check_insn(ctx, ASE_MT);
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_YQMask));
check_insn(ctx, ASE_MT);
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
check_insn(ctx, ASE_MT);
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
rn = "VPEScheFBack";
check_insn(ctx, ASE_MT);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
check_insn(ctx, ASE_MT);
gen_helper_mfc0_tcstatus(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_mfc0_tcbind(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_dmfc0_tcrestart(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_dmfc0_tchalt(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_dmfc0_tccontext(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_dmfc0_tcschedule(arg, cpu_env);
check_insn(ctx, ASE_MT);
gen_helper_dmfc0_tcschefback(arg, cpu_env);
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
// gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
rn = "ContextConfig";
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
/* Mark as an IO operation because we read the time. */
gen_helper_mfc0_count(arg, cpu_env);
/* Break the TB to be able to take timer interrupts immediately
   after reading count. */
ctx->bstate = BS_STOP;
/* 6,7 are implementation dependent */
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
/* 6,7 are implementation dependent */
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
check_insn(ctx, ISA_MIPS32R2);
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_EBase));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
/* 6,7 are implementation dependent */
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
gen_helper_dmfc0_lladdr(arg, cpu_env);
gen_helper_1e0i(dmfc0_watchlo, arg, sel);
gen_helper_1e0i(mfc0_watchhi, arg, sel);
check_insn(ctx, ISA_MIPS3);
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
/* Officially reserved, but sel 0 is used for R1x000 framemask */
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
tcg_gen_movi_tl(arg, 0); /* unimplemented */
rn = "'Diagnostic"; /* implementation dependent */
gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
// gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
rn = "TraceControl";
// gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
rn = "TraceControl2";
// gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
rn = "UserTraceData";
// gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
rn = "Performance0";
// gen_helper_dmfc0_performance1(arg);
rn = "Performance1";
// gen_helper_dmfc0_performance2(arg);
rn = "Performance2";
// gen_helper_dmfc0_performance3(arg);
rn = "Performance3";
// gen_helper_dmfc0_performance4(arg);
rn = "Performance4";
// gen_helper_dmfc0_performance5(arg);
rn = "Performance5";
// gen_helper_dmfc0_performance6(arg);
rn = "Performance6";
// gen_helper_dmfc0_performance7(arg);
rn = "Performance7";
tcg_gen_movi_tl(arg, 0); /* unimplemented */
tcg_gen_movi_tl(arg, 0); /* unimplemented */
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
(void)rn; /* avoid a compiler warning */
LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
generate_exception(ctx, EXCP_RI);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
const char *rn = "invalid";
check_insn(ctx, ISA_MIPS64);
gen_helper_mtc0_index(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_mvpcontrol(cpu_env, arg);
check_insn(ctx, ASE_MT);
check_insn(ctx, ASE_MT);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpecontrol(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeconf0(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeconf1(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_yqmask(cpu_env, arg);
check_insn(ctx, ASE_MT);
tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
check_insn(ctx, ASE_MT);
tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
rn = "VPEScheFBack";
check_insn(ctx, ASE_MT);
gen_helper_mtc0_vpeopt(cpu_env, arg);
gen_helper_mtc0_entrylo0(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcstatus(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcbind(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcrestart(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tchalt(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tccontext(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcschedule(cpu_env, arg);
check_insn(ctx, ASE_MT);
gen_helper_mtc0_tcschefback(cpu_env, arg);
gen_helper_mtc0_entrylo1(cpu_env, arg);
gen_helper_mtc0_context(cpu_env, arg);
// gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
rn = "ContextConfig";
gen_helper_mtc0_pagemask(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_pagegrain(cpu_env, arg);
gen_helper_mtc0_wired(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf0(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf1(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf2(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf3(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsconf4(cpu_env, arg);
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_hwrena(cpu_env, arg);
gen_helper_mtc0_count(cpu_env, arg);
/* 6,7 are implementation dependent */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
gen_helper_mtc0_entryhi(cpu_env, arg);
gen_helper_mtc0_compare(cpu_env, arg);
/* 6,7 are implementation dependent */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
save_cpu_state(ctx, 1);
gen_helper_mtc0_status(cpu_env, arg);
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_intctl(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_srsctl(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
check_insn(ctx, ISA_MIPS32R2);
gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
save_cpu_state(ctx, 1);
/* Mark as an IO operation because we may trigger a software interrupt. */
gen_helper_mtc0_cause(cpu_env, arg);
/* Stop translation as we may have triggered an interrupt */
ctx->bstate = BS_STOP;
tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
check_insn(ctx, ISA_MIPS32R2);
gen_helper_mtc0_ebase(cpu_env, arg);
gen_helper_mtc0_config0(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* ignored, read only */
gen_helper_mtc0_config2(cpu_env, arg);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
/* 6,7 are implementation dependent */
rn = "Invalid config selector";
gen_helper_mtc0_lladdr(cpu_env, arg);
gen_helper_0e1i(mtc0_watchlo, arg, sel);
gen_helper_0e1i(mtc0_watchhi, arg, sel);
check_insn(ctx, ISA_MIPS3);
gen_helper_mtc0_xcontext(cpu_env, arg);
/* Officially reserved, but sel 0 is used for R1x000 framemask */
gen_helper_mtc0_framemask(cpu_env, arg);
rn = "Diagnostic"; /* implementation dependent */
gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
// gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceControl";
// gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceControl2";
// gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "UserTraceData";
// gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
gen_helper_mtc0_performance0(cpu_env, arg);
rn = "Performance0";
// gen_helper_mtc0_performance1(cpu_env, arg);
rn = "Performance1";
// gen_helper_mtc0_performance2(cpu_env, arg);
rn = "Performance2";
// gen_helper_mtc0_performance3(cpu_env, arg);
rn = "Performance3";
// gen_helper_mtc0_performance4(cpu_env, arg);
rn = "Performance4";
// gen_helper_mtc0_performance5(cpu_env, arg);
rn = "Performance5";
// gen_helper_mtc0_performance6(cpu_env, arg);
rn = "Performance6";
// gen_helper_mtc0_performance7(cpu_env, arg);
rn = "Performance7";
gen_helper_mtc0_taglo(cpu_env, arg);
gen_helper_mtc0_datalo(cpu_env, arg);
gen_helper_mtc0_taghi(cpu_env, arg);
gen_helper_mtc0_datahi(cpu_env, arg);
tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
(void)rn; /* avoid a compiler warning */
LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
/* For simplicity assume that all writes can cause interrupts. */
ctx->bstate = BS_STOP;
LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
generate_exception(ctx, EXCP_RI);
#endif /* TARGET_MIPS64 */
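/*
 * The 64-bit variants above (gen_dmfc0/gen_dmtc0) mirror gen_mfc0/gen_mtc0
 * but access the full register width, so the tcg_gen_ext32s_tl steps used in
 * the 32-bit read paths are not needed.
 */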
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
TCGv t0 = tcg_temp_local_new();
if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
    ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
     (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
tcg_gen_movi_tl(t0, -1);
else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
         (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
tcg_gen_movi_tl(t0, -1);
gen_helper_mftc0_vpecontrol(t0, cpu_env);
gen_helper_mftc0_vpeconf0(t0, cpu_env);
gen_helper_mftc0_tcstatus(t0, cpu_env);
gen_helper_mftc0_tcbind(t0, cpu_env);
gen_helper_mftc0_tcrestart(t0, cpu_env);
gen_helper_mftc0_tchalt(t0, cpu_env);
gen_helper_mftc0_tccontext(t0, cpu_env);
gen_helper_mftc0_tcschedule(t0, cpu_env);
gen_helper_mftc0_tcschefback(t0, cpu_env);
gen_mfc0(ctx, t0, rt, sel);
gen_helper_mftc0_entryhi(t0, cpu_env);
gen_mfc0(ctx, t0, rt, sel);
gen_helper_mftc0_status(t0, cpu_env);
gen_mfc0(ctx, t0, rt, sel);
gen_helper_mftc0_cause(t0, cpu_env);
gen_helper_mftc0_epc(t0, cpu_env);
gen_helper_mftc0_ebase(t0, cpu_env);
gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
gen_helper_mftc0_debug(t0, cpu_env);
gen_mfc0(ctx, t0, rt, sel);
gen_mfc0(ctx, t0, rt, sel);
} else switch (sel) {
/* GPR registers. */
gen_helper_1e0i(mftgpr, t0, rt);
/* Auxiliary CPU registers */
gen_helper_1e0i(mftlo, t0, 0);
gen_helper_1e0i(mfthi, t0, 0);
gen_helper_1e0i(mftacx, t0, 0);
gen_helper_1e0i(mftlo, t0, 1);
gen_helper_1e0i(mfthi, t0, 1);
gen_helper_1e0i(mftacx, t0, 1);
gen_helper_1e0i(mftlo, t0, 2);
gen_helper_1e0i(mfthi, t0, 2);
gen_helper_1e0i(mftacx, t0, 2);
gen_helper_1e0i(mftlo, t0, 3);
gen_helper_1e0i(mfthi, t0, 3);
gen_helper_1e0i(mftacx, t0, 3);
gen_helper_mftdsp(t0, cpu_env);
/* Floating point (COP1). */
/* XXX: For now we support only a single FPU context. */
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, rt);
tcg_gen_ext_i32_tl(t0, fp0);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32h(ctx, fp0, rt);
tcg_gen_ext_i32_tl(t0, fp0);
tcg_temp_free_i32(fp0);
/* XXX: For now we support only a single FPU context. */
gen_helper_1e0i(cfc1, t0, rt);
/* COP2: Not implemented. */
LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
gen_store_gpr(t0, rd);
LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
generate_exception(ctx, EXCP_RI);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
TCGv t0 = tcg_temp_local_new();
gen_load_gpr(t0, rt);
if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
    ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
     (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
         (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
gen_helper_mttc0_vpecontrol(cpu_env, t0);
gen_helper_mttc0_vpeconf0(cpu_env, t0);
gen_helper_mttc0_tcstatus(cpu_env, t0);
gen_helper_mttc0_tcbind(cpu_env, t0);
gen_helper_mttc0_tcrestart(cpu_env, t0);
gen_helper_mttc0_tchalt(cpu_env, t0);
gen_helper_mttc0_tccontext(cpu_env, t0);
gen_helper_mttc0_tcschedule(cpu_env, t0);
gen_helper_mttc0_tcschefback(cpu_env, t0);
gen_mtc0(ctx, t0, rd, sel);
gen_helper_mttc0_entryhi(cpu_env, t0);
gen_mtc0(ctx, t0, rd, sel);
gen_helper_mttc0_status(cpu_env, t0);
gen_mtc0(ctx, t0, rd, sel);
gen_helper_mttc0_cause(cpu_env, t0);
gen_helper_mttc0_ebase(cpu_env, t0);
gen_helper_mttc0_debug(cpu_env, t0);
gen_mtc0(ctx, t0, rd, sel);
gen_mtc0(ctx, t0, rd, sel);
} else switch (sel) {
/* GPR registers. */
gen_helper_0e1i(mttgpr, t0, rd);
/* Auxiliary CPU registers */
gen_helper_0e1i(mttlo, t0, 0);
gen_helper_0e1i(mtthi, t0, 0);
gen_helper_0e1i(mttacx, t0, 0);
gen_helper_0e1i(mttlo, t0, 1);
gen_helper_0e1i(mtthi, t0, 1);
gen_helper_0e1i(mttacx, t0, 1);
gen_helper_0e1i(mttlo, t0, 2);
gen_helper_0e1i(mtthi, t0, 2);
gen_helper_0e1i(mttacx, t0, 2);
gen_helper_0e1i(mttlo, t0, 3);
gen_helper_0e1i(mtthi, t0, 3);
gen_helper_0e1i(mttacx, t0, 3);
gen_helper_mttdsp(cpu_env, t0);
/* Floating point (COP1). */
/* XXX: For now we support only a single FPU context. */
TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32(fp0, rd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32h(ctx, fp0, rd);
tcg_temp_free_i32(fp0);
/* XXX: For now we support only a single FPU context. */
TCGv_i32 fs_tmp = tcg_const_i32(rd);
gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
tcg_temp_free_i32(fs_tmp);
/* COP2: Not implemented. */
LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
generate_exception(ctx, EXCP_RI);
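/*
 * gen_mftr/gen_mttr above target another thread context: other_tc comes from
 * CP0_VPEControl.TargTC, and the read variant returns -1 when the target TC
 * is not addressable from the current VPE.
 */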
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
const char *opn = "ldst";
check_cp0_enabled(ctx);
gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
TCGv t0 = tcg_temp_new();
gen_load_gpr(t0, rt);
gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
check_insn(ctx, ISA_MIPS3);
gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
check_insn(ctx, ISA_MIPS3);
TCGv t0 = tcg_temp_new();
gen_load_gpr(t0, rt);
gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
check_insn(ctx, ASE_MT);
gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
         ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
check_insn(ctx, ASE_MT);
gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
         ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
if (!env->tlb->helper_tlbwi)
gen_helper_tlbwi(cpu_env);
if (!env->tlb->helper_tlbwr)
gen_helper_tlbwr(cpu_env);
if (!env->tlb->helper_tlbp)
gen_helper_tlbp(cpu_env);
if (!env->tlb->helper_tlbr)
gen_helper_tlbr(cpu_env);
check_insn(ctx, ISA_MIPS2);
gen_helper_eret(cpu_env);
ctx->bstate = BS_EXCP;
check_insn(ctx, ISA_MIPS32);
if (!(ctx->hflags & MIPS_HFLAG_DM)) {
generate_exception(ctx, EXCP_RI);
gen_helper_deret(cpu_env);
ctx->bstate = BS_EXCP;
check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
/* If we get an exception, we want to restart at next instruction */
save_cpu_state(ctx, 1);
gen_helper_wait(cpu_env);
ctx->bstate = BS_EXCP;
generate_exception(ctx, EXCP_RI);
(void)opn; /* avoid a compiler warning */
MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
#endif /* !CONFIG_USER_ONLY */
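/*
 * Note: gen_cp0() dispatches the MFC0/MTC0 family: rd selects the CP0
 * register and the low three opcode bits (ctx->opcode & 0x7) select "sel";
 * the MFTR/MTTR forms additionally pass the u and h fields taken from
 * opcode bits 5 and 4.
 */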
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
target_ulong btarget;
const char *opn = "cp1 cond branch";
TCGv_i32 t0 = tcg_temp_new_i32();
check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
btarget = ctx->pc + 4 + offset;
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_not_i32(t0, t0);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_not_i32(t0, t0);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
ctx->hflags |= MIPS_HFLAG_BL;
TCGv_i32 t1 = tcg_temp_new_i32();
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
tcg_gen_nand_i32(t0, t0, t1);
tcg_temp_free_i32(t1);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
TCGv_i32 t1 = tcg_temp_new_i32();
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
tcg_gen_or_i32(t0, t0, t1);
tcg_temp_free_i32(t1);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
TCGv_i32 t1 = tcg_temp_new_i32();
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
tcg_gen_and_i32(t0, t0, t1);
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
tcg_gen_and_i32(t0, t0, t1);
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
tcg_gen_nand_i32(t0, t0, t1);
tcg_temp_free_i32(t1);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
TCGv_i32 t1 = tcg_temp_new_i32();
tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
tcg_gen_or_i32(t0, t0, t1);
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
tcg_gen_or_i32(t0, t0, t1);
tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
tcg_gen_or_i32(t0, t0, t1);
tcg_temp_free_i32(t1);
tcg_gen_andi_i32(t0, t0, 1);
tcg_gen_extu_i32_tl(bcond, t0);
ctx->hflags |= MIPS_HFLAG_BC;
generate_exception (ctx, EXCP_RI);
(void)opn; /* avoid a compiler warning */
MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
           ctx->hflags, btarget);
ctx->btarget = btarget;
tcg_temp_free_i32(t0);
7093 #define FOP(func, fmt) (((fmt) << 21) | (func))
7096 OPC_ADD_S
= FOP(0, FMT_S
),
7097 OPC_SUB_S
= FOP(1, FMT_S
),
7098 OPC_MUL_S
= FOP(2, FMT_S
),
7099 OPC_DIV_S
= FOP(3, FMT_S
),
7100 OPC_SQRT_S
= FOP(4, FMT_S
),
7101 OPC_ABS_S
= FOP(5, FMT_S
),
7102 OPC_MOV_S
= FOP(6, FMT_S
),
7103 OPC_NEG_S
= FOP(7, FMT_S
),
7104 OPC_ROUND_L_S
= FOP(8, FMT_S
),
7105 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
7106 OPC_CEIL_L_S
= FOP(10, FMT_S
),
7107 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
7108 OPC_ROUND_W_S
= FOP(12, FMT_S
),
7109 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
7110 OPC_CEIL_W_S
= FOP(14, FMT_S
),
7111 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
7112 OPC_MOVCF_S
= FOP(17, FMT_S
),
7113 OPC_MOVZ_S
= FOP(18, FMT_S
),
7114 OPC_MOVN_S
= FOP(19, FMT_S
),
7115 OPC_RECIP_S
= FOP(21, FMT_S
),
7116 OPC_RSQRT_S
= FOP(22, FMT_S
),
7117 OPC_RECIP2_S
= FOP(28, FMT_S
),
7118 OPC_RECIP1_S
= FOP(29, FMT_S
),
7119 OPC_RSQRT1_S
= FOP(30, FMT_S
),
7120 OPC_RSQRT2_S
= FOP(31, FMT_S
),
7121 OPC_CVT_D_S
= FOP(33, FMT_S
),
7122 OPC_CVT_W_S
= FOP(36, FMT_S
),
7123 OPC_CVT_L_S
= FOP(37, FMT_S
),
7124 OPC_CVT_PS_S
= FOP(38, FMT_S
),
7125 OPC_CMP_F_S
= FOP (48, FMT_S
),
7126 OPC_CMP_UN_S
= FOP (49, FMT_S
),
7127 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
7128 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
7129 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
7130 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
7131 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
7132 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
7133 OPC_CMP_SF_S
= FOP (56, FMT_S
),
7134 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
7135 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
7136 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
7137 OPC_CMP_LT_S
= FOP (60, FMT_S
),
7138 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
7139 OPC_CMP_LE_S
= FOP (62, FMT_S
),
7140 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
7142 OPC_ADD_D
= FOP(0, FMT_D
),
7143 OPC_SUB_D
= FOP(1, FMT_D
),
7144 OPC_MUL_D
= FOP(2, FMT_D
),
7145 OPC_DIV_D
= FOP(3, FMT_D
),
7146 OPC_SQRT_D
= FOP(4, FMT_D
),
7147 OPC_ABS_D
= FOP(5, FMT_D
),
7148 OPC_MOV_D
= FOP(6, FMT_D
),
7149 OPC_NEG_D
= FOP(7, FMT_D
),
7150 OPC_ROUND_L_D
= FOP(8, FMT_D
),
7151 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
7152 OPC_CEIL_L_D
= FOP(10, FMT_D
),
7153 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
7154 OPC_ROUND_W_D
= FOP(12, FMT_D
),
7155 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
7156 OPC_CEIL_W_D
= FOP(14, FMT_D
),
7157 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
7158 OPC_MOVCF_D
= FOP(17, FMT_D
),
7159 OPC_MOVZ_D
= FOP(18, FMT_D
),
7160 OPC_MOVN_D
= FOP(19, FMT_D
),
7161 OPC_RECIP_D
= FOP(21, FMT_D
),
7162 OPC_RSQRT_D
= FOP(22, FMT_D
),
7163 OPC_RECIP2_D
= FOP(28, FMT_D
),
7164 OPC_RECIP1_D
= FOP(29, FMT_D
),
7165 OPC_RSQRT1_D
= FOP(30, FMT_D
),
7166 OPC_RSQRT2_D
= FOP(31, FMT_D
),
7167 OPC_CVT_S_D
= FOP(32, FMT_D
),
7168 OPC_CVT_W_D
= FOP(36, FMT_D
),
7169 OPC_CVT_L_D
= FOP(37, FMT_D
),
7170 OPC_CMP_F_D
= FOP (48, FMT_D
),
7171 OPC_CMP_UN_D
= FOP (49, FMT_D
),
7172 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
7173 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
7174 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
7175 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
7176 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
7177 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
7178 OPC_CMP_SF_D
= FOP (56, FMT_D
),
7179 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
7180 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
7181 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
7182 OPC_CMP_LT_D
= FOP (60, FMT_D
),
7183 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
7184 OPC_CMP_LE_D
= FOP (62, FMT_D
),
7185 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
7187 OPC_CVT_S_W
= FOP(32, FMT_W
),
7188 OPC_CVT_D_W
= FOP(33, FMT_W
),
7189 OPC_CVT_S_L
= FOP(32, FMT_L
),
7190 OPC_CVT_D_L
= FOP(33, FMT_L
),
7191 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
7193 OPC_ADD_PS
= FOP(0, FMT_PS
),
7194 OPC_SUB_PS
= FOP(1, FMT_PS
),
7195 OPC_MUL_PS
= FOP(2, FMT_PS
),
7196 OPC_DIV_PS
= FOP(3, FMT_PS
),
7197 OPC_ABS_PS
= FOP(5, FMT_PS
),
7198 OPC_MOV_PS
= FOP(6, FMT_PS
),
7199 OPC_NEG_PS
= FOP(7, FMT_PS
),
7200 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
7201 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
7202 OPC_MOVN_PS
= FOP(19, FMT_PS
),
7203 OPC_ADDR_PS
= FOP(24, FMT_PS
),
7204 OPC_MULR_PS
= FOP(26, FMT_PS
),
7205 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
7206 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
7207 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
7208 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
7210 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
7211 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
7212 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
7213 OPC_PLL_PS
= FOP(44, FMT_PS
),
7214 OPC_PLU_PS
= FOP(45, FMT_PS
),
7215 OPC_PUL_PS
= FOP(46, FMT_PS
),
7216 OPC_PUU_PS
= FOP(47, FMT_PS
),
7217 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
7218 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
7219 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
7220 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
7221 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
7222 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
7223 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
7224 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
7225 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
7226 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
7227 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
7228 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
7229 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
7230 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
7231 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
7232 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
const char *opn = "cp1 move";
TCGv t0 = tcg_temp_new();
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
tcg_gen_ext_i32_tl(t0, fp0);
tcg_temp_free_i32(fp0);
gen_store_gpr(t0, rt);
gen_load_gpr(t0, rt);
TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32(fp0, fs);
tcg_temp_free_i32(fp0);
gen_helper_1e0i(cfc1, t0, fs);
gen_store_gpr(t0, rt);
gen_load_gpr(t0, rt);
TCGv_i32 fs_tmp = tcg_const_i32(fs);
gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
tcg_temp_free_i32(fs_tmp);
#if defined(TARGET_MIPS64)
gen_load_fpr64(ctx, t0, fs);
gen_store_gpr(t0, rt);
gen_load_gpr(t0, rt);
gen_store_fpr64(ctx, t0, fs);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32h(ctx, fp0, fs);
tcg_gen_ext_i32_tl(t0, fp0);
tcg_temp_free_i32(fp0);
gen_store_gpr(t0, rt);
gen_load_gpr(t0, rt);
TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32h(ctx, fp0, fs);
tcg_temp_free_i32(fp0);
generate_exception (ctx, EXCP_RI);
(void)opn; /* avoid a compiler warning */
MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);

static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
l1 = gen_new_label();
t0 = tcg_temp_new_i32();
tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
tcg_gen_brcondi_i32(cond, t0, 0, l1);
tcg_temp_free_i32(t0);
tcg_gen_movi_tl(cpu_gpr[rd], 0);
tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
TCGv_i32 t0 = tcg_temp_new_i32();
int l1 = gen_new_label();
tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
tcg_gen_brcondi_i32(cond, t0, 0, l1);
gen_load_fpr32(t0, fs);
gen_store_fpr32(t0, fd);
tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
TCGv_i32 t0 = tcg_temp_new_i32();
int l1 = gen_new_label();
tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
tcg_gen_brcondi_i32(cond, t0, 0, l1);
tcg_temp_free_i32(t0);
fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
TCGv_i32 t0 = tcg_temp_new_i32();
int l1 = gen_new_label();
int l2 = gen_new_label();
tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
tcg_gen_brcondi_i32(cond, t0, 0, l1);
gen_load_fpr32(t0, fs);
gen_store_fpr32(t0, fd);
tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
tcg_gen_brcondi_i32(cond, t0, 0, l2);
gen_load_fpr32h(ctx, t0, fs);
gen_store_fpr32h(ctx, t0, fd);
tcg_temp_free_i32(t0);
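/*
 * The gen_movcf_* helpers above implement the MOVF.fmt/MOVT.fmt pattern:
 * they test one FCR31 condition bit with brcondi and skip the register copy
 * when the bit does not match tf; the paired-single variant tests cc and
 * cc+1 separately for the lower and upper halves.
 */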
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
const char *opn = "farith";
const char *condnames[] = {
const char *condnames_abs[] = {
enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
uint32_t func = ctx->opcode & 0x3f;
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_sub_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_mul_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_div_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_sqrt_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_abs_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_chs_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_roundl_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_truncl_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_ceill_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_floorl_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_roundw_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_truncw_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_ceilw_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_floorw_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_recip_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_rsqrt_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_recip2_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_recip1_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_rsqrt1_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp0 = tcg_temp_new_i32();
TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_helper_float_rsqrt2_s(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_registers(ctx, fd);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_cvtd_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_cvtw_s(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_cvtl_s(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
check_cp1_64bitmode(ctx);
TCGv_i64 fp64 = tcg_temp_new_i64();
TCGv_i32 fp32_0 = tcg_temp_new_i32();
TCGv_i32 fp32_1 = tcg_temp_new_i32();
gen_load_fpr32(fp32_0, fs);
gen_load_fpr32(fp32_1, ft);
tcg_gen_concat_i32_i64(fp64, fp32_1, fp32_0);
tcg_temp_free_i32(fp32_1);
tcg_temp_free_i32(fp32_0);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
case OPC_CMP_NGLE_S:
if (ctx->opcode & (1 << 6)) {
gen_cmpabs_s(ctx, func-48, ft, fs, cc);
opn = condnames_abs[func-48];
gen_cmp_s(ctx, func-48, ft, fs, cc);
opn = condnames[func-48];
check_cp1_registers(ctx, fs | ft | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_add_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | ft | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_sub_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | ft | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_mul_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | ft | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_div_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_sqrt_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_abs_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs | fd);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_chs_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_roundl_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_truncl_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_ceill_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_floorl_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_roundw_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_truncw_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_ceilw_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_floorw_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_recip_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_rsqrt_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_recip2_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_recip1_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_rsqrt1_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_rsqrt2_d(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
case OPC_CMP_NGLE_D:
if (ctx->opcode & (1 << 6)) {
gen_cmpabs_d(ctx, func-48, ft, fs, cc);
opn = condnames_abs[func-48];
gen_cmp_d(ctx, func-48, ft, fs, cc);
opn = condnames[func-48];
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_cvts_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_registers(ctx, fs);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_cvtw_d(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_cvtl_d(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_helper_float_cvts_w(fp0, cpu_env, fp0);
gen_store_fpr32(fp0, fd);
tcg_temp_free_i32(fp0);
check_cp1_registers(ctx, fd);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
gen_helper_float_cvtd_w(fp64, cpu_env, fp32);
tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
tcg_temp_free_i64(fp64);
check_cp1_64bitmode(ctx);
TCGv_i32 fp32 = tcg_temp_new_i32();
TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
gen_helper_float_cvts_l(fp32, cpu_env, fp64);
tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
tcg_temp_free_i32(fp32);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_cvtd_l(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_cvtps_pw(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_add_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_sub_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_mul_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_abs_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_chs_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
gen_movcf_ps(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
check_cp1_64bitmode(ctx);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, ft);
gen_load_fpr64(ctx, fp1, fs);
gen_helper_float_addr_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, ft);
gen_load_fpr64(ctx, fp1, fs);
gen_helper_float_mulr_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_recip2_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_recip1_ps(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_helper_float_rsqrt1_ps(fp0, cpu_env, fp0);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx);
TCGv_i64 fp0 = tcg_temp_new_i64();
TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_helper_float_rsqrt2_ps(fp0, cpu_env, fp0, fp1);
tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
tcg_temp_free_i64(fp0);
check_cp1_64bitmode(ctx
);
8524 TCGv_i32 fp0
= tcg_temp_new_i32();
8526 gen_load_fpr32h(ctx
, fp0
, fs
);
8527 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
8528 gen_store_fpr32(fp0
, fd
);
8529 tcg_temp_free_i32(fp0
);
8534 check_cp1_64bitmode(ctx
);
8536 TCGv_i64 fp0
= tcg_temp_new_i64();
8538 gen_load_fpr64(ctx
, fp0
, fs
);
8539 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
8540 gen_store_fpr64(ctx
, fp0
, fd
);
8541 tcg_temp_free_i64(fp0
);
8546 check_cp1_64bitmode(ctx
);
8548 TCGv_i32 fp0
= tcg_temp_new_i32();
8550 gen_load_fpr32(fp0
, fs
);
8551 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
8552 gen_store_fpr32(fp0
, fd
);
8553 tcg_temp_free_i32(fp0
);
8558 check_cp1_64bitmode(ctx
);
8560 TCGv_i32 fp0
= tcg_temp_new_i32();
8561 TCGv_i32 fp1
= tcg_temp_new_i32();
8563 gen_load_fpr32(fp0
, fs
);
8564 gen_load_fpr32(fp1
, ft
);
8565 gen_store_fpr32h(ctx
, fp0
, fd
);
8566 gen_store_fpr32(fp1
, fd
);
8567 tcg_temp_free_i32(fp0
);
8568 tcg_temp_free_i32(fp1
);
8573 check_cp1_64bitmode(ctx
);
8575 TCGv_i32 fp0
= tcg_temp_new_i32();
8576 TCGv_i32 fp1
= tcg_temp_new_i32();
8578 gen_load_fpr32(fp0
, fs
);
8579 gen_load_fpr32h(ctx
, fp1
, ft
);
8580 gen_store_fpr32(fp1
, fd
);
8581 gen_store_fpr32h(ctx
, fp0
, fd
);
8582 tcg_temp_free_i32(fp0
);
8583 tcg_temp_free_i32(fp1
);
8588 check_cp1_64bitmode(ctx
);
8590 TCGv_i32 fp0
= tcg_temp_new_i32();
8591 TCGv_i32 fp1
= tcg_temp_new_i32();
8593 gen_load_fpr32h(ctx
, fp0
, fs
);
8594 gen_load_fpr32(fp1
, ft
);
8595 gen_store_fpr32(fp1
, fd
);
8596 gen_store_fpr32h(ctx
, fp0
, fd
);
8597 tcg_temp_free_i32(fp0
);
8598 tcg_temp_free_i32(fp1
);
8603 check_cp1_64bitmode(ctx
);
8605 TCGv_i32 fp0
= tcg_temp_new_i32();
8606 TCGv_i32 fp1
= tcg_temp_new_i32();
8608 gen_load_fpr32h(ctx
, fp0
, fs
);
8609 gen_load_fpr32h(ctx
, fp1
, ft
);
8610 gen_store_fpr32(fp1
, fd
);
8611 gen_store_fpr32h(ctx
, fp0
, fd
);
8612 tcg_temp_free_i32(fp0
);
8613 tcg_temp_free_i32(fp1
);
8620 case OPC_CMP_UEQ_PS
:
8621 case OPC_CMP_OLT_PS
:
8622 case OPC_CMP_ULT_PS
:
8623 case OPC_CMP_OLE_PS
:
8624 case OPC_CMP_ULE_PS
:
8626 case OPC_CMP_NGLE_PS
:
8627 case OPC_CMP_SEQ_PS
:
8628 case OPC_CMP_NGL_PS
:
8630 case OPC_CMP_NGE_PS
:
8632 case OPC_CMP_NGT_PS
:
8633 if (ctx
->opcode
& (1 << 6)) {
8634 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
8635 opn
= condnames_abs
[func
-48];
8637 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
8638 opn
= condnames
[func
-48];
8643 generate_exception (ctx
, EXCP_RI
);
8646 (void)opn
; /* avoid a compiler warning */
8649 MIPS_DEBUG("%s %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fs
], fregnames
[ft
]);
8652 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fs
], fregnames
[ft
]);
8655 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fd
], fregnames
[fs
]);
8660 /* Coprocessor 3 (FPU) */
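/* Note: gen_flt3_ldst below emits TCG for the COP1X indexed FPU
 * load/store forms (as far as this excerpt shows, LWXC1/LDXC1 and their
 * store counterparts, plus the unaligned-pair variants that are
 * presumably LUXC1/SUXC1): the effective address is base + index, data
 * is staged through a TCG temporary, and the pair variants mask the low
 * three address bits to force 8-byte alignment. */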
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    const char *opn = "extended float load/store";
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fs);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);

    (void)opn; (void)store; /* avoid compiler warnings */
    MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
               regnames[index], regnames[base]);
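/* Note: gen_flt3_arith handles the COP1X three-operand arithmetic
 * group (ALNV.PS plus the MADD/MSUB/NMADD/NMSUB families in S, D and
 * PS formats, judging by the helpers called below): fr, fs and ft are
 * loaded into temporaries, a single helper performs the fused
 * operation, and the result is written back to fd. */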
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    const char *opn = "flt3_arith";

    check_cp1_64bitmode(ctx);
    TCGv t0 = tcg_temp_local_new();
    TCGv_i32 fp = tcg_temp_new_i32();
    TCGv_i32 fph = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    gen_load_gpr(t0, fr);
    tcg_gen_andi_tl(t0, t0, 0x7);

    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_load_fpr32(fp, fs);
    gen_load_fpr32h(ctx, fph, fs);
    gen_store_fpr32(fp, fd);
    gen_store_fpr32h(ctx, fph, fd);

    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
#ifdef TARGET_WORDS_BIGENDIAN
    gen_load_fpr32(fp, fs);
    gen_load_fpr32h(ctx, fph, ft);
    gen_store_fpr32h(ctx, fp, fd);
    gen_store_fpr32(fph, fd);
#else
    gen_load_fpr32h(ctx, fph, fs);
    gen_load_fpr32(fp, ft);
    gen_store_fpr32(fph, fd);
    gen_store_fpr32h(ctx, fp, fd);
#endif
    tcg_temp_free_i32(fp);
    tcg_temp_free_i32(fph);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    generate_exception (ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
               fregnames[fs], fregnames[ft]);
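/* Note: gen_rdhwr implements the RDHWR instruction by calling one
 * helper per hardware register (CPU number, SYNCI step, cycle counter,
 * counter resolution) and, in user mode, reading the TLS pointer
 * directly from CPUMIPSState; unknown register numbers raise a
 * Reserved Instruction exception. */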
static void gen_rdhwr(DisasContext *ctx, int rt, int rd)
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_cpunum(t0, cpu_env);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_synci_step(t0, cpu_env);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_cc(t0, cpu_env);
    gen_store_gpr(t0, rt);

    save_cpu_state(ctx, 1);
    gen_helper_rdhwr_ccres(t0, cpu_env);
    gen_store_gpr(t0, rt);

#if defined(CONFIG_USER_ONLY)
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, tls_value));
    gen_store_gpr(t0, rt);
#else
    /* XXX: Some CPUs implement this in hardware.
       Not supported yet. */
#endif
    default: /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
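/* Note: handle_delay_slot finishes a branch once its delay slot has
 * been translated: it clears the branch hflags, saves CPU state, and
 * then either chains to the branch target with gen_goto_tb(), emits
 * the conditional compare on bcond, or copies btarget into the PC for
 * register branches, updating the MIPS16/microMIPS mode bit from bit 0
 * of the target where those ASEs are present. */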
static void handle_delay_slot(DisasContext *ctx, int insn_bytes)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
            int l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
            gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
            gen_goto_tb(ctx, 0, ctx->btarget);

            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_0e0i(raise_exception, EXCP_DEBUG);
            }

            MIPS_DEBUG("unknown branch");
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LWPC = 0x16,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
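/* Note: xlat maps the 3-bit MIPS16 register encoding onto the full MIPS
 * register numbers; e.g. encoding 0 selects $16 (s0) and encoding 2
 * selects $2 (v0), per the map table below. */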
static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
    return map[r];
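/* Note: gen_mips16_save and gen_mips16_restore implement the MIPS16e
 * SAVE/RESTORE instructions: argument registers are spilled relative to
 * the incoming $29, the DECR_AND_STORE/DECR_AND_LOAD helpers walk the
 * static register list, and $29 is finally adjusted by the decoded
 * frame size. */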
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
                             int framesize)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    generate_exception(ctx, EXCP_RI);

    gen_base_offset_addr(ctx, t0, 29, 12);
    gen_load_gpr(t1, 7);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 8);
    gen_load_gpr(t1, 6);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 4);
    gen_load_gpr(t1, 5);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 0);
    gen_load_gpr(t1, 4);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_subi_tl(t0, t0, 4);                              \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)

    generate_exception(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_subi_tl(cpu_gpr[29], cpu_gpr[29], framesize);
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
                                int framesize)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    tcg_gen_addi_tl(t0, cpu_gpr[29], framesize);

#define DECR_AND_LOAD(reg) do {                                  \
        tcg_gen_subi_tl(t0, t0, 4);                              \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);       \
        gen_store_gpr(t1, reg);                                  \
    } while (0)

    generate_exception(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_addi_tl(cpu_gpr[29], cpu_gpr[29], framesize);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception(ctx, EXCP_RI);
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
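/* Note: decode_i64_mips16 handles the MIPS16 I64 funct group (LD/SD
 * relative to $sp, SD RA, DADDIU forms and DADDIUPC, judging by the
 * generators called below); the offset is used as-is for EXTENDed
 * encodings and shifted into place for the short 16-bit encodings. */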
#if defined(TARGET_MIPS64)
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
                               int extended)
    offset = extended ? offset : offset << 3;
    gen_ld(ctx, OPC_LD, ry, 29, offset);

    offset = extended ? offset : offset << 3;
    gen_st(ctx, OPC_SD, ry, 29, offset);

    offset = extended ? offset : (ctx->opcode & 0xff) << 3;
    gen_st(ctx, OPC_SD, 31, 29, offset);

    offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
    gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception(ctx, EXCP_RI);
    }
    offset = extended ? offset : offset << 3;
    gen_ld(ctx, OPC_LDPC, ry, 0, offset);

    offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
    gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);

    offset = extended ? offset : offset << 2;
    gen_addiupc(ctx, ry, offset, 1, extended);

    offset = extended ? offset : offset << 2;
    gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
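/* Note: decode_extended_mips16_opc decodes an EXTEND-prefixed MIPS16
 * instruction: the second halfword is fetched, merged into ctx->opcode,
 * and the widened immediate is reassembled from the bit fields shown
 * below before dispatching on the 16-bit major opcode. */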
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int extend = cpu_lduw_code(env, ctx->pc + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));

    /* The extended opcodes cleverly reuse the opcodes from their 16-bit
       counterparts. */
    case M16_OPC_ADDIUSP:
        gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);

    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, imm, 0, 1);

        gen_compute_branch(ctx, OPC_BEQ, 4, 0, 0, offset << 1);
        /* No delay slot, so just process as a normal instruction */

        gen_compute_branch(ctx, OPC_BEQ, 4, rx, 0, offset << 1);
        /* No delay slot, so just process as a normal instruction */

        gen_compute_branch(ctx, OPC_BNE, 4, rx, 0, offset << 1);
        /* No delay slot, so just process as a normal instruction */

        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);

#if defined(TARGET_MIPS64)
        gen_ld(ctx, OPC_LD, ry, rx, offset);

        imm = ctx->opcode & 0xf;
        imm = imm | ((ctx->opcode >> 20) & 0x7f) << 4;
        imm = imm | ((ctx->opcode >> 16) & 0xf) << 11;
        imm = (int16_t) (imm << 1) >> 1;
        if ((ctx->opcode >> 4) & 0x1) {
#if defined(TARGET_MIPS64)
            gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
            generate_exception(ctx, EXCP_RI);
        gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);

    case M16_OPC_ADDIU8:
        gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);

        gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
        gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);

        gen_compute_branch(ctx, OPC_BEQ, 4, 24, 0, offset << 1);
        gen_compute_branch(ctx, OPC_BNE, 4, 24, 0, offset << 1);
        gen_st(ctx, OPC_SW, 31, 29, imm);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm);

        int xsregs = (ctx->opcode >> 24) & 0x7;
        int aregs = (ctx->opcode >> 16) & 0xf;
        int do_ra = (ctx->opcode >> 6) & 0x1;
        int do_s0 = (ctx->opcode >> 5) & 0x1;
        int do_s1 = (ctx->opcode >> 4) & 0x1;
        int framesize = (((ctx->opcode >> 20) & 0xf) << 4
                         | (ctx->opcode & 0xf)) << 3;

        if (ctx->opcode & (1 << 7)) {
            gen_mips16_save(ctx, xsregs, aregs,
                            do_ra, do_s0, do_s1, framesize);
        } else {
            gen_mips16_restore(ctx, xsregs, aregs,
                               do_ra, do_s0, do_s1, framesize);
        }

        generate_exception(ctx, EXCP_RI);

        tcg_gen_movi_tl(cpu_gpr[rx], (uint16_t) imm);
        tcg_gen_xori_tl(cpu_gpr[24], cpu_gpr[rx], (uint16_t) imm);

#if defined(TARGET_MIPS64)
        gen_st(ctx, OPC_SD, ry, rx, offset);

        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset);
        gen_ld(ctx, OPC_LW, rx, 29, offset);
        gen_ld(ctx, OPC_LW, ry, rx, offset);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset);
        gen_ld(ctx, OPC_LWPC, rx, 0, offset);
#if defined (TARGET_MIPS64)
        gen_ld(ctx, OPC_LWU, ry, rx, offset);

        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset);
        gen_st(ctx, OPC_SW, rx, 29, offset);
        gen_st(ctx, OPC_SW, ry, rx, offset);

#if defined(TARGET_MIPS64)
        decode_i64_mips16(ctx, ry, funct, offset, 1);

        generate_exception(ctx, EXCP_RI);
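/* Note: decode_mips16_opc decodes a native 16-bit MIPS16 instruction:
 * register fields go through xlat(), a shift amount of 0 is treated as
 * 8, and the major opcode switch either emits the operation directly or
 * defers to the EXTEND/I64 decoders above. */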
static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int op, cnvt_op, op1, offset;

    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;

    case M16_OPC_ADDIUSP:
        int16_t imm = ((uint8_t) ctx->opcode) << 2;
        gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);

    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, ((uint8_t) ctx->opcode) << 2, 0, 0);

        offset = (ctx->opcode & 0x7ff) << 1;
        offset = (int16_t)(offset << 4) >> 4;
        gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0, offset);
        /* No delay slot, so just process as a normal instruction */

        offset = cpu_lduw_code(env, ctx->pc + 2);
        offset = (((ctx->opcode & 0x1f) << 21)
                  | ((ctx->opcode >> 5) & 0x1f) << 16
                  | offset) << 2;
        op = ((ctx->opcode >> 10) & 0x1) ? OPC_JALXS : OPC_JALS;
        gen_compute_branch(ctx, op, 4, rx, ry, offset);

        gen_compute_branch(ctx, OPC_BEQ, 2, rx, 0, ((int8_t)ctx->opcode) << 1);
        /* No delay slot, so just process as a normal instruction */

        gen_compute_branch(ctx, OPC_BNE, 2, rx, 0, ((int8_t)ctx->opcode) << 1);
        /* No delay slot, so just process as a normal instruction */

        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);

#if defined(TARGET_MIPS64)
        gen_ld(ctx, OPC_LD, ry, rx, offset << 3);

        int16_t imm = (int8_t)((ctx->opcode & 0xf) << 4) >> 4;

        if ((ctx->opcode >> 4) & 1) {
#if defined(TARGET_MIPS64)
            gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
            generate_exception(ctx, EXCP_RI);
        gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);

    case M16_OPC_ADDIU8:
        int16_t imm = (int8_t) ctx->opcode;
        gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);

        int16_t imm = (uint8_t) ctx->opcode;
        gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);

        int16_t imm = (uint8_t) ctx->opcode;
        gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);

        funct = (ctx->opcode >> 8) & 0x7;
        gen_compute_branch(ctx, OPC_BEQ, 2, 24, 0,
                           ((int8_t)ctx->opcode) << 1);
        gen_compute_branch(ctx, OPC_BNE, 2, 24, 0,
                           ((int8_t)ctx->opcode) << 1);
        gen_st(ctx, OPC_SW, 31, 29, (ctx->opcode & 0xff) << 2);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29,
                      ((int8_t)ctx->opcode) << 3);

        int do_ra = ctx->opcode & (1 << 6);
        int do_s0 = ctx->opcode & (1 << 5);
        int do_s1 = ctx->opcode & (1 << 4);
        int framesize = ctx->opcode & 0xf;

        if (framesize == 0) {
        framesize = framesize << 3;

        if (ctx->opcode & (1 << 7)) {
            gen_mips16_save(ctx, 0, 0,
                            do_ra, do_s0, do_s1, framesize);
        } else {
            gen_mips16_restore(ctx, 0, 0,
                               do_ra, do_s0, do_s1, framesize);
        }

        int rz = xlat(ctx->opcode & 0x7);
        reg32 = (((ctx->opcode >> 3) & 0x3) << 3) |
                ((ctx->opcode >> 5) & 0x7);
        gen_arith(ctx, OPC_ADDU, reg32, rz, 0);

        reg32 = ctx->opcode & 0x1f;
        gen_arith(ctx, OPC_ADDU, ry, reg32, 0);

        generate_exception(ctx, EXCP_RI);

        int16_t imm = (uint8_t) ctx->opcode;
        gen_arith_imm(ctx, OPC_ADDIU, rx, 0, imm);

        int16_t imm = (uint8_t) ctx->opcode;
        gen_logic_imm(ctx, OPC_XORI, 24, rx, imm);

#if defined(TARGET_MIPS64)
        gen_st(ctx, OPC_SD, ry, rx, offset << 3);

        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_ld(ctx, OPC_LW, ry, rx, offset << 2);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LWPC, rx, 0, ((uint8_t)ctx->opcode) << 2);
#if defined (TARGET_MIPS64)
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LWU, ry, rx, offset << 2);

        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset << 1);
        gen_st(ctx, OPC_SW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_st(ctx, OPC_SW, ry, rx, offset << 2);

        int rz = xlat((ctx->opcode >> 2) & 0x7);

        switch (ctx->opcode & 0x3) {
            mips32_op = OPC_ADDU;
            mips32_op = OPC_SUBU;
#if defined(TARGET_MIPS64)
            mips32_op = OPC_DADDU;
            check_mips_64(ctx);
            mips32_op = OPC_DSUBU;
            check_mips_64(ctx);
            generate_exception(ctx, EXCP_RI);
        gen_arith(ctx, mips32_op, rz, rx, ry);

        int nd = (ctx->opcode >> 7) & 0x1;
        int link = (ctx->opcode >> 6) & 0x1;
        int ra = (ctx->opcode >> 5) & 0x1;

        op = nd ? OPC_JALRC : OPC_JALRS;
        gen_compute_branch(ctx, op, 2, ra ? 31 : rx, 31, 0);

        /* XXX: not clear which exception should be raised
         *      when in debug mode...
         */
        check_insn(ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
        } else {
            generate_exception(ctx, EXCP_DBp);
        }

        gen_slt(ctx, OPC_SLT, 24, rx, ry);
        gen_slt(ctx, OPC_SLTU, 24, rx, ry);
        generate_exception(ctx, EXCP_BREAK);
        gen_shift(ctx, OPC_SLLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRAV, ry, rx, ry);
#if defined (TARGET_MIPS64)
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRL, ry, ry, sa);

        gen_logic(ctx, OPC_XOR, 24, rx, ry);
        gen_arith(ctx, OPC_SUBU, rx, 0, ry);
        gen_logic(ctx, OPC_AND, rx, rx, ry);
        gen_logic(ctx, OPC_OR, rx, rx, ry);
        gen_logic(ctx, OPC_XOR, rx, rx, ry);
        gen_logic(ctx, OPC_NOR, rx, ry, 0);
        gen_HILO(ctx, OPC_MFHI, 0, rx);

        case RR_RY_CNVT_ZEB:
            tcg_gen_ext8u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_ZEH:
            tcg_gen_ext16u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEB:
            tcg_gen_ext8s_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEH:
            tcg_gen_ext16s_tl(cpu_gpr[rx], cpu_gpr[rx]);
#if defined (TARGET_MIPS64)
        case RR_RY_CNVT_ZEW:
            check_mips_64(ctx);
            tcg_gen_ext32u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEW:
            check_mips_64(ctx);
            tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            generate_exception(ctx, EXCP_RI);

        gen_HILO(ctx, OPC_MFLO, 0, rx);
#if defined (TARGET_MIPS64)
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRA, ry, ry, sa);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSLLV, ry, rx, ry);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRLV, ry, rx, ry);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRAV, ry, rx, ry);

        gen_muldiv(ctx, OPC_MULT, 0, rx, ry);
        gen_muldiv(ctx, OPC_MULTU, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIV, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIVU, 0, rx, ry);
#if defined (TARGET_MIPS64)
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULT, 0, rx, ry);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULTU, 0, rx, ry);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIV, 0, rx, ry);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIVU, 0, rx, ry);
        generate_exception(ctx, EXCP_RI);

    case M16_OPC_EXTEND:
        decode_extended_mips16_opc(env, ctx);

#if defined(TARGET_MIPS64)
        funct = (ctx->opcode >> 8) & 0x7;
        decode_i64_mips16(ctx, ry, funct, offset, 0);

        generate_exception(ctx, EXCP_RI);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */

    /* 0x1f is reserved */
    /* 0x20 is reserved */
    /* 0x28 and 0x29 are reserved */
    /* 0x30 and 0x31 are reserved */

    SD32 = 0x36, /* MIPS64 */
    LD32 = 0x37, /* MIPS64 */

    /* 0x38 and 0x39 are reserved */

/* POOL32A encoding of minor opcode field */

    /* These opcodes are distinguished only by bits 9..6; those bits are
     * what are recorded below. */

    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */

/*
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers VolumeIV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *      Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
 */

    /* begin of microMIPS32 DSP */

    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */

    /* end of microMIPS32 DSP */

    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */

    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */

/* POOL32C encoding of minor opcode field (bits 15..12) */
    /* 0xa is reserved */
    /* 0x6 is reserved */

/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    CABS_COND_FMT = 0x1c, /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */

/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */

/* POOL16A encoding of minor opcode field */

/* POOL16B encoding of minor opcode field */

/* POOL16C encoding of minor opcode field */

/* POOL16D encoding of minor opcode field */

/* POOL16E encoding of minor opcode field */
static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
    return map[r];

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
    return map[r];

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
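/* For example, SIMM(op, 0, 12) isolates the low 12 bits of op and
 * sign-extends them by shifting left and then arithmetically right by
 * 20 bits, while ZIMM(op, 0, 12) returns the same field zero-extended. */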
static void gen_addiur1sp(DisasContext *ctx)
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);

static void gen_addiur2(DisasContext *ctx)
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);

static void gen_addiusp(DisasContext *ctx)
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
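/* As reconstructed above, the 9-bit ADDIUSP field is decoded in ranges:
 * encodings 0 and 1 map to 256 and 257, values up to 255 are used
 * as-is, 256..509 become negative (encoded - 512), and the remaining
 * encodings map to encoded - 768; the result is then scaled by 4 before
 * being added to $sp. */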
static void gen_addius5(DisasContext *ctx)
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);

static void gen_andi16(DisasContext *ctx)
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
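/* Note: gen_ldst_multiple implements the microMIPS LWM/SWM (and, for
 * MIPS64, LDM/SDM) instructions by passing the register list and memory
 * index to the corresponding helper; it is not allowed in a branch
 * delay slot. */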
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
    const char *opn = "ldst_multiple";

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception(ctx, EXCP_RI);
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    gen_helper_lwm(cpu_env, t0, t1, t2);
    gen_helper_swm(cpu_env, t0, t1, t2);
#ifdef TARGET_MIPS64
    gen_helper_ldm(cpu_env, t0, t1, t2);
    gen_helper_sdm(cpu_env, t0, t1, t2);

    MIPS_DEBUG("%s, %x, %d(%s)", opn, reglist, offset, regnames[base]);
    tcg_temp_free_i32(t2);
static void gen_pool16c_insn(DisasContext *ctx)
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
        gen_logic(ctx, OPC_NOR, rd, rs, 0);
        gen_logic(ctx, OPC_XOR, rd, rd, rs);
        gen_logic(ctx, OPC_AND, rd, rd, rs);
        gen_logic(ctx, OPC_OR, rd, rd, rs);

        static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);

        gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
                          29, offset << 2);

        static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);

        gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
                          29, offset << 2);

        int reg = ctx->opcode & 0x1f;

        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);

        int reg = ctx->opcode & 0x1f;

        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */

        int reg = ctx->opcode & 0x1f;

        gen_compute_branch(ctx, opc, 2, reg, 31, 0);

        gen_HILO(ctx, OPC_MFHI, 0, uMIPS_RS5(ctx->opcode));
        gen_HILO(ctx, OPC_MFLO, 0, uMIPS_RS5(ctx->opcode));
        generate_exception(ctx, EXCP_BREAK);

        /* XXX: not clear which exception should be raised
         *      when in debug mode...
         */
        check_insn(ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
        } else {
            generate_exception(ctx, EXCP_DBp);
        }

    case JRADDIUSP + 0:
    case JRADDIUSP + 1:
        int imm = ZIMM(ctx->opcode, 0, 5);

        gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */

        generate_exception(ctx, EXCP_RI);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);
    gen_load_gpr(t1, index);
    tcg_gen_shli_tl(t1, t1, 2);
    gen_op_addr_add(ctx, t0, t1, t0);

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);
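/* Note: gen_ldst_pair implements the microMIPS load/store pair
 * instructions (LWP/SWP and, under TARGET_MIPS64, LDP/SDP): rd and rd+1
 * are transferred from two consecutive words or doublewords, and using
 * $31 as rd or placing the instruction in a delay slot raises a
 * Reserved Instruction exception. */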
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
    const char *opn = "ldst_pair";

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception(ctx, EXCP_RI);
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

        generate_exception(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd+1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

#ifdef TARGET_MIPS64
        generate_exception(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd+1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s, %s, %d(%s)", opn, regnames[rd], offset, regnames[base]);
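/* Note: gen_pool32axf decodes the POOL32Axf minor-opcode extension
 * field: trap, CP0 move, multiply/divide, bit-shuffle, JALR variants,
 * TLB maintenance, interrupt enable/disable and HI/LO moves all funnel
 * through the shared generators used by the classic MIPS32 decoder. */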
static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
        mips32_op = OPC_TEQ;
        mips32_op = OPC_TGE;
        mips32_op = OPC_TGEU;
        mips32_op = OPC_TLT;
        mips32_op = OPC_TLTU;
        mips32_op = OPC_TNE;
        gen_trap(ctx, mips32_op, rs, rt, -1);
#ifndef CONFIG_USER_ONLY
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        gen_mfc0(ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);

        check_cp0_enabled(ctx);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rs, (ctx->opcode >> 11) & 0x7);

        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MADD, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MADDU, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUB, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUBU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;

        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MULT, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MULTU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;

        gen_bshfl(ctx, OPC_SEB, rs, rt);
        gen_bshfl(ctx, OPC_SEH, rs, rt);
        mips32_op = OPC_CLO;
        mips32_op = OPC_CLZ;
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, mips32_op, rt, rs);
        gen_rdhwr(ctx, rt, rs);
        gen_bshfl(ctx, OPC_WSBH, rs, rt);
        mips32_op = OPC_MULT;
        mips32_op = OPC_MULTU;
        mips32_op = OPC_DIV;
        mips32_op = OPC_DIVU;
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, 0, rs, rt);
        mips32_op = OPC_MADD;
        mips32_op = OPC_MADDU;
        mips32_op = OPC_MSUB;
        mips32_op = OPC_MSUBU;
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, 0, rs, rt);
        goto pool32axf_invalid;

        generate_exception_err(ctx, EXCP_CpU, 2);
        goto pool32axf_invalid;

        gen_compute_branch (ctx, OPC_JALR, 4, rs, rt, 0);
        gen_compute_branch (ctx, OPC_JALRS, 4, rs, rt, 0);
        goto pool32axf_invalid;

        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rs);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rs);
        goto pool32axf_invalid;
#ifndef CONFIG_USER_ONLY
        mips32_op = OPC_TLBP;
        mips32_op = OPC_TLBR;
        mips32_op = OPC_TLBWI;
        mips32_op = OPC_TLBWR;
        mips32_op = OPC_WAIT;
        mips32_op = OPC_DERET;
        mips32_op = OPC_ERET;
        gen_cp0(env, ctx, mips32_op, rt, rs);
        goto pool32axf_invalid;

        check_cp0_enabled(ctx);
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_helper_di(t0, cpu_env);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;

        check_cp0_enabled(ctx);
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_helper_ei(t0, cpu_env);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto pool32axf_invalid;

        generate_exception(ctx, EXCP_SYSCALL);
        ctx->bstate = BS_STOP;
        check_insn(ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
        } else {
            generate_exception(ctx, EXCP_DBp);
        }
        goto pool32axf_invalid;

        switch (minor & 3) {
            gen_HILO(ctx, OPC_MFHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MFLO, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTLO, minor >> 2, rs);
            goto pool32axf_invalid;

        gen_HILO(ctx, OPC_MFHI, 0, rs);
        gen_HILO(ctx, OPC_MFLO, 0, rs);
        gen_HILO(ctx, OPC_MTHI, 0, rs);
        gen_HILO(ctx, OPC_MTLO, 0, rs);
        goto pool32axf_invalid;

    pool32axf_invalid:
        MIPS_INVAL("pool32axf");
        generate_exception(ctx, EXCP_RI);
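/* Note: gen_pool32fxf maps POOL32Fxf encodings onto the corresponding
 * MIPS32 FPU opcodes (moves to/from CP1, rounding/truncation, format
 * conversions and conditional moves) and then reuses gen_farith,
 * gen_cp1 and gen_movci to emit the actual operations. */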
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */

static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc

    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;

    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;

    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;

    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;

    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;

    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;

    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;

    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;

        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;

        /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;

        /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);

        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);

        MIPS_INVAL("pool32fxf");
        generate_exception(ctx, EXCP_RI);
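/* Note: decode_micromips32_opc fetches the second halfword of a 32-bit
 * microMIPS instruction, splices it into ctx->opcode, extracts the
 * register and immediate fields, and dispatches on the major opcode to
 * the pool decoders above or to the shared MIPS32 generators. */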
static void decode_micromips32_opc (CPUMIPSState *env, DisasContext *ctx,
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;

        minor = ctx->opcode & 0x3f;
        minor = (ctx->opcode >> 6) & 0xf;
            mips32_op = OPC_SLL;
            mips32_op = OPC_SRA;
            mips32_op = OPC_SRL;
            mips32_op = OPC_ROTR;
            gen_shift_imm(ctx, mips32_op, rt, rs, rd);
            goto pool32a_invalid;

        minor = (ctx->opcode >> 6) & 0xf;
            mips32_op = OPC_ADD;
            mips32_op = OPC_ADDU;
            mips32_op = OPC_SUB;
            mips32_op = OPC_SUBU;
            mips32_op = OPC_MUL;
            gen_arith(ctx, mips32_op, rd, rs, rt);

            mips32_op = OPC_SLLV;
            mips32_op = OPC_SRLV;
            mips32_op = OPC_SRAV;
            mips32_op = OPC_ROTRV;
            gen_shift(ctx, mips32_op, rd, rs, rt);

            /* Logical operations */
            mips32_op = OPC_AND;
            mips32_op = OPC_OR;
            mips32_op = OPC_NOR;
            mips32_op = OPC_XOR;
            gen_logic(ctx, mips32_op, rd, rs, rt);

            /* Set less than */
            mips32_op = OPC_SLT;
            mips32_op = OPC_SLTU;
            gen_slt(ctx, mips32_op, rd, rs, rt);
            goto pool32a_invalid;

        minor = (ctx->opcode >> 6) & 0xf;
            /* Conditional moves */
            mips32_op = OPC_MOVN;
            mips32_op = OPC_MOVZ;
            gen_cond_move(ctx, mips32_op, rd, rs, rt);
            gen_ldxs(ctx, rs, rt, rd);
            goto pool32a_invalid;

        gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
        gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
        gen_pool32axf(env, ctx, rt, rs);
        generate_exception(ctx, EXCP_BREAK);

    pool32a_invalid:
        MIPS_INVAL("pool32a");
        generate_exception(ctx, EXCP_RI);

        minor = (ctx->opcode >> 12) & 0xf;
            check_cp0_enabled(ctx);
            /* Treat as no-op. */

            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
            gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
            gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));

            MIPS_INVAL("pool32b");
            generate_exception(ctx, EXCP_RI);

        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
                mips32_op = OPC_ALNV_PS;
                mips32_op = OPC_MADD_S;
                mips32_op = OPC_MADD_D;
                mips32_op = OPC_MADD_PS;
                mips32_op = OPC_MSUB_S;
                mips32_op = OPC_MSUB_D;
                mips32_op = OPC_MSUB_PS;
                mips32_op = OPC_NMADD_S;
                mips32_op = OPC_NMADD_D;
                mips32_op = OPC_NMADD_PS;
                mips32_op = OPC_NMSUB_S;
                mips32_op = OPC_NMSUB_D;
                mips32_op = OPC_NMSUB_PS;
                gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);

            case CABS_COND_FMT:
                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmpabs_s(ctx, cond, rt, rs, cc);
                    gen_cmpabs_d(ctx, cond, rt, rs, cc);
                    gen_cmpabs_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;

                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmp_s(ctx, cond, rt, rs, cc);
                    gen_cmp_d(ctx, cond, rt, rs, cc);
                    gen_cmp_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;

                gen_pool32fxf(ctx, rt, rs);

                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_PLL_PS;
                    mips32_op = OPC_PLU_PS;
                    mips32_op = OPC_PUL_PS;
                    mips32_op = OPC_PUU_PS;
                    mips32_op = OPC_CVT_PS_S;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;

                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_LWXC1;
                    mips32_op = OPC_SWXC1;
                    mips32_op = OPC_LDXC1;
                    mips32_op = OPC_SDXC1;
                    mips32_op = OPC_LUXC1;
                    mips32_op = OPC_SUXC1;
                    gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
                    goto pool32f_invalid;

                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_RSQRT2_S;
                    mips32_op = OPC_RSQRT2_D;
                    mips32_op = OPC_RSQRT2_PS;
                    goto pool32f_invalid;

                    mips32_op = OPC_RECIP2_S;
                    mips32_op = OPC_RECIP2_D;
                    mips32_op = OPC_RECIP2_PS;
                    goto pool32f_invalid;

                    mips32_op = OPC_ADDR_PS;
                    mips32_op = OPC_MULR_PS;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;

                /* MOV[FT].fmt and PREFX */
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                        gen_movcf_s(rs, rt, cc, 0);
                        gen_movcf_d(ctx, rs, rt, cc, 0);
                        gen_movcf_ps(ctx, rs, rt, cc, 0);
                        goto pool32f_invalid;

                        gen_movcf_s(rs, rt, cc, 1);
                        gen_movcf_d(ctx, rs, rt, cc, 1);
                        gen_movcf_ps(ctx, rs, rt, cc, 1);
                        goto pool32f_invalid;
                    goto pool32f_invalid;

#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
        mips32_op = OPC_##prfx##_S;                     \
        mips32_op = OPC_##prfx##_D;                     \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        goto pool32f_invalid;                           \

                /* regular FP ops */
                switch ((ctx->opcode >> 6) & 0x3) {
                    FINSN_3ARG_SDPS(ADD);
                    FINSN_3ARG_SDPS(SUB);
                    FINSN_3ARG_SDPS(MUL);
                    fmt = (ctx->opcode >> 8) & 0x3;
                    if (fmt == 1) {
                        mips32_op = OPC_DIV_D;
                    } else if (fmt == 0) {
                        mips32_op = OPC_DIV_S;
                    } else {
                        goto pool32f_invalid;
                    }
                    goto pool32f_invalid;

                switch ((ctx->opcode >> 6) & 0x3) {
                    FINSN_3ARG_SDPS(MOVN);
                    FINSN_3ARG_SDPS(MOVZ);
                    goto pool32f_invalid;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);

            pool32f_invalid:
                MIPS_INVAL("pool32f");
                generate_exception(ctx, EXCP_RI);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }

        minor = (ctx->opcode >> 21) & 0x1f;
            mips32_op = OPC_BLTZ;
            mips32_op = OPC_BLTZAL;
            mips32_op = OPC_BLTZALS;
            mips32_op = OPC_BGEZ;
            mips32_op = OPC_BGEZAL;
            mips32_op = OPC_BGEZALS;
            mips32_op = OPC_BLEZ;
            mips32_op = OPC_BGTZ;
            gen_compute_branch(ctx, mips32_op, 4, rs, -1, imm << 1);

            mips32_op = OPC_TLTI;
            mips32_op = OPC_TGEI;
            mips32_op = OPC_TLTIU;
            mips32_op = OPC_TGEIU;
            mips32_op = OPC_TNEI;
            mips32_op = OPC_TEQI;
            gen_trap(ctx, mips32_op, rs, -1, imm);

            gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
                               4, rs, 0, imm << 1);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */

            gen_logic_imm(ctx, OPC_LUI, rs, -1, imm);

            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);

            mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1FANY2 : OPC_BC1F;
            mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
12158 mips32_op
= OPC_BC1FANY4
;
12161 mips32_op
= OPC_BC1TANY4
;
12164 check_insn(ctx
, ASE_MIPS3D
);
12167 gen_compute_branch1(ctx
, mips32_op
,
12168 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
12172 /* MIPS DSP: not implemented */
12175 MIPS_INVAL("pool32i");
12176 generate_exception(ctx
, EXCP_RI
);
12181 minor
= (ctx
->opcode
>> 12) & 0xf;
12184 mips32_op
= OPC_LWL
;
12187 mips32_op
= OPC_SWL
;
12190 mips32_op
= OPC_LWR
;
12193 mips32_op
= OPC_SWR
;
12195 #if defined(TARGET_MIPS64)
12197 mips32_op
= OPC_LDL
;
12200 mips32_op
= OPC_SDL
;
12203 mips32_op
= OPC_LDR
;
12206 mips32_op
= OPC_SDR
;
12209 mips32_op
= OPC_LWU
;
12212 mips32_op
= OPC_LLD
;
12216 mips32_op
= OPC_LL
;
12219 gen_ld(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
12222 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
12225 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
12227 #if defined(TARGET_MIPS64)
12229 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
12233 /* Treat as no-op */
12236 MIPS_INVAL("pool32c");
12237 generate_exception(ctx
, EXCP_RI
);
12242 mips32_op
= OPC_ADDI
;
12245 mips32_op
= OPC_ADDIU
;
12247 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
12250 /* Logical operations */
12252 mips32_op
= OPC_ORI
;
12255 mips32_op
= OPC_XORI
;
12258 mips32_op
= OPC_ANDI
;
12260 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
12263 /* Set less than immediate */
12265 mips32_op
= OPC_SLTI
;
12268 mips32_op
= OPC_SLTIU
;
12270 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
12273 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12274 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
);
12277 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
12278 gen_compute_branch(ctx
, OPC_JALS
, 4, rt
, rs
, offset
);
12281 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1);
12284 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1);
12287 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
12288 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
12291 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
12292 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
12294 /* Floating point (COP1) */
12296 mips32_op
= OPC_LWC1
;
12299 mips32_op
= OPC_LDC1
;
12302 mips32_op
= OPC_SWC1
;
12305 mips32_op
= OPC_SDC1
;
12307 gen_cop1_ldst(env
, ctx
, mips32_op
, rt
, rs
, imm
);
12311 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
12312 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
12314 gen_addiupc(ctx
, reg
, offset
, 0, 0);
12317 /* Loads and stores */
12319 mips32_op
= OPC_LB
;
12322 mips32_op
= OPC_LBU
;
12325 mips32_op
= OPC_LH
;
12328 mips32_op
= OPC_LHU
;
12331 mips32_op
= OPC_LW
;
12333 #ifdef TARGET_MIPS64
12335 mips32_op
= OPC_LD
;
12338 mips32_op
= OPC_SD
;
12342 mips32_op
= OPC_SB
;
12345 mips32_op
= OPC_SH
;
12348 mips32_op
= OPC_SW
;
12351 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
12354 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
12357 generate_exception(ctx
, EXCP_RI
);
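/* decode_micromips_opc: top-level microMIPS decoder. It checks halfword alignment, enforces the 16/32-bit delay-slot size restrictions (MIPS_HFLAG_BDS16/BDS32), handles the 16-bit encodings inline and hands 32-bit encodings to decode_micromips32_opc(). */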
12362 static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
12366 /* make sure instructions are on a halfword boundary */
12367 if (ctx->pc & 0x1) {
12368 env->CP0_BadVAddr = ctx->pc;
12369 generate_exception(ctx, EXCP_AdEL);
12370 ctx->bstate = BS_STOP;
12374 op = (ctx->opcode >> 10) & 0x3f;
12375 /* Enforce properly-sized instructions in a delay slot */
12376 if (ctx->hflags & MIPS_HFLAG_BMASK) {
12377 int bits = ctx->hflags & MIPS_HFLAG_BMASK_EXT;
12415 if (bits & MIPS_HFLAG_BDS16) {
12416 generate_exception(ctx, EXCP_RI);
12417 /* Just stop translation; the user is confused. */
12418 ctx->bstate = BS_STOP;
12443 if (bits & MIPS_HFLAG_BDS32) {
12444 generate_exception(ctx, EXCP_RI);
12445 /* Just stop translation; the user is confused. */
12446 ctx->bstate = BS_STOP;
12457 int rd = mmreg(uMIPS_RD(ctx->opcode));
12458 int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
12459 int rs2 = mmreg(uMIPS_RS2(ctx->opcode));
12462 switch (ctx->opcode & 0x1) {
12471 gen_arith(ctx, opc, rd, rs1, rs2);
12476 int rd = mmreg(uMIPS_RD(ctx->opcode));
12477 int rs = mmreg(uMIPS_RS(ctx->opcode));
12478 int amount = (ctx->opcode >> 1) & 0x7;
12480 amount = amount == 0 ? 8 : amount;
12482 switch (ctx->opcode & 0x1) {
12491 gen_shift_imm(ctx, opc, rd, rs, amount);
12495 gen_pool16c_insn(ctx);
12499 int rd = mmreg(uMIPS_RD(ctx->opcode));
12500 int rb = 28; /* GP */
12501 int16_t offset = SIMM(ctx->opcode, 0, 7) << 2;
12503 gen_ld(ctx, OPC_LW, rd, rb, offset);
12507 if (ctx->opcode & 1) {
12508 generate_exception(ctx, EXCP_RI);
12511 int enc_dest = uMIPS_RD(ctx->opcode);
12512 int enc_rt = uMIPS_RS2(ctx->opcode);
12513 int enc_rs = uMIPS_RS1(ctx->opcode);
12514 int rd, rs, re, rt;
12515 static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12516 static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12517 static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12519 rd = rd_enc[enc_dest];
12520 re = re_enc[enc_dest];
12521 rs = rs_rt_enc[enc_rs];
12522 rt = rs_rt_enc[enc_rt];
12524 gen_arith_imm(ctx, OPC_ADDIU, rd, rs, 0);
12525 gen_arith_imm(ctx, OPC_ADDIU, re, rt, 0);
12530 int rd = mmreg(uMIPS_RD(ctx->opcode));
12531 int rb = mmreg(uMIPS_RS(ctx->opcode));
12532 int16_t offset = ZIMM(ctx->opcode, 0, 4);
12533 offset = (offset == 0xf ? -1 : offset);
12535 gen_ld(ctx, OPC_LBU, rd, rb, offset);
12540 int rd = mmreg(uMIPS_RD(ctx->opcode));
12541 int rb = mmreg(uMIPS_RS(ctx->opcode));
12542 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;
12544 gen_ld(ctx, OPC_LHU, rd, rb, offset);
12549 int rd = (ctx->opcode >> 5) & 0x1f;
12550 int rb = 29; /* SP */
12551 int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;
12553 gen_ld(ctx, OPC_LW, rd, rb, offset);
12558 int rd = mmreg(uMIPS_RD(ctx->opcode));
12559 int rb = mmreg(uMIPS_RS(ctx->opcode));
12560 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;
12562 gen_ld(ctx, OPC_LW, rd, rb, offset);
12567 int rd = mmreg2(uMIPS_RD(ctx->opcode));
12568 int rb = mmreg(uMIPS_RS(ctx->opcode));
12569 int16_t offset = ZIMM(ctx->opcode, 0, 4);
12571 gen_st(ctx, OPC_SB, rd, rb, offset);
12576 int rd = mmreg2(uMIPS_RD(ctx->opcode));
12577 int rb = mmreg(uMIPS_RS(ctx->opcode));
12578 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;
12580 gen_st(ctx, OPC_SH, rd, rb, offset);
12585 int rd = (ctx->opcode >> 5) & 0x1f;
12586 int rb = 29; /* SP */
12587 int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;
12589 gen_st(ctx, OPC_SW, rd, rb, offset);
12594 int rd = mmreg2(uMIPS_RD(ctx->opcode));
12595 int rb = mmreg(uMIPS_RS(ctx->opcode));
12596 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;
12598 gen_st(ctx, OPC_SW, rd, rb, offset);
12603 int rd = uMIPS_RD5(ctx->opcode);
12604 int rs = uMIPS_RS5(ctx->opcode);
12606 gen_arith_imm(ctx, OPC_ADDIU, rd, rs, 0);
12613 switch (ctx->opcode & 0x1) {
12623 switch (ctx->opcode & 0x1) {
12628 gen_addiur1sp(ctx);
12633 gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0,
12634 SIMM(ctx->opcode, 0, 10) << 1);
12638 gen_compute_branch(ctx, op == BNEZ16 ? OPC_BNE : OPC_BEQ, 2,
12639 mmreg(uMIPS_RD(ctx->opcode)),
12640 0, SIMM(ctx->opcode, 0, 7) << 1);
12644 int reg = mmreg(uMIPS_RD(ctx->opcode));
12645 int imm = ZIMM(ctx->opcode, 0, 7);
12647 imm = (imm == 0x7f ? -1 : imm);
12648 tcg_gen_movi_tl(cpu_gpr[reg], imm);
12658 generate_exception(ctx, EXCP_RI);
12661 decode_micromips32_opc (env, ctx, op);
12668 /* SmartMIPS extension to MIPS32 */
12670 #if defined(TARGET_MIPS64)
12672 /* MDMX extension to MIPS64 */
12676 /* MIPSDSP functions. */
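/* gen_mipsdsp_ld: indexed loads for the DSP ASE. The effective address is taken from the base and/or offset GPRs and the loaded value (MO_UB, MO_TESW, MO_TESL or MO_TEQ) is written back to rd. */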
12677 static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
12678 int rd, int base, int offset)
12680 const char *opn = "ldx";
12684 t0 = tcg_temp_new();
12687 gen_load_gpr(t0, offset);
12688 } else if (offset == 0) {
12689 gen_load_gpr(t0, base);
12691 gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
12696 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
12697 gen_store_gpr(t0, rd);
12701 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
12702 gen_store_gpr(t0, rd);
12706 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
12707 gen_store_gpr(t0, rd);
12710 #if defined(TARGET_MIPS64)
12712 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
12713 gen_store_gpr(t0, rd);
12718 (void)opn; /* avoid a compiler warning */
12719 MIPS_DEBUG("%s %s, %s(%s)", opn,
12720 regnames[rd], regnames[offset], regnames[base]);
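/* gen_mipsdsp_arith: DSP add/subtract/precision-expand/reduce operations. Operands are read from GPR[v1]/GPR[v2] and the result is written to GPR[ret] by the corresponding helper. */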
12724 static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
12725 int ret, int v1, int v2)
12727 const char *opn = "mipsdsp arith";
12732 /* Treat as NOP. */
12737 v1_t = tcg_temp_new();
12738 v2_t = tcg_temp_new();
12740 gen_load_gpr(v1_t, v1);
12741 gen_load_gpr(v2_t, v2);
12744 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
12745 case OPC_MULT_G_2E:
12749 gen_helper_adduh_qb(cpu_gpr[ret], v1_t, v2_t);
12751 case OPC_ADDUH_R_QB:
12752 gen_helper_adduh_r_qb(cpu_gpr[ret], v1_t, v2_t);
12755 gen_helper_addqh_ph(cpu_gpr[ret], v1_t, v2_t);
12757 case OPC_ADDQH_R_PH:
12758 gen_helper_addqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
12761 gen_helper_addqh_w(cpu_gpr[ret], v1_t, v2_t);
12763 case OPC_ADDQH_R_W:
12764 gen_helper_addqh_r_w(cpu_gpr[ret], v1_t, v2_t);
12767 gen_helper_subuh_qb(cpu_gpr[ret], v1_t, v2_t);
12769 case OPC_SUBUH_R_QB:
12770 gen_helper_subuh_r_qb(cpu_gpr[ret], v1_t, v2_t);
12773 gen_helper_subqh_ph(cpu_gpr[ret], v1_t, v2_t);
12775 case OPC_SUBQH_R_PH:
12776 gen_helper_subqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
12779 gen_helper_subqh_w(cpu_gpr[ret], v1_t, v2_t);
12781 case OPC_SUBQH_R_W:
12782 gen_helper_subqh_r_w(cpu_gpr[ret], v1_t, v2_t);
12786 case OPC_ABSQ_S_PH_DSP:
12788 case OPC_ABSQ_S_QB:
12790 gen_helper_absq_s_qb(cpu_gpr[ret], v2_t, cpu_env);
12792 case OPC_ABSQ_S_PH:
12794 gen_helper_absq_s_ph(cpu_gpr[ret], v2_t, cpu_env);
12798 gen_helper_absq_s_w(cpu_gpr[ret], v2_t, cpu_env);
12800 case OPC_PRECEQ_W_PHL:
12802 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFF0000);
12803 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
12805 case OPC_PRECEQ_W_PHR:
12807 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0x0000FFFF);
12808 tcg_gen_shli_tl(cpu_gpr[ret], cpu_gpr[ret], 16);
12809 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
12811 case OPC_PRECEQU_PH_QBL:
12813 gen_helper_precequ_ph_qbl(cpu_gpr[ret], v2_t);
12815 case OPC_PRECEQU_PH_QBR:
12817 gen_helper_precequ_ph_qbr(cpu_gpr[ret], v2_t);
12819 case OPC_PRECEQU_PH_QBLA:
12821 gen_helper_precequ_ph_qbla(cpu_gpr[ret], v2_t);
12823 case OPC_PRECEQU_PH_QBRA:
12825 gen_helper_precequ_ph_qbra(cpu_gpr[ret], v2_t);
12827 case OPC_PRECEU_PH_QBL:
12829 gen_helper_preceu_ph_qbl(cpu_gpr[ret], v2_t);
12831 case OPC_PRECEU_PH_QBR:
12833 gen_helper_preceu_ph_qbr(cpu_gpr[ret], v2_t);
12835 case OPC_PRECEU_PH_QBLA:
12837 gen_helper_preceu_ph_qbla(cpu_gpr[ret], v2_t);
12839 case OPC_PRECEU_PH_QBRA:
12841 gen_helper_preceu_ph_qbra(cpu_gpr[ret], v2_t);
12845 case OPC_ADDU_QB_DSP:
12849 gen_helper_addq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12851 case OPC_ADDQ_S_PH:
12853 gen_helper_addq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12857 gen_helper_addq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12861 gen_helper_addu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12863 case OPC_ADDU_S_QB:
12865 gen_helper_addu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12869 gen_helper_addu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12871 case OPC_ADDU_S_PH:
12873 gen_helper_addu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12877 gen_helper_subq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12879 case OPC_SUBQ_S_PH:
12881 gen_helper_subq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12885 gen_helper_subq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12889 gen_helper_subu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12891 case OPC_SUBU_S_QB:
12893 gen_helper_subu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12897 gen_helper_subu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12899 case OPC_SUBU_S_PH:
12901 gen_helper_subu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12905 gen_helper_addsc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12909 gen_helper_addwc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12913 gen_helper_modsub(cpu_gpr[ret], v1_t, v2_t);
12915 case OPC_RADDU_W_QB:
12917 gen_helper_raddu_w_qb(cpu_gpr[ret], v1_t);
12921 case OPC_CMPU_EQ_QB_DSP:
12923 case OPC_PRECR_QB_PH:
12925 gen_helper_precr_qb_ph(cpu_gpr[ret], v1_t, v2_t);
12927 case OPC_PRECRQ_QB_PH:
12929 gen_helper_precrq_qb_ph(cpu_gpr[ret], v1_t, v2_t);
12931 case OPC_PRECR_SRA_PH_W:
12934 TCGv_i32 sa_t = tcg_const_i32(v2);
12935 gen_helper_precr_sra_ph_w(cpu_gpr[ret], sa_t, v1_t,
12937 tcg_temp_free_i32(sa_t);
12940 case OPC_PRECR_SRA_R_PH_W:
12943 TCGv_i32 sa_t = tcg_const_i32(v2);
12944 gen_helper_precr_sra_r_ph_w(cpu_gpr[ret], sa_t, v1_t,
12946 tcg_temp_free_i32(sa_t);
12949 case OPC_PRECRQ_PH_W:
12951 gen_helper_precrq_ph_w(cpu_gpr[ret], v1_t, v2_t);
12953 case OPC_PRECRQ_RS_PH_W:
12955 gen_helper_precrq_rs_ph_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12957 case OPC_PRECRQU_S_QB_PH:
12959 gen_helper_precrqu_s_qb_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
12963 #ifdef TARGET_MIPS64
12964 case OPC_ABSQ_S_QH_DSP:
12966 case OPC_PRECEQ_L_PWL:
12968 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFFFFFF00000000ull);
12970 case OPC_PRECEQ_L_PWR:
12972 tcg_gen_shli_tl(cpu_gpr[ret], v2_t, 32);
12974 case OPC_PRECEQ_PW_QHL:
12976 gen_helper_preceq_pw_qhl(cpu_gpr[ret], v2_t);
12978 case OPC_PRECEQ_PW_QHR:
12980 gen_helper_preceq_pw_qhr(cpu_gpr[ret], v2_t);
12982 case OPC_PRECEQ_PW_QHLA:
12984 gen_helper_preceq_pw_qhla(cpu_gpr[ret], v2_t);
12986 case OPC_PRECEQ_PW_QHRA:
12988 gen_helper_preceq_pw_qhra(cpu_gpr[ret], v2_t);
12990 case OPC_PRECEQU_QH_OBL:
12992 gen_helper_precequ_qh_obl(cpu_gpr[ret], v2_t);
12994 case OPC_PRECEQU_QH_OBR:
12996 gen_helper_precequ_qh_obr(cpu_gpr[ret], v2_t);
12998 case OPC_PRECEQU_QH_OBLA:
13000 gen_helper_precequ_qh_obla(cpu_gpr[ret], v2_t);
13002 case OPC_PRECEQU_QH_OBRA:
13004 gen_helper_precequ_qh_obra(cpu_gpr[ret], v2_t);
13006 case OPC_PRECEU_QH_OBL:
13008 gen_helper_preceu_qh_obl(cpu_gpr[ret], v2_t);
13010 case OPC_PRECEU_QH_OBR:
13012 gen_helper_preceu_qh_obr(cpu_gpr[ret], v2_t);
13014 case OPC_PRECEU_QH_OBLA:
13016 gen_helper_preceu_qh_obla(cpu_gpr[ret], v2_t);
13018 case OPC_PRECEU_QH_OBRA:
13020 gen_helper_preceu_qh_obra(cpu_gpr[ret], v2_t);
13022 case OPC_ABSQ_S_OB:
13024 gen_helper_absq_s_ob(cpu_gpr[ret], v2_t, cpu_env);
13026 case OPC_ABSQ_S_PW:
13028 gen_helper_absq_s_pw(cpu_gpr[ret], v2_t, cpu_env);
13030 case OPC_ABSQ_S_QH:
13032 gen_helper_absq_s_qh(cpu_gpr[ret], v2_t, cpu_env);
13036 case OPC_ADDU_OB_DSP:
13038 case OPC_RADDU_L_OB:
13040 gen_helper_raddu_l_ob(cpu_gpr[ret], v1_t);
13044 gen_helper_subq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13046 case OPC_SUBQ_S_PW:
13048 gen_helper_subq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13052 gen_helper_subq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13054 case OPC_SUBQ_S_QH:
13056 gen_helper_subq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13060 gen_helper_subu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13062 case OPC_SUBU_S_OB:
13064 gen_helper_subu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13068 gen_helper_subu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13070 case OPC_SUBU_S_QH:
13072 gen_helper_subu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13076 gen_helper_subuh_ob(cpu_gpr[ret], v1_t, v2_t);
13078 case OPC_SUBUH_R_OB:
13080 gen_helper_subuh_r_ob(cpu_gpr[ret], v1_t, v2_t);
13084 gen_helper_addq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13086 case OPC_ADDQ_S_PW:
13088 gen_helper_addq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13092 gen_helper_addq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13094 case OPC_ADDQ_S_QH:
13096 gen_helper_addq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13100 gen_helper_addu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13102 case OPC_ADDU_S_OB:
13104 gen_helper_addu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13108 gen_helper_addu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13110 case OPC_ADDU_S_QH:
13112 gen_helper_addu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13116 gen_helper_adduh_ob(cpu_gpr[ret], v1_t, v2_t);
13118 case OPC_ADDUH_R_OB:
13120 gen_helper_adduh_r_ob(cpu_gpr[ret], v1_t, v2_t);
13124 case OPC_CMPU_EQ_OB_DSP:
13126 case OPC_PRECR_OB_QH:
13128 gen_helper_precr_ob_qh(cpu_gpr[ret], v1_t, v2_t);
13130 case OPC_PRECR_SRA_QH_PW:
13133 TCGv_i32 ret_t = tcg_const_i32(ret);
13134 gen_helper_precr_sra_qh_pw(v2_t, v1_t, v2_t, ret_t);
13135 tcg_temp_free_i32(ret_t);
13138 case OPC_PRECR_SRA_R_QH_PW:
13141 TCGv_i32 sa_v = tcg_const_i32(ret);
13142 gen_helper_precr_sra_r_qh_pw(v2_t, v1_t, v2_t, sa_v);
13143 tcg_temp_free_i32(sa_v);
13146 case OPC_PRECRQ_OB_QH:
13148 gen_helper_precrq_ob_qh(cpu_gpr[ret], v1_t, v2_t);
13150 case OPC_PRECRQ_PW_L:
13152 gen_helper_precrq_pw_l(cpu_gpr[ret], v1_t, v2_t);
13154 case OPC_PRECRQ_QH_PW:
13156 gen_helper_precrq_qh_pw(cpu_gpr[ret], v1_t, v2_t);
13158 case OPC_PRECRQ_RS_QH_PW:
13160 gen_helper_precrq_rs_qh_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13162 case OPC_PRECRQU_S_OB_QH:
13164 gen_helper_precrqu_s_ob_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13171 tcg_temp_free(v1_t);
13172 tcg_temp_free(v2_t);
13174 (void)opn; /* avoid a compiler warning */
13175 MIPS_DEBUG("%s", opn);
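/* gen_mipsdsp_shift: DSP shift operations. t0 holds the immediate shift amount for the SHLL/SHRL/SHRA forms, while the register-variable (-V) forms shift by GPR[v1]. */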
13178 static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
13179 int ret, int v1, int v2)
13182 const char *opn = "mipsdsp shift";
13188 /* Treat as NOP. */
13193 t0 = tcg_temp_new();
13194 v1_t = tcg_temp_new();
13195 v2_t = tcg_temp_new();
13197 tcg_gen_movi_tl(t0, v1);
13198 gen_load_gpr(v1_t, v1);
13199 gen_load_gpr(v2_t, v2);
13202 case OPC_SHLL_QB_DSP:
13204 op2 = MASK_SHLL_QB(ctx->opcode);
13208 gen_helper_shll_qb(cpu_gpr[ret], t0, v2_t, cpu_env);
13212 gen_helper_shll_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13216 gen_helper_shll_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
13220 gen_helper_shll_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13222 case OPC_SHLL_S_PH:
13224 gen_helper_shll_s_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
13226 case OPC_SHLLV_S_PH:
13228 gen_helper_shll_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13232 gen_helper_shll_s_w(cpu_gpr[ret], t0, v2_t, cpu_env);
13234 case OPC_SHLLV_S_W:
13236 gen_helper_shll_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13240 gen_helper_shrl_qb(cpu_gpr[ret], t0, v2_t);
13244 gen_helper_shrl_qb(cpu_gpr[ret], v1_t, v2_t);
13248 gen_helper_shrl_ph(cpu_gpr[ret], t0, v2_t);
13252 gen_helper_shrl_ph(cpu_gpr[ret], v1_t, v2_t);
13256 gen_helper_shra_qb(cpu_gpr[ret], t0, v2_t);
13258 case OPC_SHRA_R_QB:
13260 gen_helper_shra_r_qb(cpu_gpr[ret], t0, v2_t);
13264 gen_helper_shra_qb(cpu_gpr[ret], v1_t, v2_t);
13266 case OPC_SHRAV_R_QB:
13268 gen_helper_shra_r_qb(cpu_gpr[ret], v1_t, v2_t);
13272 gen_helper_shra_ph(cpu_gpr[ret], t0, v2_t);
13274 case OPC_SHRA_R_PH:
13276 gen_helper_shra_r_ph(cpu_gpr[ret], t0, v2_t);
13280 gen_helper_shra_ph(cpu_gpr[ret], v1_t, v2_t);
13282 case OPC_SHRAV_R_PH:
13284 gen_helper_shra_r_ph(cpu_gpr[ret], v1_t, v2_t);
13288 gen_helper_shra_r_w(cpu_gpr[ret], t0, v2_t);
13290 case OPC_SHRAV_R_W:
13292 gen_helper_shra_r_w(cpu_gpr[ret], v1_t, v2_t);
13294 default: /* Invalid */
13295 MIPS_INVAL("MASK SHLL.QB");
13296 generate_exception(ctx, EXCP_RI);
13301 #ifdef TARGET_MIPS64
13302 case OPC_SHLL_OB_DSP:
13303 op2 = MASK_SHLL_OB(ctx->opcode);
13307 gen_helper_shll_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
13311 gen_helper_shll_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
13313 case OPC_SHLL_S_PW:
13315 gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
13317 case OPC_SHLLV_S_PW:
13319 gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
13323 gen_helper_shll_ob(cpu_gpr[ret], v2_t, t0, cpu_env);
13327 gen_helper_shll_ob(cpu_gpr[ret], v2_t, v1_t, cpu_env);
13331 gen_helper_shll_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
13335 gen_helper_shll_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
13337 case OPC_SHLL_S_QH:
13339 gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
13341 case OPC_SHLLV_S_QH:
13343 gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
13347 gen_helper_shra_ob(cpu_gpr[ret], v2_t, t0);
13351 gen_helper_shra_ob(cpu_gpr[ret], v2_t, v1_t);
13353 case OPC_SHRA_R_OB:
13355 gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, t0);
13357 case OPC_SHRAV_R_OB:
13359 gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, v1_t);
13363 gen_helper_shra_pw(cpu_gpr[ret], v2_t, t0);
13367 gen_helper_shra_pw(cpu_gpr[ret], v2_t, v1_t);
13369 case OPC_SHRA_R_PW:
13371 gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, t0);
13373 case OPC_SHRAV_R_PW:
13375 gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, v1_t);
13379 gen_helper_shra_qh(cpu_gpr[ret], v2_t, t0);
13383 gen_helper_shra_qh(cpu_gpr[ret], v2_t, v1_t);
13385 case OPC_SHRA_R_QH:
13387 gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, t0);
13389 case OPC_SHRAV_R_QH:
13391 gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, v1_t);
13395 gen_helper_shrl_ob(cpu_gpr[ret], v2_t, t0);
13399 gen_helper_shrl_ob(cpu_gpr[ret], v2_t, v1_t);
13403 gen_helper_shrl_qh(cpu_gpr[ret], v2_t, t0);
13407 gen_helper_shrl_qh(cpu_gpr[ret], v2_t, v1_t);
13409 default: /* Invalid */
13410 MIPS_INVAL("MASK SHLL.OB");
13411 generate_exception(ctx, EXCP_RI);
13419 tcg_temp_free(v1_t);
13420 tcg_temp_free(v2_t);
13421 (void)opn; /* avoid a compiler warning */
13422 MIPS_DEBUG("%s", opn);
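/* gen_mipsdsp_multiply: DSP multiply and dot-product operations. Accumulator-based variants pass the accumulator index through t0 to the helper; the GPR-writing variants store their result directly in cpu_gpr[ret]. */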
13425 static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
13426 int ret, int v1, int v2, int check_ret)
13428 const char *opn = "mipsdsp multiply";
13433 if ((ret == 0) && (check_ret == 1)) {
13434 /* Treat as NOP. */
13439 t0 = tcg_temp_new_i32();
13440 v1_t = tcg_temp_new();
13441 v2_t = tcg_temp_new();
13443 tcg_gen_movi_i32(t0, ret);
13444 gen_load_gpr(v1_t, v1);
13445 gen_load_gpr(v2_t, v2);
13448 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
13449 * the same mask and op1. */
13450 case OPC_MULT_G_2E:
13454 gen_helper_mul_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13457 gen_helper_mul_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13460 gen_helper_mulq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13462 case OPC_MULQ_RS_W:
13463 gen_helper_mulq_rs_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13467 case OPC_DPA_W_PH_DSP:
13469 case OPC_DPAU_H_QBL:
13471 gen_helper_dpau_h_qbl(t0, v1_t, v2_t, cpu_env);
13473 case OPC_DPAU_H_QBR:
13475 gen_helper_dpau_h_qbr(t0, v1_t, v2_t, cpu_env);
13477 case OPC_DPSU_H_QBL:
13479 gen_helper_dpsu_h_qbl(t0, v1_t, v2_t, cpu_env);
13481 case OPC_DPSU_H_QBR:
13483 gen_helper_dpsu_h_qbr(t0, v1_t, v2_t, cpu_env);
13487 gen_helper_dpa_w_ph(t0, v1_t, v2_t, cpu_env);
13489 case OPC_DPAX_W_PH:
13491 gen_helper_dpax_w_ph(t0, v1_t, v2_t, cpu_env);
13493 case OPC_DPAQ_S_W_PH:
13495 gen_helper_dpaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
13497 case OPC_DPAQX_S_W_PH:
13499 gen_helper_dpaqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
13501 case OPC_DPAQX_SA_W_PH:
13503 gen_helper_dpaqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
13507 gen_helper_dps_w_ph(t0, v1_t, v2_t, cpu_env);
13509 case OPC_DPSX_W_PH:
13511 gen_helper_dpsx_w_ph(t0, v1_t, v2_t, cpu_env);
13513 case OPC_DPSQ_S_W_PH:
13515 gen_helper_dpsq_s_w_ph(t0, v1_t, v2_t, cpu_env);
13517 case OPC_DPSQX_S_W_PH:
13519 gen_helper_dpsqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
13521 case OPC_DPSQX_SA_W_PH:
13523 gen_helper_dpsqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
13525 case OPC_MULSAQ_S_W_PH:
13527 gen_helper_mulsaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
13529 case OPC_DPAQ_SA_L_W:
13531 gen_helper_dpaq_sa_l_w(t0, v1_t, v2_t, cpu_env);
13533 case OPC_DPSQ_SA_L_W:
13535 gen_helper_dpsq_sa_l_w(t0, v1_t, v2_t, cpu_env);
13537 case OPC_MAQ_S_W_PHL:
13539 gen_helper_maq_s_w_phl(t0, v1_t, v2_t, cpu_env);
13541 case OPC_MAQ_S_W_PHR:
13543 gen_helper_maq_s_w_phr(t0, v1_t, v2_t, cpu_env);
13545 case OPC_MAQ_SA_W_PHL:
13547 gen_helper_maq_sa_w_phl(t0, v1_t, v2_t, cpu_env);
13549 case OPC_MAQ_SA_W_PHR:
13551 gen_helper_maq_sa_w_phr(t0, v1_t, v2_t, cpu_env);
13553 case OPC_MULSA_W_PH:
13555 gen_helper_mulsa_w_ph(t0, v1_t, v2_t, cpu_env);
13559 #ifdef TARGET_MIPS64
13560 case OPC_DPAQ_W_QH_DSP:
13562 int ac = ret & 0x03;
13563 tcg_gen_movi_i32(t0, ac);
13568 gen_helper_dmadd(v1_t, v2_t, t0, cpu_env);
13572 gen_helper_dmaddu(v1_t, v2_t, t0, cpu_env);
13576 gen_helper_dmsub(v1_t, v2_t, t0, cpu_env);
13580 gen_helper_dmsubu(v1_t, v2_t, t0, cpu_env);
13584 gen_helper_dpa_w_qh(v1_t, v2_t, t0, cpu_env);
13586 case OPC_DPAQ_S_W_QH:
13588 gen_helper_dpaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
13590 case OPC_DPAQ_SA_L_PW:
13592 gen_helper_dpaq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
13594 case OPC_DPAU_H_OBL:
13596 gen_helper_dpau_h_obl(v1_t, v2_t, t0, cpu_env);
13598 case OPC_DPAU_H_OBR:
13600 gen_helper_dpau_h_obr(v1_t, v2_t, t0, cpu_env);
13604 gen_helper_dps_w_qh(v1_t, v2_t, t0, cpu_env);
13606 case OPC_DPSQ_S_W_QH:
13608 gen_helper_dpsq_s_w_qh(v1_t, v2_t, t0, cpu_env);
13610 case OPC_DPSQ_SA_L_PW:
13612 gen_helper_dpsq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
13614 case OPC_DPSU_H_OBL:
13616 gen_helper_dpsu_h_obl(v1_t, v2_t, t0, cpu_env);
13618 case OPC_DPSU_H_OBR:
13620 gen_helper_dpsu_h_obr(v1_t, v2_t, t0, cpu_env);
13622 case OPC_MAQ_S_L_PWL:
13624 gen_helper_maq_s_l_pwl(v1_t, v2_t, t0, cpu_env);
13626 case OPC_MAQ_S_L_PWR:
13628 gen_helper_maq_s_l_pwr(v1_t, v2_t, t0, cpu_env);
13630 case OPC_MAQ_S_W_QHLL:
13632 gen_helper_maq_s_w_qhll(v1_t, v2_t, t0, cpu_env);
13634 case OPC_MAQ_SA_W_QHLL:
13636 gen_helper_maq_sa_w_qhll(v1_t, v2_t, t0, cpu_env);
13638 case OPC_MAQ_S_W_QHLR:
13640 gen_helper_maq_s_w_qhlr(v1_t, v2_t, t0, cpu_env);
13642 case OPC_MAQ_SA_W_QHLR:
13644 gen_helper_maq_sa_w_qhlr(v1_t, v2_t, t0, cpu_env);
13646 case OPC_MAQ_S_W_QHRL:
13648 gen_helper_maq_s_w_qhrl(v1_t, v2_t, t0, cpu_env);
13650 case OPC_MAQ_SA_W_QHRL:
13652 gen_helper_maq_sa_w_qhrl(v1_t, v2_t, t0, cpu_env);
13654 case OPC_MAQ_S_W_QHRR:
13656 gen_helper_maq_s_w_qhrr(v1_t, v2_t, t0, cpu_env);
13658 case OPC_MAQ_SA_W_QHRR:
13660 gen_helper_maq_sa_w_qhrr(v1_t, v2_t, t0, cpu_env);
13662 case OPC_MULSAQ_S_L_PW:
13664 gen_helper_mulsaq_s_l_pw(v1_t, v2_t, t0, cpu_env);
13666 case OPC_MULSAQ_S_W_QH:
13668 gen_helper_mulsaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
13674 case OPC_ADDU_QB_DSP:
13676 case OPC_MULEU_S_PH_QBL:
13678 gen_helper_muleu_s_ph_qbl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13680 case OPC_MULEU_S_PH_QBR:
13682 gen_helper_muleu_s_ph_qbr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13684 case OPC_MULQ_RS_PH:
13686 gen_helper_mulq_rs_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13688 case OPC_MULEQ_S_W_PHL:
13690 gen_helper_muleq_s_w_phl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13692 case OPC_MULEQ_S_W_PHR:
13694 gen_helper_muleq_s_w_phr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13696 case OPC_MULQ_S_PH:
13698 gen_helper_mulq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13702 #ifdef TARGET_MIPS64
13703 case OPC_ADDU_OB_DSP:
13705 case OPC_MULEQ_S_PW_QHL:
13707 gen_helper_muleq_s_pw_qhl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13709 case OPC_MULEQ_S_PW_QHR:
13711 gen_helper_muleq_s_pw_qhr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13713 case OPC_MULEU_S_QH_OBL:
13715 gen_helper_muleu_s_qh_obl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13717 case OPC_MULEU_S_QH_OBR:
13719 gen_helper_muleu_s_qh_obr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13721 case OPC_MULQ_RS_QH:
13723 gen_helper_mulq_rs_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13730 tcg_temp_free_i32(t0);
13731 tcg_temp_free(v1_t);
13732 tcg_temp_free(v2_t);
13734 (void)opn; /* avoid a compiler warning */
13735 MIPS_DEBUG("%s", opn);
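/* gen_mipsdsp_bitinsn: DSP bit manipulation and replication. Immediate forms build the replicated byte/halfword pattern at translation time and emit a single tcg_gen_movi_tl; register forms replicate lanes with shift/or sequences. */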
13739 static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
13742 const char *opn = "mipsdsp Bit/ Manipulation";
13748 /* Treat as NOP. */
13753 t0 = tcg_temp_new();
13754 val_t = tcg_temp_new();
13755 gen_load_gpr(val_t, val);
13758 case OPC_ABSQ_S_PH_DSP:
13762 gen_helper_bitrev(cpu_gpr[ret], val_t);
13767 target_long result;
13768 imm = (ctx->opcode >> 16) & 0xFF;
13769 result = (uint32_t)imm << 24 |
13770 (uint32_t)imm << 16 |
13771 (uint32_t)imm << 8 |
13773 result = (int32_t)result;
13774 tcg_gen_movi_tl(cpu_gpr[ret], result);
13779 tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
13780 tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
13781 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13782 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
13783 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13784 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
13789 imm = (ctx->opcode >> 16) & 0x03FF;
13790 imm = (int16_t)(imm << 6) >> 6;
13791 tcg_gen_movi_tl(cpu_gpr[ret], \
13792 (target_long)((int32_t)imm << 16 | \
13798 tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
13799 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
13800 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13801 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
13805 #ifdef TARGET_MIPS64
13806 case OPC_ABSQ_S_QH_DSP:
13813 imm = (ctx->opcode >> 16) & 0xFF;
13814 temp = ((uint64_t)imm << 8) | (uint64_t)imm;
13815 temp = (temp << 16) | temp;
13816 temp = (temp << 32) | temp;
13817 tcg_gen_movi_tl(cpu_gpr[ret], temp);
13825 imm = (ctx->opcode >> 16) & 0x03FF;
13826 imm = (int16_t)(imm << 6) >> 6;
13827 temp = ((target_long)imm << 32) \
13828 | ((target_long)imm & 0xFFFFFFFF);
13829 tcg_gen_movi_tl(cpu_gpr[ret], temp);
13837 imm = (ctx->opcode >> 16) & 0x03FF;
13838 imm = (int16_t)(imm << 6) >> 6;
13840 temp = ((uint64_t)(uint16_t)imm << 48) |
13841 ((uint64_t)(uint16_t)imm << 32) |
13842 ((uint64_t)(uint16_t)imm << 16) |
13843 (uint64_t)(uint16_t)imm;
13844 tcg_gen_movi_tl(cpu_gpr[ret], temp);
13849 tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
13850 tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
13851 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13852 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
13853 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13854 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
13855 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13859 tcg_gen_ext32u_i64(cpu_gpr[ret], val_t);
13860 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
13861 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13865 tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
13866 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
13867 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13868 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
13869 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
13876 tcg_temp_free(val_t);
13878 (void)opn; /* avoid a compiler warning */
13879 MIPS_DEBUG("%s", opn);
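/* gen_mipsdsp_add_cmp_pick: DSP compare, pick and pack operations. The plain CMP* helpers only update DSP condition state, while CMPG*, PICK and PACKRL also write a GPR result. */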
13882 static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
13883 uint32_t op1, uint32_t op2,
13884 int ret, int v1, int v2, int check_ret)
13886 const char *opn = "mipsdsp add compare pick";
13891 if ((ret == 0) && (check_ret == 1)) {
13892 /* Treat as NOP. */
13897 t1 = tcg_temp_new();
13898 v1_t = tcg_temp_new();
13899 v2_t = tcg_temp_new();
13901 gen_load_gpr(v1_t, v1);
13902 gen_load_gpr(v2_t, v2);
13905 case OPC_CMPU_EQ_QB_DSP:
13907 case OPC_CMPU_EQ_QB:
13909 gen_helper_cmpu_eq_qb(v1_t, v2_t, cpu_env);
13911 case OPC_CMPU_LT_QB:
13913 gen_helper_cmpu_lt_qb(v1_t, v2_t, cpu_env);
13915 case OPC_CMPU_LE_QB:
13917 gen_helper_cmpu_le_qb(v1_t, v2_t, cpu_env);
13919 case OPC_CMPGU_EQ_QB:
13921 gen_helper_cmpgu_eq_qb(cpu_gpr[ret], v1_t, v2_t);
13923 case OPC_CMPGU_LT_QB:
13925 gen_helper_cmpgu_lt_qb(cpu_gpr[ret], v1_t, v2_t);
13927 case OPC_CMPGU_LE_QB:
13929 gen_helper_cmpgu_le_qb(cpu_gpr[ret], v1_t, v2_t);
13931 case OPC_CMPGDU_EQ_QB:
13933 gen_helper_cmpgu_eq_qb(t1, v1_t, v2_t);
13934 tcg_gen_mov_tl(cpu_gpr[ret], t1);
13935 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
13936 tcg_gen_shli_tl(t1, t1, 24);
13937 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
13939 case OPC_CMPGDU_LT_QB:
13941 gen_helper_cmpgu_lt_qb(t1, v1_t, v2_t);
13942 tcg_gen_mov_tl(cpu_gpr[ret], t1);
13943 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
13944 tcg_gen_shli_tl(t1, t1, 24);
13945 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
13947 case OPC_CMPGDU_LE_QB:
13949 gen_helper_cmpgu_le_qb(t1, v1_t, v2_t);
13950 tcg_gen_mov_tl(cpu_gpr[ret], t1);
13951 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
13952 tcg_gen_shli_tl(t1, t1, 24);
13953 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
13955 case OPC_CMP_EQ_PH:
13957 gen_helper_cmp_eq_ph(v1_t, v2_t, cpu_env);
13959 case OPC_CMP_LT_PH:
13961 gen_helper_cmp_lt_ph(v1_t, v2_t, cpu_env);
13963 case OPC_CMP_LE_PH:
13965 gen_helper_cmp_le_ph(v1_t, v2_t, cpu_env);
13969 gen_helper_pick_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13973 gen_helper_pick_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
13975 case OPC_PACKRL_PH:
13977 gen_helper_packrl_ph(cpu_gpr[ret], v1_t, v2_t);
13981 #ifdef TARGET_MIPS64
13982 case OPC_CMPU_EQ_OB_DSP:
13984 case OPC_CMP_EQ_PW:
13986 gen_helper_cmp_eq_pw(v1_t, v2_t, cpu_env);
13988 case OPC_CMP_LT_PW:
13990 gen_helper_cmp_lt_pw(v1_t, v2_t, cpu_env);
13992 case OPC_CMP_LE_PW:
13994 gen_helper_cmp_le_pw(v1_t, v2_t, cpu_env);
13996 case OPC_CMP_EQ_QH:
13998 gen_helper_cmp_eq_qh(v1_t, v2_t, cpu_env);
14000 case OPC_CMP_LT_QH:
14002 gen_helper_cmp_lt_qh(v1_t, v2_t, cpu_env);
14004 case OPC_CMP_LE_QH:
14006 gen_helper_cmp_le_qh(v1_t, v2_t, cpu_env);
14008 case OPC_CMPGDU_EQ_OB:
14010 gen_helper_cmpgdu_eq_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14012 case OPC_CMPGDU_LT_OB:
14014 gen_helper_cmpgdu_lt_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14016 case OPC_CMPGDU_LE_OB:
14018 gen_helper_cmpgdu_le_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14020 case OPC_CMPGU_EQ_OB:
14022 gen_helper_cmpgu_eq_ob(cpu_gpr[ret], v1_t, v2_t);
14024 case OPC_CMPGU_LT_OB:
14026 gen_helper_cmpgu_lt_ob(cpu_gpr[ret], v1_t, v2_t);
14028 case OPC_CMPGU_LE_OB:
14030 gen_helper_cmpgu_le_ob(cpu_gpr[ret], v1_t, v2_t);
14032 case OPC_CMPU_EQ_OB:
14034 gen_helper_cmpu_eq_ob(v1_t, v2_t, cpu_env);
14036 case OPC_CMPU_LT_OB:
14038 gen_helper_cmpu_lt_ob(v1_t, v2_t, cpu_env);
14040 case OPC_CMPU_LE_OB:
14042 gen_helper_cmpu_le_ob(v1_t, v2_t, cpu_env);
14044 case OPC_PACKRL_PW:
14046 gen_helper_packrl_pw(cpu_gpr[ret], v1_t, v2_t);
14050 gen_helper_pick_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14054 gen_helper_pick_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14058 gen_helper_pick_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
14066 tcg_temp_free(v1_t);
14067 tcg_temp_free(v2_t);
14069 (void)opn; /* avoid a compiler warning */
14070 MIPS_DEBUG("%s", opn);
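/* gen_mipsdsp_append: APPEND_DSP / DAPPEND_DSP pools. Bits of GPR[rs] are merged into GPR[rt] with deposit/shift/or sequences; 32-bit results are sign-extended. */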
14073 static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
14074 uint32_t op1, int rt, int rs, int sa)
14076 const char *opn = "mipsdsp append/dappend";
14082 /* Treat as NOP. */
14087 t0 = tcg_temp_new();
14088 gen_load_gpr(t0, rs);
14091 case OPC_APPEND_DSP:
14092 switch (MASK_APPEND(ctx->opcode)) {
14095 tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
14097 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
14101 tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
14102 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
14103 tcg_gen_shli_tl(t0, t0, 32 - sa);
14104 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
14106 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
14110 if (sa != 0 && sa != 2) {
14111 tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
14112 tcg_gen_ext32u_tl(t0, t0);
14113 tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
14114 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
14116 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
14118 default: /* Invalid */
14119 MIPS_INVAL("MASK APPEND");
14120 generate_exception(ctx, EXCP_RI);
14124 #ifdef TARGET_MIPS64
14125 case OPC_DAPPEND_DSP:
14126 switch (MASK_DAPPEND(ctx->opcode)) {
14129 tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
14133 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
14134 tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
14135 tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
14139 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
14140 tcg_gen_shli_tl(t0, t0, 64 - sa);
14141 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
14146 if (sa != 0 && sa != 2 && sa != 4) {
14147 tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
14148 tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
14149 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
14152 default: /* Invalid */
14153 MIPS_INVAL("MASK DAPPEND");
14154 generate_exception(ctx, EXCP_RI);
14161 (void)opn; /* avoid a compiler warning */
14162 MIPS_DEBUG("%s", opn);
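/* gen_mipsdsp_accinsn: DSP accumulator and control-register access (EXTR/EXTP/SHILO/MTHLIP/WRDSP/RDDSP and the 64-bit DEXTR forms). Immediate operands are moved into temporaries before the helper call. */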
14165 static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
14166 int ret, int v1, int v2, int check_ret)
14169 const char *opn = "mipsdsp accumulator";
14176 if ((ret == 0) && (check_ret == 1)) {
14177 /* Treat as NOP. */
14182 t0 = tcg_temp_new();
14183 t1 = tcg_temp_new();
14184 v1_t = tcg_temp_new();
14185 v2_t = tcg_temp_new();
14187 gen_load_gpr(v1_t, v1);
14188 gen_load_gpr(v2_t, v2);
14191 case OPC_EXTR_W_DSP:
14195 tcg_gen_movi_tl(t0, v2);
14196 tcg_gen_movi_tl(t1, v1);
14197 gen_helper_extr_w(cpu_gpr[ret], t0, t1, cpu_env);
14200 tcg_gen_movi_tl(t0, v2);
14201 tcg_gen_movi_tl(t1, v1);
14202 gen_helper_extr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
14204 case OPC_EXTR_RS_W:
14205 tcg_gen_movi_tl(t0, v2);
14206 tcg_gen_movi_tl(t1, v1);
14207 gen_helper_extr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
14210 tcg_gen_movi_tl(t0, v2);
14211 tcg_gen_movi_tl(t1, v1);
14212 gen_helper_extr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
14214 case OPC_EXTRV_S_H:
14215 tcg_gen_movi_tl(t0, v2);
14216 gen_helper_extr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
14219 tcg_gen_movi_tl(t0, v2);
14220 gen_helper_extr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14222 case OPC_EXTRV_R_W:
14223 tcg_gen_movi_tl(t0, v2);
14224 gen_helper_extr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14226 case OPC_EXTRV_RS_W:
14227 tcg_gen_movi_tl(t0, v2);
14228 gen_helper_extr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14231 tcg_gen_movi_tl(t0, v2);
14232 tcg_gen_movi_tl(t1, v1);
14233 gen_helper_extp(cpu_gpr[ret], t0, t1, cpu_env);
14236 tcg_gen_movi_tl(t0, v2);
14237 gen_helper_extp(cpu_gpr[ret], t0, v1_t, cpu_env);
14240 tcg_gen_movi_tl(t0, v2);
14241 tcg_gen_movi_tl(t1, v1);
14242 gen_helper_extpdp(cpu_gpr[ret], t0, t1, cpu_env);
14245 tcg_gen_movi_tl(t0, v2);
14246 gen_helper_extpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
14249 imm = (ctx->opcode >> 20) & 0x3F;
14250 tcg_gen_movi_tl(t0, ret);
14251 tcg_gen_movi_tl(t1, imm);
14252 gen_helper_shilo(t0, t1, cpu_env);
14255 tcg_gen_movi_tl(t0, ret);
14256 gen_helper_shilo(t0, v1_t, cpu_env);
14259 tcg_gen_movi_tl(t0, ret);
14260 gen_helper_mthlip(t0, v1_t, cpu_env);
14263 imm = (ctx->opcode >> 11) & 0x3FF;
14264 tcg_gen_movi_tl(t0, imm);
14265 gen_helper_wrdsp(v1_t, t0, cpu_env);
14268 imm = (ctx->opcode >> 16) & 0x03FF;
14269 tcg_gen_movi_tl(t0, imm);
14270 gen_helper_rddsp(cpu_gpr[ret], t0, cpu_env);
14274 #ifdef TARGET_MIPS64
14275 case OPC_DEXTR_W_DSP:
14279 tcg_gen_movi_tl(t0, ret);
14280 gen_helper_dmthlip(v1_t, t0, cpu_env);
14284 int shift = (ctx->opcode >> 19) & 0x7F;
14285 int ac = (ctx->opcode >> 11) & 0x03;
14286 tcg_gen_movi_tl(t0, shift);
14287 tcg_gen_movi_tl(t1, ac);
14288 gen_helper_dshilo(t0, t1, cpu_env);
14293 int ac = (ctx->opcode >> 11) & 0x03;
14294 tcg_gen_movi_tl(t0, ac);
14295 gen_helper_dshilo(v1_t, t0, cpu_env);
14299 tcg_gen_movi_tl(t0, v2);
14300 tcg_gen_movi_tl(t1, v1);
14302 gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
14305 tcg_gen_movi_tl(t0, v2);
14306 gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
14309 tcg_gen_movi_tl(t0, v2);
14310 tcg_gen_movi_tl(t1, v1);
14311 gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
14314 tcg_gen_movi_tl(t0, v2);
14315 gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
14318 tcg_gen_movi_tl(t0, v2);
14319 tcg_gen_movi_tl(t1, v1);
14320 gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
14322 case OPC_DEXTR_R_L:
14323 tcg_gen_movi_tl(t0, v2);
14324 tcg_gen_movi_tl(t1, v1);
14325 gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
14327 case OPC_DEXTR_RS_L:
14328 tcg_gen_movi_tl(t0, v2);
14329 tcg_gen_movi_tl(t1, v1);
14330 gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
14333 tcg_gen_movi_tl(t0, v2);
14334 tcg_gen_movi_tl(t1, v1);
14335 gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
14337 case OPC_DEXTR_R_W:
14338 tcg_gen_movi_tl(t0, v2);
14339 tcg_gen_movi_tl(t1, v1);
14340 gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
14342 case OPC_DEXTR_RS_W:
14343 tcg_gen_movi_tl(t0, v2);
14344 tcg_gen_movi_tl(t1, v1);
14345 gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
14347 case OPC_DEXTR_S_H:
14348 tcg_gen_movi_tl(t0, v2);
14349 tcg_gen_movi_tl(t1, v1);
14350 gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
14352 case OPC_DEXTRV_S_H:
14353 tcg_gen_movi_tl(t0, v2);
14354 tcg_gen_movi_tl(t1, v1);
14355 gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
14358 tcg_gen_movi_tl(t0, v2);
14359 gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
14361 case OPC_DEXTRV_R_L:
14362 tcg_gen_movi_tl(t0, v2);
14363 gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
14365 case OPC_DEXTRV_RS_L:
14366 tcg_gen_movi_tl(t0, v2);
14367 gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
14370 tcg_gen_movi_tl(t0, v2);
14371 gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14373 case OPC_DEXTRV_R_W:
14374 tcg_gen_movi_tl(t0, v2);
14375 gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14377 case OPC_DEXTRV_RS_W:
14378 tcg_gen_movi_tl(t0, v2);
14379 gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
14388 tcg_temp_free(v1_t);
14389 tcg_temp_free(v2_t);
14391 (void)opn; /* avoid a compiler warning */
14392 MIPS_DEBUG("%s", opn);
14395 /* End MIPSDSP functions. */
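/* decode_opc: main 32-bit MIPS decoder. It checks word alignment, resolves a pending branch-likely slot, extracts the rs/rt/rd/sa fields and dispatches on the major opcode (SPECIAL, SPECIAL2, SPECIAL3, ...). */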
14397 static void decode_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
14400 int rs
, rt
, rd
, sa
;
14401 uint32_t op
, op1
, op2
;
14404 /* make sure instructions are on a word boundary */
14405 if (ctx
->pc
& 0x3) {
14406 env
->CP0_BadVAddr
= ctx
->pc
;
14407 generate_exception(ctx
, EXCP_AdEL
);
14411 /* Handle blikely not taken case */
14412 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
14413 int l1
= gen_new_label();
14415 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx
")", ctx
->pc
+ 4);
14416 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
14417 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
14418 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
14422 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
14423 tcg_gen_debug_insn_start(ctx
->pc
);
14426 op
= MASK_OP_MAJOR(ctx
->opcode
);
14427 rs
= (ctx
->opcode
>> 21) & 0x1f;
14428 rt
= (ctx
->opcode
>> 16) & 0x1f;
14429 rd
= (ctx
->opcode
>> 11) & 0x1f;
14430 sa
= (ctx
->opcode
>> 6) & 0x1f;
14431 imm
= (int16_t)ctx
->opcode
;
14434 op1
= MASK_SPECIAL(ctx
->opcode
);
14436 case OPC_SLL
: /* Shift with immediate */
14438 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14441 switch ((ctx
->opcode
>> 21) & 0x1f) {
14443 /* rotr is decoded as srl on non-R2 CPUs */
14444 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
14449 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14452 generate_exception(ctx
, EXCP_RI
);
14456 case OPC_MOVN
: /* Conditional move */
14458 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
14459 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
14460 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
14462 case OPC_ADD
... OPC_SUBU
:
14463 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14465 case OPC_SLLV
: /* Shifts */
14467 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14470 switch ((ctx
->opcode
>> 6) & 0x1f) {
14472 /* rotrv is decoded as srlv on non-R2 CPUs */
14473 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
14478 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14481 generate_exception(ctx
, EXCP_RI
);
14485 case OPC_SLT
: /* Set on less than */
14487 gen_slt(ctx
, op1
, rd
, rs
, rt
);
14489 case OPC_AND
: /* Logic*/
14493 gen_logic(ctx
, op1
, rd
, rs
, rt
);
14498 check_insn(ctx
, INSN_VR54XX
);
14499 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
14500 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
14502 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14507 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14509 case OPC_JR
... OPC_JALR
:
14510 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
);
14512 case OPC_TGE
... OPC_TEQ
: /* Traps */
14514 gen_trap(ctx
, op1
, rs
, rt
, -1);
14516 case OPC_MFHI
: /* Move from HI/LO */
14518 gen_HILO(ctx
, op1
, rs
& 3, rd
);
14521 case OPC_MTLO
: /* Move to HI/LO */
14522 gen_HILO(ctx
, op1
, rd
& 3, rs
);
14524 case OPC_PMON
: /* Pmon entry point, also R4010 selsl */
14525 #ifdef MIPS_STRICT_STANDARD
14526 MIPS_INVAL("PMON / selsl");
14527 generate_exception(ctx
, EXCP_RI
);
14529 gen_helper_0e0i(pmon
, sa
);
14533 generate_exception(ctx
, EXCP_SYSCALL
);
14534 ctx
->bstate
= BS_STOP
;
14537 generate_exception(ctx
, EXCP_BREAK
);
14540 #ifdef MIPS_STRICT_STANDARD
14541 MIPS_INVAL("SPIM");
14542 generate_exception(ctx
, EXCP_RI
);
14544 /* Implemented as RI exception for now. */
14545 MIPS_INVAL("spim (unofficial)");
14546 generate_exception(ctx
, EXCP_RI
);
14550 /* Treat as NOP. */
14554 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
14555 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14556 check_cp1_enabled(ctx
);
14557 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
14558 (ctx
->opcode
>> 16) & 1);
14560 generate_exception_err(ctx
, EXCP_CpU
, 1);
14564 #if defined(TARGET_MIPS64)
14565 /* MIPS64 specific opcodes */
14570 check_insn(ctx
, ISA_MIPS3
);
14571 check_mips_64(ctx
);
14572 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14575 switch ((ctx
->opcode
>> 21) & 0x1f) {
14577 /* drotr is decoded as dsrl on non-R2 CPUs */
14578 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
14583 check_insn(ctx
, ISA_MIPS3
);
14584 check_mips_64(ctx
);
14585 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14588 generate_exception(ctx
, EXCP_RI
);
14593 switch ((ctx
->opcode
>> 21) & 0x1f) {
14595 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
14596 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
14601 check_insn(ctx
, ISA_MIPS3
);
14602 check_mips_64(ctx
);
14603 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14606 generate_exception(ctx
, EXCP_RI
);
14610 case OPC_DADD
... OPC_DSUBU
:
14611 check_insn(ctx
, ISA_MIPS3
);
14612 check_mips_64(ctx
);
14613 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14617 check_insn(ctx
, ISA_MIPS3
);
14618 check_mips_64(ctx
);
14619 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14622 switch ((ctx
->opcode
>> 6) & 0x1f) {
14624 /* drotrv is decoded as dsrlv on non-R2 CPUs */
14625 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
14630 check_insn(ctx
, ISA_MIPS3
);
14631 check_mips_64(ctx
);
14632 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14635 generate_exception(ctx
, EXCP_RI
);
14639 case OPC_DMULT
... OPC_DDIVU
:
14640 check_insn(ctx
, ISA_MIPS3
);
14641 check_mips_64(ctx
);
14642 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14645 default: /* Invalid */
14646 MIPS_INVAL("special");
14647 generate_exception(ctx
, EXCP_RI
);
14652 op1
= MASK_SPECIAL2(ctx
->opcode
);
14654 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
14655 case OPC_MSUB
... OPC_MSUBU
:
14656 check_insn(ctx
, ISA_MIPS32
);
14657 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14660 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14664 check_insn(ctx
, ISA_MIPS32
);
14665 gen_cl(ctx
, op1
, rd
, rs
);
14668 /* XXX: not clear which exception should be raised
14669 * when in debug mode...
14671 check_insn(ctx
, ISA_MIPS32
);
14672 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
14673 generate_exception(ctx
, EXCP_DBp
);
14675 generate_exception(ctx
, EXCP_DBp
);
14677 /* Treat as NOP. */
14680 case OPC_DIVU_G_2F
:
14681 case OPC_MULT_G_2F
:
14682 case OPC_MULTU_G_2F
:
14684 case OPC_MODU_G_2F
:
14685 check_insn(ctx
, INSN_LOONGSON2F
);
14686 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14688 #if defined(TARGET_MIPS64)
14691 check_insn(ctx
, ISA_MIPS64
);
14692 check_mips_64(ctx
);
14693 gen_cl(ctx
, op1
, rd
, rs
);
14695 case OPC_DMULT_G_2F
:
14696 case OPC_DMULTU_G_2F
:
14697 case OPC_DDIV_G_2F
:
14698 case OPC_DDIVU_G_2F
:
14699 case OPC_DMOD_G_2F
:
14700 case OPC_DMODU_G_2F
:
14701 check_insn(ctx
, INSN_LOONGSON2F
);
14702 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14705 default: /* Invalid */
14706 MIPS_INVAL("special2");
14707 generate_exception(ctx
, EXCP_RI
);
14712 op1
= MASK_SPECIAL3(ctx
->opcode
);
14716 check_insn(ctx
, ISA_MIPS32R2
);
14717 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
14720 check_insn(ctx
, ISA_MIPS32R2
);
14721 op2
= MASK_BSHFL(ctx
->opcode
);
14722 gen_bshfl(ctx
, op2
, rt
, rd
);
14725 gen_rdhwr(ctx
, rt
, rd
);
14728 check_insn(ctx
, ASE_MT
);
14730 TCGv t0
= tcg_temp_new();
14731 TCGv t1
= tcg_temp_new();
14733 gen_load_gpr(t0
, rt
);
14734 gen_load_gpr(t1
, rs
);
14735 gen_helper_fork(t0
, t1
);
14741 check_insn(ctx
, ASE_MT
);
14743 TCGv t0
= tcg_temp_new();
14745 save_cpu_state(ctx
, 1);
14746 gen_load_gpr(t0
, rs
);
14747 gen_helper_yield(t0
, cpu_env
, t0
);
14748 gen_store_gpr(t0
, rd
);
14752 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
14753 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
14754 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
14755 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
14756 * the same mask and op1. */
14757 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
14758 op2
= MASK_ADDUH_QB(ctx
->opcode
);
14761 case OPC_ADDUH_R_QB
:
14763 case OPC_ADDQH_R_PH
:
14765 case OPC_ADDQH_R_W
:
14767 case OPC_SUBUH_R_QB
:
14769 case OPC_SUBQH_R_PH
:
14771 case OPC_SUBQH_R_W
:
14772 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14777 case OPC_MULQ_RS_W
:
14778 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14781 MIPS_INVAL("MASK ADDUH.QB");
14782 generate_exception(ctx
, EXCP_RI
);
14785 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
14786 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14788 generate_exception(ctx
, EXCP_RI
);
14792 op2
= MASK_LX(ctx
->opcode
);
14794 #if defined(TARGET_MIPS64)
14800 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
14802 default: /* Invalid */
14803 MIPS_INVAL("MASK LX");
14804 generate_exception(ctx
, EXCP_RI
);
14808 case OPC_ABSQ_S_PH_DSP
:
14809 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
14811 case OPC_ABSQ_S_QB
:
14812 case OPC_ABSQ_S_PH
:
14814 case OPC_PRECEQ_W_PHL
:
14815 case OPC_PRECEQ_W_PHR
:
14816 case OPC_PRECEQU_PH_QBL
:
14817 case OPC_PRECEQU_PH_QBR
:
14818 case OPC_PRECEQU_PH_QBLA
:
14819 case OPC_PRECEQU_PH_QBRA
:
14820 case OPC_PRECEU_PH_QBL
:
14821 case OPC_PRECEU_PH_QBR
:
14822 case OPC_PRECEU_PH_QBLA
:
14823 case OPC_PRECEU_PH_QBRA
:
14824 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14831 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14834 MIPS_INVAL("MASK ABSQ_S.PH");
14835 generate_exception(ctx
, EXCP_RI
);
14839 case OPC_ADDU_QB_DSP
:
14840 op2
= MASK_ADDU_QB(ctx
->opcode
);
14843 case OPC_ADDQ_S_PH
:
14846 case OPC_ADDU_S_QB
:
14848 case OPC_ADDU_S_PH
:
14850 case OPC_SUBQ_S_PH
:
14853 case OPC_SUBU_S_QB
:
14855 case OPC_SUBU_S_PH
:
14859 case OPC_RADDU_W_QB
:
14860 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14862 case OPC_MULEU_S_PH_QBL
:
14863 case OPC_MULEU_S_PH_QBR
:
14864 case OPC_MULQ_RS_PH
:
14865 case OPC_MULEQ_S_W_PHL
:
14866 case OPC_MULEQ_S_W_PHR
:
14867 case OPC_MULQ_S_PH
:
14868 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14870 default: /* Invalid */
14871 MIPS_INVAL("MASK ADDU.QB");
14872 generate_exception(ctx
, EXCP_RI
);
14877 case OPC_CMPU_EQ_QB_DSP
:
14878 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
14880 case OPC_PRECR_SRA_PH_W
:
14881 case OPC_PRECR_SRA_R_PH_W
:
14882 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception(ctx, EXCP_RI);
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception(ctx, EXCP_RI);
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
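            /* Per the DSP ASE, INSV inserts a bit field taken from rs into
             * rt, with position and size coming from DSPControl.  rt is both
             * a source and the destination, so both registers are read into
             * temporaries before the helper overwrites cpu_gpr[rt]. */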
        default: /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception(ctx, EXCP_RI);
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        case OPC_EXTR_RS_W:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        op2 = MASK_DBSHFL(ctx->opcode);
        gen_bshfl(ctx, op2, rt, rd);
    case OPC_DDIV_G_2E ... OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E ... OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E ... OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA:
        case OPC_ABSQ_S_OB:
        case OPC_ABSQ_S_PW:
        case OPC_ABSQ_S_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK ABSQ_S.QH");
            generate_exception(ctx, EXCP_RI);
    case OPC_ADDU_OB_DSP:
        op2 = MASK_ADDU_OB(ctx->opcode);
        case OPC_RADDU_L_OB:
        case OPC_SUBQ_S_PW:
        case OPC_SUBQ_S_QH:
        case OPC_SUBU_S_OB:
        case OPC_SUBU_S_QH:
        case OPC_SUBUH_R_OB:
        case OPC_ADDQ_S_PW:
        case OPC_ADDQ_S_QH:
        case OPC_ADDU_S_OB:
        case OPC_ADDU_S_QH:
        case OPC_ADDUH_R_OB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEQ_S_PW_QHL:
        case OPC_MULEQ_S_PW_QHR:
        case OPC_MULEU_S_QH_OBL:
        case OPC_MULEU_S_QH_OBR:
        case OPC_MULQ_RS_QH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.OB");
            generate_exception(ctx, EXCP_RI);
    case OPC_CMPU_EQ_OB_DSP:
        op2 = MASK_CMPU_EQ_OB(ctx->opcode);
        case OPC_PRECR_SRA_QH_PW:
        case OPC_PRECR_SRA_R_QH_PW:
            /* Return value is rt. */
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_OB_QH:
        case OPC_PRECRQ_OB_QH:
        case OPC_PRECRQ_PW_L:
        case OPC_PRECRQ_QH_PW:
        case OPC_PRECRQ_RS_QH_PW:
        case OPC_PRECRQU_S_OB_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_OB:
        case OPC_CMPU_LT_OB:
        case OPC_CMPU_LE_OB:
        case OPC_CMP_EQ_QH:
        case OPC_CMP_LT_QH:
        case OPC_CMP_LE_QH:
        case OPC_CMP_EQ_PW:
        case OPC_CMP_LT_PW:
        case OPC_CMP_LE_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGDU_EQ_OB:
        case OPC_CMPGDU_LT_OB:
        case OPC_CMPGDU_LE_OB:
        case OPC_CMPGU_EQ_OB:
        case OPC_CMPGU_LT_OB:
        case OPC_CMPGU_LE_OB:
        case OPC_PACKRL_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU_EQ.OB");
            generate_exception(ctx, EXCP_RI);
    case OPC_DAPPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_DEXTR_W_DSP:
        op2 = MASK_DEXTR_W(ctx->opcode);
        case OPC_DEXTR_R_L:
        case OPC_DEXTR_RS_L:
        case OPC_DEXTR_R_W:
        case OPC_DEXTR_RS_W:
        case OPC_DEXTR_S_H:
        case OPC_DEXTRV_R_L:
        case OPC_DEXTRV_RS_L:
        case OPC_DEXTRV_S_H:
        case OPC_DEXTRV_R_W:
        case OPC_DEXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception(ctx, EXCP_RI);
    case OPC_DPAQ_W_QH_DSP:
        op2 = MASK_DPAQ_W_QH(ctx->opcode);
        case OPC_DPAU_H_OBL:
        case OPC_DPAU_H_OBR:
        case OPC_DPSU_H_OBL:
        case OPC_DPSU_H_OBR:
        case OPC_DPAQ_S_W_QH:
        case OPC_DPSQ_S_W_QH:
        case OPC_MULSAQ_S_W_QH:
        case OPC_DPAQ_SA_L_PW:
        case OPC_DPSQ_SA_L_PW:
        case OPC_MULSAQ_S_L_PW:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_MAQ_S_W_QHLL:
        case OPC_MAQ_S_W_QHLR:
        case OPC_MAQ_S_W_QHRL:
        case OPC_MAQ_S_W_QHRR:
        case OPC_MAQ_SA_W_QHLL:
        case OPC_MAQ_SA_W_QHLR:
        case OPC_MAQ_SA_W_QHRL:
        case OPC_MAQ_SA_W_QHRR:
        case OPC_MAQ_S_L_PWL:
        case OPC_MAQ_S_L_PWR:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAQ.W.QH");
            generate_exception(ctx, EXCP_RI);
    case OPC_DINSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DINSV");
            generate_exception(ctx, EXCP_RI);
    case OPC_SHLL_OB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special3");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_REGIMM(ctx->opcode);
    case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
    case OPC_BLTZAL ... OPC_BGEZALL:
        gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2);
    case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        gen_trap(ctx, op1, rs, -1, imm);
        check_insn(ctx, ISA_MIPS32R2);
        /* Treat as NOP. */
    case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2);
    default: /* Invalid */
        MIPS_INVAL("regimm");
        generate_exception(ctx, EXCP_RI);
    check_cp0_enabled(ctx);
    op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
        gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
    case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
        gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
        TCGv t0 = tcg_temp_new();
        op2 = MASK_MFMC0(ctx->opcode);
            check_insn(ctx, ASE_MT);
            gen_helper_dmt(t0);
            gen_store_gpr(t0, rt);
            check_insn(ctx, ASE_MT);
            gen_helper_emt(t0);
            gen_store_gpr(t0, rt);
            check_insn(ctx, ASE_MT);
            gen_helper_dvpe(t0, cpu_env);
            gen_store_gpr(t0, rt);
            check_insn(ctx, ASE_MT);
            gen_helper_evpe(t0, cpu_env);
            gen_store_gpr(t0, rt);
            check_insn(ctx, ISA_MIPS32R2);
            save_cpu_state(ctx, 1);
            gen_helper_di(t0, cpu_env);
            gen_store_gpr(t0, rt);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            save_cpu_state(ctx, 1);
            gen_helper_ei(t0, cpu_env);
            gen_store_gpr(t0, rt);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
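            /* BS_STOP makes the translation loop end after this instruction:
             * gen_intermediate_code_internal() only keeps decoding while
             * bstate is BS_NONE, so the code following DI/EI is translated
             * with the updated interrupt state. */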
        default: /* Invalid */
            MIPS_INVAL("mfmc0");
            generate_exception(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
        check_insn(ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rd);
        check_insn(ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rd);
        generate_exception(ctx, EXCP_RI);
    case OPC_ADDI: /* Arithmetic with immediate opcode */
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Logic with immediate opcode */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
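        /* J/JAL: the low 26 bits of the opcode are the instr_index field;
         * shifting left by 2 turns it into a byte offset within the current
         * 256MB region.  Combining it with the upper bits of the delay-slot
         * PC is presumably done inside gen_compute_branch(). */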
    case OPC_BEQ ... OPC_BGTZ: /* Branch */
    case OPC_BEQL ... OPC_BGTZL:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2);
    case OPC_LB ... OPC_LWR: /* Load and stores */
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SB ... OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
    /* Floating point (COP1). */
        gen_cop1_ldst(env, ctx, op, rt, rs, imm);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP1(ctx->opcode);
                check_insn(ctx, ISA_MIPS32R2);
                gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
                check_insn(ctx, ISA_MIPS3);
                gen_cp1(ctx, op1, rt, rd);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
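                /* For BC1 branches the 3-bit FP condition-code number lives
                 * in the upper bits of the rt field (instruction bits 20:18),
                 * hence (rt >> 2) & 0x7; imm << 2 is the byte offset of the
                 * branch target. */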
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                generate_exception(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                /* Treat as NOP. */
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
        check_insn(ctx, ASE_MDMX);
        /* MDMX: Not implemented. */
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception(ctx, EXCP_RI);
gen_intermediate_code_internal(MIPSCPU *cpu, TranslationBlock *tb,
    CPUState *cs = CPU(cpu);
    CPUMIPSState *env = &cpu->env;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    qemu_log("search pc %d\n", search_pc);
    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.bstate = BS_NONE;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) {
            QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    save_cpu_state(&ctx, 1);
                    ctx.bstate = BS_BRANCH;
                    gen_helper_0e0i(raise_exception, EXCP_DEBUG);
                    /* Include the breakpoint location or the tb won't
                     * be flushed when it must be. */
                    goto done_generating;
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
                tcg_ctx.gen_opc_instr_start[lj++] = 0;
            tcg_ctx.gen_opc_pc[lj] = ctx.pc;
            gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
            gen_opc_btarget[lj] = ctx.btarget;
            tcg_ctx.gen_opc_instr_start[lj] = 1;
            tcg_ctx.gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
        is_delay = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception(&ctx, EXCP_RI);
            ctx.bstate = BS_STOP;
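        /* Note on the fetch paths above: standard MIPS reads a 32-bit opcode
         * with cpu_ldl_code() and decodes it with decode_opc(); microMIPS and
         * MIPS16 read a 16-bit halfword first and let their sub-decoders
         * report the real instruction size through insn_bytes.  If the M16
         * hflag is set but neither compressed ASE is available, the
         * instruction is reserved. */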
            handle_delay_slot(&ctx, insn_bytes);
        ctx.pc += insn_bytes;
        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
        if (tcg_ctx.gen_opc_ptr >= gen_opc_end) {
        if (num_insns >= max_insns)
    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate == BS_NONE);
        gen_helper_0e0i(raise_exception, EXCP_DEBUG);
    switch (ctx.bstate) {
        gen_goto_tb(&ctx, 0, ctx.pc);
        save_cpu_state(&ctx, 0);
        gen_goto_tb(&ctx, 0, ctx.pc);
        tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);
    *tcg_ctx.gen_opc_ptr = INDEX_op_end;
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            tcg_ctx.gen_opc_instr_start[lj++] = 0;
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(env, pc_start, ctx.pc - pc_start, 0);
void gen_intermediate_code (CPUMIPSState *env, struct TranslationBlock *tb)
    gen_intermediate_code_internal(mips_env_get_cpu(env), tb, false);
void gen_intermediate_code_pc (CPUMIPSState *env, struct TranslationBlock *tb)
    gen_intermediate_code_internal(mips_env_get_cpu(env), tb, true);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
#define printfpr(fp)                                                    \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu: %13g\n",                     \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d,                    \
                    (double)(fp)->fd,                                   \
                    (double)(fp)->fs[FP_ENDIAN_IDX],                    \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]);                  \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu:%13g\n",                      \
                    tmp.w[FP_ENDIAN_IDX], tmp.d,                        \
                    (double)tmp.fs[FP_ENDIAN_IDX],                      \
                    (double)tmp.fs[!FP_ENDIAN_IDX]);                    \
    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
/* Debug help: The architecture requires 32bit code to maintain proper
   sign-extended values on 64bit machines. */
#define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
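/* Worked example (64-bit target_ulong; ~0x7fffffff widens to
 * 0xffffffff80000000 here):
 *   SIGN_EXT_P(0x0000000012345678ULL) -> true   (upper 33 bits all zero)
 *   SIGN_EXT_P(0xffffffff80000000ULL) -> true   (upper 33 bits all one)
 *   SIGN_EXT_P(0x0000000080000000ULL) -> false  (bit 31 set but not propagated)
 */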
cpu_mips_check_sign_extensions (CPUMIPSState *env, FILE *f,
                                fprintf_function cpu_fprintf,
    if (!SIGN_EXT_P(env->active_tc.PC))
        cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
    if (!SIGN_EXT_P(env->active_tc.HI[0]))
        cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
    if (!SIGN_EXT_P(env->active_tc.LO[0]))
        cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
    if (!SIGN_EXT_P(env->btarget))
        cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
    for (i = 0; i < 32; i++) {
        if (!SIGN_EXT_P(env->active_tc.gpr[i]))
            cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
    if (!SIGN_EXT_P(env->CP0_EPC))
        cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
    if (!SIGN_EXT_P(env->lladdr))
        cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->lladdr);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");
    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
    cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
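    /* Each tcg_global_mem_new*() call below maps a fixed CPUMIPSState field
     * to a named TCG global, so generated code can read and write that field
     * directly.  cpu_gpr[0] is deliberately left unset: the architectural
     * $zero register always reads as zero and is special-cased by the GPR
     * load/store helpers. */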
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i]);
        fpu_f64[i] = tcg_global_mem_new_i64(TCG_AREG0, off, fregnames[i]);
    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
        cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.ACX[i]),
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
    const mips_def_t *def;
    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;
#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);
    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
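    /* SEGMask is simply the low SEGBITS bits set: e.g. SEGBITS == 32 gives
     * 0xFFFFFFFF and SEGBITS == 40 gives 0xFFFFFFFFFFULL. */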
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->PAMask = (target_ulong)((1ULL << env->cpu_model->PABITS) - 1);
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->insn_flags = env->cpu_model->insn_flags;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = env->active_tc.PC - 4;
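        /* PC points at the delay slot here; for the standard 4-byte
         * encodings the branch sits one word earlier, so subtracting 4
         * re-points ErrorEPC at the branch itself. */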
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = 0x80000000 | (cs->cpu_index & 0x3FF);
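    /* EBase: bit 31 places the exception base in kseg0; the low ten bits
     * hold the CPUNum field, so each VCPU reports its own index. */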
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    compute_hflags(env);
    cs->exception_index = EXCP_NONE;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb, int pc_pos)
    env->active_tc.PC = tcg_ctx.gen_opc_pc[pc_pos];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= gen_opc_hflags[pc_pos];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = gen_opc_btarget[pc_pos];
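        /* When the interrupted TB stopped between a branch and its delay
         * slot, the pending branch target has to be recovered as well, so
         * btarget is restored alongside PC and the branch hflags. */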