/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
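/*
 * Every MIPS instruction word carries its major opcode in bits 31..26;
 * MASK_OP_MAJOR() extracts it, and the MASK_* helpers further down fold
 * in the secondary fields (function, rs, rt, sa) that select entries in
 * the indirect opcode tables below.
 */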
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_J = (0x02 << 26),
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    /* Load and stores */
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
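/*
 * The R6 forms are told apart from the legacy ones by the sa field
 * (bits 10..6): 2 selects the low-half/quotient result (MUL, DIV),
 * 3 the high-half/modulo result (MUH, MOD), hence the extra 0x7ff
 * folded into the mask.
 */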
enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,

    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    OPC_LWLE = 0x19 | OPC_SPECIAL3,
    OPC_LWRE = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE = 0x1B | OPC_SPECIAL3,
    OPC_SBE = 0x1C | OPC_SPECIAL3,
    OPC_SHE = 0x1D | OPC_SPECIAL3,
    OPC_SCE = 0x1E | OPC_SPECIAL3,
    OPC_SWE = 0x1F | OPC_SPECIAL3,
    OPC_SWLE = 0x21 | OPC_SPECIAL3,
    OPC_SWRE = 0x22 | OPC_SPECIAL3,
    OPC_PREFE = 0x23 | OPC_SPECIAL3,
    OPC_LBUE = 0x28 | OPC_SPECIAL3,
    OPC_LHUE = 0x29 | OPC_SPECIAL3,
    OPC_LBE = 0x2C | OPC_SPECIAL3,
    OPC_LHE = 0x2D | OPC_SPECIAL3,
    OPC_LLE = 0x2E | OPC_SPECIAL3,
    OPC_LWE = 0x2F | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_1 = (0x11 << 21) | OPC_CP0,
    OPC_C0_2 = (0x12 << 21) | OPC_CP0,
    OPC_C0_3 = (0x13 << 21) | OPC_CP0,
    OPC_C0_4 = (0x14 << 21) | OPC_CP0,
    OPC_C0_5 = (0x15 << 21) | OPC_CP0,
    OPC_C0_6 = (0x16 << 21) | OPC_CP0,
    OPC_C0_7 = (0x17 << 21) | OPC_CP0,
    OPC_C0_8 = (0x18 << 21) | OPC_CP0,
    OPC_C0_9 = (0x19 << 21) | OPC_CP0,
    OPC_C0_A = (0x1A << 21) | OPC_CP0,
    OPC_C0_B = (0x1B << 21) | OPC_CP0,
    OPC_C0_C = (0x1C << 21) | OPC_CP0,
    OPC_C0_D = (0x1D << 21) | OPC_CP0,
    OPC_C0_E = (0x1E << 21) | OPC_CP0,
    OPC_C0_F = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};
enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
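/*
 * TCG helpers cannot take plain C integers as runtime arguments, so the
 * gen_helper_*e*i wrappers below box each immediate in a temporary
 * TCGv_i32, invoke the helper with cpu_env, and free the temporary again.
 */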
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong saved_pc;
    target_ulong page_start;
    uint32_t opcode;
    int insn_flags;
    int32_t CP0_Config1;
    int32_t CP0_Config3;
    int32_t CP0_Config5;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;
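/*
 * Private is_jmp codes for this front end, built on the generic
 * DISAS_TARGET_* values: DISAS_STOP ends the current translation block,
 * DISAS_EXIT additionally returns control to the main execution loop.
 */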
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0) {
        /* $zero always reads as 0 */
        tcg_gen_movi_tl(t, 0);
    } else {
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
    }
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    /* writes to $zero are silently discarded */
    if (reg != 0) {
        tcg_gen_mov_tl(cpu_gpr[reg], t);
    }
}
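/*
 * Usage sketch: a caller translating "move rd, rs" goes through a
 * temporary so that both $zero conventions above apply, e.g.
 *
 *     TCGv t0 = tcg_temp_new();
 *     gen_load_gpr(t0, rs);
 *     gen_store_gpr(t0, rd);
 *     tcg_temp_free(t0);
 */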
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0) {
        tcg_gen_movi_tl(t0, 0);
    } else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
        gen_save_pc(ctx->base.pc_next);
        ctx->saved_pc = ctx->base.pc_next;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
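/*
 * Worked example of the FR=0 layout handled above: with 32-bit FPRs a
 * 64-bit value written to register 4 is split across the even/odd pair,
 * roughly
 *
 *     fpu_f64[4] <- deposit(fpu_f64[4], val,       bits 31..0)
 *     fpu_f64[5] <- deposit(fpu_f64[5], val >> 32, bits 31..0)
 *
 * whereas with FR=1 (MIPS_HFLAG_F64) the value is stored whole into
 * fpu_f64[4].
 */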
static inline int get_fp_bit (int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0,
                                    TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

static inline void gen_op_addr_addi(DisasContext *ctx, TCGv ret, TCGv base,
                                    target_long ofs)
{
    tcg_gen_addi_tl(ret, base, ofs);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}
/*
 * Verify if a floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by the CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, indicating that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/*
 * This code generates a "reserved instruction" exception if the
 * Config5 XNP bit is set.
 */
static inline void check_xnp(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * This code generates a "reserved instruction" exception if the
 * Config3 MT bit is NOT set.
 */
static inline void check_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

#ifndef CONFIG_USER_ONLY
/*
 * This code generates a "coprocessor unusable" exception if CP0 is not
 * available, and, if that is not the case, generates a "reserved
 * instruction" exception if the Config3 MT bit is NOT set. This is needed
 * for availability control of some of the MT ASE instructions.
 */
static inline void check_cp0_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_err(ctx, EXCP_CpU, 0);
    } else {
        if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
            generate_exception_err(ctx, EXCP_RI, 0);
        }
    }
}
#endif

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set.
 */
static inline void check_nms(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs. No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
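/*
 * The six expansions above define gen_cmp_d(), gen_cmpabs_d(), gen_cmp_s(),
 * gen_cmpabs_s(), gen_cmp_ps() and gen_cmpabs_ps(); each dispatches on the
 * 4-bit condition code n to the matching cmp*_<fmt>_<cond> helper, which
 * records the comparison result in condition-code bit cc.
 */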
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break; \
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break; \
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break; \
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break; \
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break; \
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break; \
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break; \
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break; \
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break; \
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break; \
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break; \
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break; \
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break; \
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break; \
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break; \
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break; \
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break; \
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break; \
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break; \
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break; \
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
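/*
 * These two expansions define gen_r6_cmp_d() and gen_r6_cmp_s() for the R6
 * CMP.cond.fmt family: here the helper produces an all-ones/all-zeroes
 * result in fp0, which the STORE argument then writes back to FPR fd
 * instead of setting a condition-code bit.
 */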
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,   \
                                DisasContext *ctx)                           \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,   \
                                DisasContext *ctx)                           \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx);                          \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
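/*
 * Sketch of the LL/SC strategy above: in the CONFIG_USER_ONLY variant,
 * op_st_sc first checks alignment (raising AdES on a misaligned address),
 * then compares the store address with the lladdr recorded by op_ld_ll.
 * A match records llreg/llnewval and raises EXCP_SC so the conditional
 * store can be resolved outside the generated code; a mismatch simply
 * writes 0 to rt, i.e. SC failure.  The softmmu variant defers both the
 * load and the store to the "ll"/"sc" helpers instead.
 */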
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
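/*
 * Worked example: for "lw $t0, 8($sp)" the caller passes base = 29 and
 * offset = 8, so addr ends up holding cpu_gpr[29] + 8 (wrapped to 32 bits
 * by gen_op_addr_add() when MIPS_HFLAG_AWRAP is set).  base == 0 folds
 * $zero-relative addressing into a plain constant.
 */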
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->base.pc_next;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
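/*
 * Example: a PC-relative access sitting in the delay slot of a 32-bit
 * branch is computed relative to the branch itself, so pc_next is backed
 * up by 4 (or by 2 after a 16-bit branch) before being word-aligned;
 * outside a delay slot the instruction's own aligned address is used.
 */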
2222 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2223 int rt
, int base
, int offset
)
2226 int mem_idx
= ctx
->mem_idx
;
2228 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2229 /* Loongson CPU uses a load to zero register for prefetch.
2230 We emulate it as a NOP. On other CPU we must perform the
2231 actual memory access. */
2235 t0
= tcg_temp_new();
2236 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2239 #if defined(TARGET_MIPS64)
2241 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2242 ctx
->default_tcg_memop_mask
);
2243 gen_store_gpr(t0
, rt
);
2246 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2247 ctx
->default_tcg_memop_mask
);
2248 gen_store_gpr(t0
, rt
);
2252 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2253 gen_store_gpr(t0
, rt
);
2256 t1
= tcg_temp_new();
2257 /* Do a byte access to possibly trigger a page
2258 fault with the unaligned address. */
2259 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2260 tcg_gen_andi_tl(t1
, t0
, 7);
2261 #ifndef TARGET_WORDS_BIGENDIAN
2262 tcg_gen_xori_tl(t1
, t1
, 7);
2264 tcg_gen_shli_tl(t1
, t1
, 3);
2265 tcg_gen_andi_tl(t0
, t0
, ~7);
2266 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2267 tcg_gen_shl_tl(t0
, t0
, t1
);
2268 t2
= tcg_const_tl(-1);
2269 tcg_gen_shl_tl(t2
, t2
, t1
);
2270 gen_load_gpr(t1
, rt
);
2271 tcg_gen_andc_tl(t1
, t1
, t2
);
2273 tcg_gen_or_tl(t0
, t0
, t1
);
2275 gen_store_gpr(t0
, rt
);
2278 t1
= tcg_temp_new();
2279 /* Do a byte access to possibly trigger a page
2280 fault with the unaligned address. */
2281 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2282 tcg_gen_andi_tl(t1
, t0
, 7);
2283 #ifdef TARGET_WORDS_BIGENDIAN
2284 tcg_gen_xori_tl(t1
, t1
, 7);
2286 tcg_gen_shli_tl(t1
, t1
, 3);
2287 tcg_gen_andi_tl(t0
, t0
, ~7);
2288 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2289 tcg_gen_shr_tl(t0
, t0
, t1
);
2290 tcg_gen_xori_tl(t1
, t1
, 63);
2291 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2292 tcg_gen_shl_tl(t2
, t2
, t1
);
2293 gen_load_gpr(t1
, rt
);
2294 tcg_gen_and_tl(t1
, t1
, t2
);
2296 tcg_gen_or_tl(t0
, t0
, t1
);
2298 gen_store_gpr(t0
, rt
);
2301 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2302 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2304 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2305 gen_store_gpr(t0
, rt
);
2309 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2310 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2312 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2313 gen_store_gpr(t0
, rt
);
2316 mem_idx
= MIPS_HFLAG_UM
;
2319 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2320 ctx
->default_tcg_memop_mask
);
2321 gen_store_gpr(t0
, rt
);
2324 mem_idx
= MIPS_HFLAG_UM
;
2327 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2328 ctx
->default_tcg_memop_mask
);
2329 gen_store_gpr(t0
, rt
);
2332 mem_idx
= MIPS_HFLAG_UM
;
2335 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2336 ctx
->default_tcg_memop_mask
);
2337 gen_store_gpr(t0
, rt
);
2340 mem_idx
= MIPS_HFLAG_UM
;
2343 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2344 gen_store_gpr(t0
, rt
);
2347 mem_idx
= MIPS_HFLAG_UM
;
2350 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2351 gen_store_gpr(t0
, rt
);
2354 mem_idx
= MIPS_HFLAG_UM
;
2357 t1
= tcg_temp_new();
2358 /* Do a byte access to possibly trigger a page
2359 fault with the unaligned address. */
2360 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2361 tcg_gen_andi_tl(t1
, t0
, 3);
2362 #ifndef TARGET_WORDS_BIGENDIAN
2363 tcg_gen_xori_tl(t1
, t1
, 3);
2365 tcg_gen_shli_tl(t1
, t1
, 3);
2366 tcg_gen_andi_tl(t0
, t0
, ~3);
2367 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2368 tcg_gen_shl_tl(t0
, t0
, t1
);
2369 t2
= tcg_const_tl(-1);
2370 tcg_gen_shl_tl(t2
, t2
, t1
);
2371 gen_load_gpr(t1
, rt
);
2372 tcg_gen_andc_tl(t1
, t1
, t2
);
2374 tcg_gen_or_tl(t0
, t0
, t1
);
2376 tcg_gen_ext32s_tl(t0
, t0
);
2377 gen_store_gpr(t0
, rt
);
2380 mem_idx
= MIPS_HFLAG_UM
;
2383 t1
= tcg_temp_new();
2384 /* Do a byte access to possibly trigger a page
2385 fault with the unaligned address. */
2386 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2387 tcg_gen_andi_tl(t1
, t0
, 3);
2388 #ifdef TARGET_WORDS_BIGENDIAN
2389 tcg_gen_xori_tl(t1
, t1
, 3);
2391 tcg_gen_shli_tl(t1
, t1
, 3);
2392 tcg_gen_andi_tl(t0
, t0
, ~3);
2393 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2394 tcg_gen_shr_tl(t0
, t0
, t1
);
2395 tcg_gen_xori_tl(t1
, t1
, 31);
2396 t2
= tcg_const_tl(0xfffffffeull
);
2397 tcg_gen_shl_tl(t2
, t2
, t1
);
2398 gen_load_gpr(t1
, rt
);
2399 tcg_gen_and_tl(t1
, t1
, t2
);
2401 tcg_gen_or_tl(t0
, t0
, t1
);
2403 tcg_gen_ext32s_tl(t0
, t0
);
2404 gen_store_gpr(t0
, rt
);
2407 mem_idx
= MIPS_HFLAG_UM
;
2411 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2412 gen_store_gpr(t0
, rt
);
static void gen_llwp(DisasContext *ctx, uint32_t base, int16_t offset,
                    uint32_t reg1, uint32_t reg2)
{
    TCGv taddr = tcg_temp_new();
    TCGv_i64 tval = tcg_temp_new_i64();
    TCGv tmp1 = tcg_temp_new();
    TCGv tmp2 = tcg_temp_new();

    gen_base_offset_addr(ctx, taddr, base, offset);
    tcg_gen_qemu_ld64(tval, taddr, ctx->mem_idx);
#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_extr_i64_tl(tmp2, tmp1, tval);
#else
    tcg_gen_extr_i64_tl(tmp1, tmp2, tval);
#endif
    gen_store_gpr(tmp1, reg1);
    tcg_temp_free(tmp1);
    gen_store_gpr(tmp2, reg2);
    tcg_temp_free(tmp2);
    tcg_gen_st_i64(tval, cpu_env, offsetof(CPUMIPSState, llval_wp));
    tcg_temp_free_i64(tval);
    tcg_gen_st_tl(taddr, cpu_env, offsetof(CPUMIPSState, lladdr));
    tcg_temp_free(taddr);
}
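/*
 * LLWP loads the 64-bit pair in a single access and records both the
 * loaded value (llval_wp) and the address (lladdr), so that the matching
 * SCWP emitted by gen_scwp() below can perform a 64-bit cmpxchg against
 * the remembered value.  The endian-dependent extraction above decides
 * which 32-bit half lands in reg1 and which in reg2.
 */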
2444 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2445 int base
, int offset
)
2447 TCGv t0
= tcg_temp_new();
2448 TCGv t1
= tcg_temp_new();
2449 int mem_idx
= ctx
->mem_idx
;
2451 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2452 gen_load_gpr(t1
, rt
);
2454 #if defined(TARGET_MIPS64)
2456 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2457 ctx
->default_tcg_memop_mask
);
2460 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2463 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2467 mem_idx
= MIPS_HFLAG_UM
;
2470 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2471 ctx
->default_tcg_memop_mask
);
2474 mem_idx
= MIPS_HFLAG_UM
;
2477 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2478 ctx
->default_tcg_memop_mask
);
2481 mem_idx
= MIPS_HFLAG_UM
;
2484 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2487 mem_idx
= MIPS_HFLAG_UM
;
2490 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2493 mem_idx
= MIPS_HFLAG_UM
;
2496 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2504 /* Store conditional */
2505 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2506 int base
, int16_t offset
)
2509 int mem_idx
= ctx
->mem_idx
;
2511 #ifdef CONFIG_USER_ONLY
2512 t0
= tcg_temp_local_new();
2513 t1
= tcg_temp_local_new();
2515 t0
= tcg_temp_new();
2516 t1
= tcg_temp_new();
2518 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2519 gen_load_gpr(t1
, rt
);
2521 #if defined(TARGET_MIPS64)
2524 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2528 mem_idx
= MIPS_HFLAG_UM
;
2532 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2539 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
2540 uint32_t reg1
, uint32_t reg2
)
2542 TCGv taddr
= tcg_temp_local_new();
2543 TCGv lladdr
= tcg_temp_local_new();
2544 TCGv_i64 tval
= tcg_temp_new_i64();
2545 TCGv_i64 llval
= tcg_temp_new_i64();
2546 TCGv_i64 val
= tcg_temp_new_i64();
2547 TCGv tmp1
= tcg_temp_new();
2548 TCGv tmp2
= tcg_temp_new();
2549 TCGLabel
*lab_fail
= gen_new_label();
2550 TCGLabel
*lab_done
= gen_new_label();
2552 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
2554 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
2555 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
2557 gen_load_gpr(tmp1
, reg1
);
2558 gen_load_gpr(tmp2
, reg2
);
2560 #ifdef TARGET_WORDS_BIGENDIAN
2561 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
2563 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
2566 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
2567 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
2568 ctx
->mem_idx
, MO_64
);
2570 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
2572 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
2574 gen_set_label(lab_fail
);
2577 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
2579 gen_set_label(lab_done
);
2580 tcg_gen_movi_tl(lladdr
, -1);
2581 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
2584 /* Load and store */
2585 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2588 /* Don't do NOP if destination is zero: we must perform the actual
2593 TCGv_i32 fp0
= tcg_temp_new_i32();
2594 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2595 ctx
->default_tcg_memop_mask
);
2596 gen_store_fpr32(ctx
, fp0
, ft
);
2597 tcg_temp_free_i32(fp0
);
2602 TCGv_i32 fp0
= tcg_temp_new_i32();
2603 gen_load_fpr32(ctx
, fp0
, ft
);
2604 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2605 ctx
->default_tcg_memop_mask
);
2606 tcg_temp_free_i32(fp0
);
2611 TCGv_i64 fp0
= tcg_temp_new_i64();
2612 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2613 ctx
->default_tcg_memop_mask
);
2614 gen_store_fpr64(ctx
, fp0
, ft
);
2615 tcg_temp_free_i64(fp0
);
2620 TCGv_i64 fp0
= tcg_temp_new_i64();
2621 gen_load_fpr64(ctx
, fp0
, ft
);
2622 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2623 ctx
->default_tcg_memop_mask
);
2624 tcg_temp_free_i64(fp0
);
2628 MIPS_INVAL("flt_ldst");
2629 generate_exception_end(ctx
, EXCP_RI
);
2634 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2635 int rs
, int16_t imm
)
2637 TCGv t0
= tcg_temp_new();
2639 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2640 check_cp1_enabled(ctx
);
2644 check_insn(ctx
, ISA_MIPS2
);
2647 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2648 gen_flt_ldst(ctx
, op
, rt
, t0
);
2651 generate_exception_err(ctx
, EXCP_CpU
, 1);
2656 /* Arithmetic with immediate operand */
2657 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2658 int rt
, int rs
, int imm
)
2660 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2662 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2663 /* If no destination, treat it as a NOP.
2664 For addi, we must generate the overflow exception when needed. */
2670 TCGv t0
= tcg_temp_local_new();
2671 TCGv t1
= tcg_temp_new();
2672 TCGv t2
= tcg_temp_new();
2673 TCGLabel
*l1
= gen_new_label();
2675 gen_load_gpr(t1
, rs
);
2676 tcg_gen_addi_tl(t0
, t1
, uimm
);
2677 tcg_gen_ext32s_tl(t0
, t0
);
2679 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2680 tcg_gen_xori_tl(t2
, t0
, uimm
);
2681 tcg_gen_and_tl(t1
, t1
, t2
);
2683 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2685 /* operands of same sign, result different sign */
2686 generate_exception(ctx
, EXCP_OVERFLOW
);
2688 tcg_gen_ext32s_tl(t0
, t0
);
2689 gen_store_gpr(t0
, rt
);
2695 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2696 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2698 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2701 #if defined(TARGET_MIPS64)
2704 TCGv t0
= tcg_temp_local_new();
2705 TCGv t1
= tcg_temp_new();
2706 TCGv t2
= tcg_temp_new();
2707 TCGLabel
*l1
= gen_new_label();
2709 gen_load_gpr(t1
, rs
);
2710 tcg_gen_addi_tl(t0
, t1
, uimm
);
2712 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2713 tcg_gen_xori_tl(t2
, t0
, uimm
);
2714 tcg_gen_and_tl(t1
, t1
, t2
);
2716 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2718 /* operands of same sign, result different sign */
2719 generate_exception(ctx
, EXCP_OVERFLOW
);
2721 gen_store_gpr(t0
, rt
);
2727 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2729 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2736 /* Logic with immediate operand */
2737 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2738 int rt
, int rs
, int16_t imm
)
2743 /* If no destination, treat it as a NOP. */
2746 uimm
= (uint16_t)imm
;
2749 if (likely(rs
!= 0))
2750 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2752 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2756 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2758 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2761 if (likely(rs
!= 0))
2762 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2764 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2767 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2769 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2770 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2772 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2781 /* Set on less than with immediate operand */
2782 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2783 int rt
, int rs
, int16_t imm
)
2785 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2789 /* If no destination, treat it as a NOP. */
2792 t0
= tcg_temp_new();
2793 gen_load_gpr(t0
, rs
);
2796 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2799 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2805 /* Shifts with immediate operand */
2806 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2807 int rt
, int rs
, int16_t imm
)
2809 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2813 /* If no destination, treat it as a NOP. */
2817 t0
= tcg_temp_new();
2818 gen_load_gpr(t0
, rs
);
2821 tcg_gen_shli_tl(t0
, t0
, uimm
);
2822 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2825 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2829 tcg_gen_ext32u_tl(t0
, t0
);
2830 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2832 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2837 TCGv_i32 t1
= tcg_temp_new_i32();
2839 tcg_gen_trunc_tl_i32(t1
, t0
);
2840 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2841 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2842 tcg_temp_free_i32(t1
);
2844 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2847 #if defined(TARGET_MIPS64)
2849 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2852 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2855 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2859 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2861 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2865 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2868 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2871 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2874 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2882 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2883 int rd
, int rs
, int rt
)
2885 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2886 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2887 /* If no destination, treat it as a NOP.
2888 For add & sub, we must generate the overflow exception when needed. */
2895 TCGv t0
= tcg_temp_local_new();
2896 TCGv t1
= tcg_temp_new();
2897 TCGv t2
= tcg_temp_new();
2898 TCGLabel
*l1
= gen_new_label();
2900 gen_load_gpr(t1
, rs
);
2901 gen_load_gpr(t2
, rt
);
2902 tcg_gen_add_tl(t0
, t1
, t2
);
2903 tcg_gen_ext32s_tl(t0
, t0
);
2904 tcg_gen_xor_tl(t1
, t1
, t2
);
2905 tcg_gen_xor_tl(t2
, t0
, t2
);
2906 tcg_gen_andc_tl(t1
, t2
, t1
);
2908 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2910 /* operands of same sign, result different sign */
2911 generate_exception(ctx
, EXCP_OVERFLOW
);
2913 gen_store_gpr(t0
, rd
);
2918 if (rs
!= 0 && rt
!= 0) {
2919 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2920 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2921 } else if (rs
== 0 && rt
!= 0) {
2922 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2923 } else if (rs
!= 0 && rt
== 0) {
2924 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2926 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2931 TCGv t0
= tcg_temp_local_new();
2932 TCGv t1
= tcg_temp_new();
2933 TCGv t2
= tcg_temp_new();
2934 TCGLabel
*l1
= gen_new_label();
2936 gen_load_gpr(t1
, rs
);
2937 gen_load_gpr(t2
, rt
);
2938 tcg_gen_sub_tl(t0
, t1
, t2
);
2939 tcg_gen_ext32s_tl(t0
, t0
);
2940 tcg_gen_xor_tl(t2
, t1
, t2
);
2941 tcg_gen_xor_tl(t1
, t0
, t1
);
2942 tcg_gen_and_tl(t1
, t1
, t2
);
2944 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2946 /* operands of different sign, first operand and result different sign */
2947 generate_exception(ctx
, EXCP_OVERFLOW
);
2949 gen_store_gpr(t0
, rd
);
2954 if (rs
!= 0 && rt
!= 0) {
2955 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2956 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2957 } else if (rs
== 0 && rt
!= 0) {
2958 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2959 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2960 } else if (rs
!= 0 && rt
== 0) {
2961 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2963 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2966 #if defined(TARGET_MIPS64)
2969 TCGv t0
= tcg_temp_local_new();
2970 TCGv t1
= tcg_temp_new();
2971 TCGv t2
= tcg_temp_new();
2972 TCGLabel
*l1
= gen_new_label();
2974 gen_load_gpr(t1
, rs
);
2975 gen_load_gpr(t2
, rt
);
2976 tcg_gen_add_tl(t0
, t1
, t2
);
2977 tcg_gen_xor_tl(t1
, t1
, t2
);
2978 tcg_gen_xor_tl(t2
, t0
, t2
);
2979 tcg_gen_andc_tl(t1
, t2
, t1
);
2981 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2983 /* operands of same sign, result different sign */
2984 generate_exception(ctx
, EXCP_OVERFLOW
);
2986 gen_store_gpr(t0
, rd
);
2991 if (rs
!= 0 && rt
!= 0) {
2992 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2993 } else if (rs
== 0 && rt
!= 0) {
2994 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2995 } else if (rs
!= 0 && rt
== 0) {
2996 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2998 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3003 TCGv t0
= tcg_temp_local_new();
3004 TCGv t1
= tcg_temp_new();
3005 TCGv t2
= tcg_temp_new();
3006 TCGLabel
*l1
= gen_new_label();
3008 gen_load_gpr(t1
, rs
);
3009 gen_load_gpr(t2
, rt
);
3010 tcg_gen_sub_tl(t0
, t1
, t2
);
3011 tcg_gen_xor_tl(t2
, t1
, t2
);
3012 tcg_gen_xor_tl(t1
, t0
, t1
);
3013 tcg_gen_and_tl(t1
, t1
, t2
);
3015 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3017 /* operands of different sign, first operand and result different sign */
3018 generate_exception(ctx
, EXCP_OVERFLOW
);
3020 gen_store_gpr(t0
, rd
);
3025 if (rs
!= 0 && rt
!= 0) {
3026 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3027 } else if (rs
== 0 && rt
!= 0) {
3028 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3029 } else if (rs
!= 0 && rt
== 0) {
3030 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3032 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3037 if (likely(rs
!= 0 && rt
!= 0)) {
3038 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3039 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3041 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3047 /* Conditional move */
3048 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
3049 int rd
, int rs
, int rt
)
3054 /* If no destination, treat it as a NOP. */
3058 t0
= tcg_temp_new();
3059 gen_load_gpr(t0
, rt
);
3060 t1
= tcg_const_tl(0);
3061 t2
= tcg_temp_new();
3062 gen_load_gpr(t2
, rs
);
3065 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3068 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3071 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3074 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3083 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
3084 int rd
, int rs
, int rt
)
3087 /* If no destination, treat it as a NOP. */
3093 if (likely(rs
!= 0 && rt
!= 0)) {
3094 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3096 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3100 if (rs
!= 0 && rt
!= 0) {
3101 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3102 } else if (rs
== 0 && rt
!= 0) {
3103 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3104 } else if (rs
!= 0 && rt
== 0) {
3105 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3107 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
3111 if (likely(rs
!= 0 && rt
!= 0)) {
3112 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3113 } else if (rs
== 0 && rt
!= 0) {
3114 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3115 } else if (rs
!= 0 && rt
== 0) {
3116 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3118 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3122 if (likely(rs
!= 0 && rt
!= 0)) {
3123 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3124 } else if (rs
== 0 && rt
!= 0) {
3125 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3126 } else if (rs
!= 0 && rt
== 0) {
3127 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3129 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3135 /* Set on lower than */
3136 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
3137 int rd
, int rs
, int rt
)
3142 /* If no destination, treat it as a NOP. */
3146 t0
= tcg_temp_new();
3147 t1
= tcg_temp_new();
3148 gen_load_gpr(t0
, rs
);
3149 gen_load_gpr(t1
, rt
);
3152 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3155 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3163 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3164 int rd
, int rs
, int rt
)
3169 /* If no destination, treat it as a NOP.
3170 For add & sub, we must generate the overflow exception when needed. */
3174 t0
= tcg_temp_new();
3175 t1
= tcg_temp_new();
3176 gen_load_gpr(t0
, rs
);
3177 gen_load_gpr(t1
, rt
);
3180 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3181 tcg_gen_shl_tl(t0
, t1
, t0
);
3182 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3185 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3186 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3189 tcg_gen_ext32u_tl(t1
, t1
);
3190 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3191 tcg_gen_shr_tl(t0
, t1
, t0
);
3192 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3196 TCGv_i32 t2
= tcg_temp_new_i32();
3197 TCGv_i32 t3
= tcg_temp_new_i32();
3199 tcg_gen_trunc_tl_i32(t2
, t0
);
3200 tcg_gen_trunc_tl_i32(t3
, t1
);
3201 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3202 tcg_gen_rotr_i32(t2
, t3
, t2
);
3203 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3204 tcg_temp_free_i32(t2
);
3205 tcg_temp_free_i32(t3
);
3208 #if defined(TARGET_MIPS64)
3210 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3211 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3214 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3215 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3218 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3219 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3222 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3223 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3231 /* Arithmetic on HI/LO registers */
3232 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3234 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3245 #if defined(TARGET_MIPS64)
3247 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3251 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3255 #if defined(TARGET_MIPS64)
3257 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3261 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3266 #if defined(TARGET_MIPS64)
3268 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3272 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3275 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3280 #if defined(TARGET_MIPS64)
3282 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3286 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3289 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3295 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3298 TCGv t0
= tcg_const_tl(addr
);
3299 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3300 gen_store_gpr(t0
, reg
);
3304 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3310 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3313 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3314 addr
= addr_add(ctx
, pc
, offset
);
3315 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3319 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3320 addr
= addr_add(ctx
, pc
, offset
);
3321 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3323 #if defined(TARGET_MIPS64)
3326 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3327 addr
= addr_add(ctx
, pc
, offset
);
3328 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3332 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3335 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3336 addr
= addr_add(ctx
, pc
, offset
);
3337 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3342 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3343 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3344 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3347 #if defined(TARGET_MIPS64)
3348 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3349 case R6_OPC_LDPC
+ (1 << 16):
3350 case R6_OPC_LDPC
+ (2 << 16):
3351 case R6_OPC_LDPC
+ (3 << 16):
3353 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3354 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3355 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3359 MIPS_INVAL("OPC_PCREL");
3360 generate_exception_end(ctx
, EXCP_RI
);
3367 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3376 t0
= tcg_temp_new();
3377 t1
= tcg_temp_new();
3379 gen_load_gpr(t0
, rs
);
3380 gen_load_gpr(t1
, rt
);
3385 TCGv t2
= tcg_temp_new();
3386 TCGv t3
= tcg_temp_new();
3387 tcg_gen_ext32s_tl(t0
, t0
);
3388 tcg_gen_ext32s_tl(t1
, t1
);
3389 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3390 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3391 tcg_gen_and_tl(t2
, t2
, t3
);
3392 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3393 tcg_gen_or_tl(t2
, t2
, t3
);
3394 tcg_gen_movi_tl(t3
, 0);
3395 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3396 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3397 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3404 TCGv t2
= tcg_temp_new();
3405 TCGv t3
= tcg_temp_new();
3406 tcg_gen_ext32s_tl(t0
, t0
);
3407 tcg_gen_ext32s_tl(t1
, t1
);
3408 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3409 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3410 tcg_gen_and_tl(t2
, t2
, t3
);
3411 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3412 tcg_gen_or_tl(t2
, t2
, t3
);
3413 tcg_gen_movi_tl(t3
, 0);
3414 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3415 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3416 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3423 TCGv t2
= tcg_const_tl(0);
3424 TCGv t3
= tcg_const_tl(1);
3425 tcg_gen_ext32u_tl(t0
, t0
);
3426 tcg_gen_ext32u_tl(t1
, t1
);
3427 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3428 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3429 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3436 TCGv t2
= tcg_const_tl(0);
3437 TCGv t3
= tcg_const_tl(1);
3438 tcg_gen_ext32u_tl(t0
, t0
);
3439 tcg_gen_ext32u_tl(t1
, t1
);
3440 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3441 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3442 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3449 TCGv_i32 t2
= tcg_temp_new_i32();
3450 TCGv_i32 t3
= tcg_temp_new_i32();
3451 tcg_gen_trunc_tl_i32(t2
, t0
);
3452 tcg_gen_trunc_tl_i32(t3
, t1
);
3453 tcg_gen_mul_i32(t2
, t2
, t3
);
3454 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3455 tcg_temp_free_i32(t2
);
3456 tcg_temp_free_i32(t3
);
3461 TCGv_i32 t2
= tcg_temp_new_i32();
3462 TCGv_i32 t3
= tcg_temp_new_i32();
3463 tcg_gen_trunc_tl_i32(t2
, t0
);
3464 tcg_gen_trunc_tl_i32(t3
, t1
);
3465 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3466 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3467 tcg_temp_free_i32(t2
);
3468 tcg_temp_free_i32(t3
);
3473 TCGv_i32 t2
= tcg_temp_new_i32();
3474 TCGv_i32 t3
= tcg_temp_new_i32();
3475 tcg_gen_trunc_tl_i32(t2
, t0
);
3476 tcg_gen_trunc_tl_i32(t3
, t1
);
3477 tcg_gen_mul_i32(t2
, t2
, t3
);
3478 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3479 tcg_temp_free_i32(t2
);
3480 tcg_temp_free_i32(t3
);
3485 TCGv_i32 t2
= tcg_temp_new_i32();
3486 TCGv_i32 t3
= tcg_temp_new_i32();
3487 tcg_gen_trunc_tl_i32(t2
, t0
);
3488 tcg_gen_trunc_tl_i32(t3
, t1
);
3489 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3490 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3491 tcg_temp_free_i32(t2
);
3492 tcg_temp_free_i32(t3
);
3495 #if defined(TARGET_MIPS64)
3498 TCGv t2
= tcg_temp_new();
3499 TCGv t3
= tcg_temp_new();
3500 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3501 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3502 tcg_gen_and_tl(t2
, t2
, t3
);
3503 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3504 tcg_gen_or_tl(t2
, t2
, t3
);
3505 tcg_gen_movi_tl(t3
, 0);
3506 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3507 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3514 TCGv t2
= tcg_temp_new();
3515 TCGv t3
= tcg_temp_new();
3516 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3517 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3518 tcg_gen_and_tl(t2
, t2
, t3
);
3519 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3520 tcg_gen_or_tl(t2
, t2
, t3
);
3521 tcg_gen_movi_tl(t3
, 0);
3522 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3523 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3530 TCGv t2
= tcg_const_tl(0);
3531 TCGv t3
= tcg_const_tl(1);
3532 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3533 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3540 TCGv t2
= tcg_const_tl(0);
3541 TCGv t3
= tcg_const_tl(1);
3542 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3543 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3549 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3553 TCGv t2
= tcg_temp_new();
3554 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3559 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3563 TCGv t2
= tcg_temp_new();
3564 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3570 MIPS_INVAL("r6 mul/div");
3571 generate_exception_end(ctx
, EXCP_RI
);
3579 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3580 int acc
, int rs
, int rt
)
3584 t0
= tcg_temp_new();
3585 t1
= tcg_temp_new();
3587 gen_load_gpr(t0
, rs
);
3588 gen_load_gpr(t1
, rt
);
3597 TCGv t2
= tcg_temp_new();
3598 TCGv t3
= tcg_temp_new();
3599 tcg_gen_ext32s_tl(t0
, t0
);
3600 tcg_gen_ext32s_tl(t1
, t1
);
3601 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3602 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3603 tcg_gen_and_tl(t2
, t2
, t3
);
3604 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3605 tcg_gen_or_tl(t2
, t2
, t3
);
3606 tcg_gen_movi_tl(t3
, 0);
3607 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3608 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3609 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3610 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3611 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3618 TCGv t2
= tcg_const_tl(0);
3619 TCGv t3
= tcg_const_tl(1);
3620 tcg_gen_ext32u_tl(t0
, t0
);
3621 tcg_gen_ext32u_tl(t1
, t1
);
3622 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3623 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3624 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3625 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3626 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3633 TCGv_i32 t2
= tcg_temp_new_i32();
3634 TCGv_i32 t3
= tcg_temp_new_i32();
3635 tcg_gen_trunc_tl_i32(t2
, t0
);
3636 tcg_gen_trunc_tl_i32(t3
, t1
);
3637 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3638 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3639 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3640 tcg_temp_free_i32(t2
);
3641 tcg_temp_free_i32(t3
);
3646 TCGv_i32 t2
= tcg_temp_new_i32();
3647 TCGv_i32 t3
= tcg_temp_new_i32();
3648 tcg_gen_trunc_tl_i32(t2
, t0
);
3649 tcg_gen_trunc_tl_i32(t3
, t1
);
3650 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3651 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3652 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3653 tcg_temp_free_i32(t2
);
3654 tcg_temp_free_i32(t3
);
3657 #if defined(TARGET_MIPS64)
3660 TCGv t2
= tcg_temp_new();
3661 TCGv t3
= tcg_temp_new();
3662 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3663 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3664 tcg_gen_and_tl(t2
, t2
, t3
);
3665 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3666 tcg_gen_or_tl(t2
, t2
, t3
);
3667 tcg_gen_movi_tl(t3
, 0);
3668 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3669 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3670 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3677 TCGv t2
= tcg_const_tl(0);
3678 TCGv t3
= tcg_const_tl(1);
3679 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3680 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3681 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3687 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3690 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3695 TCGv_i64 t2
= tcg_temp_new_i64();
3696 TCGv_i64 t3
= tcg_temp_new_i64();
3698 tcg_gen_ext_tl_i64(t2
, t0
);
3699 tcg_gen_ext_tl_i64(t3
, t1
);
3700 tcg_gen_mul_i64(t2
, t2
, t3
);
3701 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3702 tcg_gen_add_i64(t2
, t2
, t3
);
3703 tcg_temp_free_i64(t3
);
3704 gen_move_low32(cpu_LO
[acc
], t2
);
3705 gen_move_high32(cpu_HI
[acc
], t2
);
3706 tcg_temp_free_i64(t2
);
3711 TCGv_i64 t2
= tcg_temp_new_i64();
3712 TCGv_i64 t3
= tcg_temp_new_i64();
3714 tcg_gen_ext32u_tl(t0
, t0
);
3715 tcg_gen_ext32u_tl(t1
, t1
);
3716 tcg_gen_extu_tl_i64(t2
, t0
);
3717 tcg_gen_extu_tl_i64(t3
, t1
);
3718 tcg_gen_mul_i64(t2
, t2
, t3
);
3719 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3720 tcg_gen_add_i64(t2
, t2
, t3
);
3721 tcg_temp_free_i64(t3
);
3722 gen_move_low32(cpu_LO
[acc
], t2
);
3723 gen_move_high32(cpu_HI
[acc
], t2
);
3724 tcg_temp_free_i64(t2
);
3729 TCGv_i64 t2
= tcg_temp_new_i64();
3730 TCGv_i64 t3
= tcg_temp_new_i64();
3732 tcg_gen_ext_tl_i64(t2
, t0
);
3733 tcg_gen_ext_tl_i64(t3
, t1
);
3734 tcg_gen_mul_i64(t2
, t2
, t3
);
3735 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3736 tcg_gen_sub_i64(t2
, t3
, t2
);
3737 tcg_temp_free_i64(t3
);
3738 gen_move_low32(cpu_LO
[acc
], t2
);
3739 gen_move_high32(cpu_HI
[acc
], t2
);
3740 tcg_temp_free_i64(t2
);
3745 TCGv_i64 t2
= tcg_temp_new_i64();
3746 TCGv_i64 t3
= tcg_temp_new_i64();
3748 tcg_gen_ext32u_tl(t0
, t0
);
3749 tcg_gen_ext32u_tl(t1
, t1
);
3750 tcg_gen_extu_tl_i64(t2
, t0
);
3751 tcg_gen_extu_tl_i64(t3
, t1
);
3752 tcg_gen_mul_i64(t2
, t2
, t3
);
3753 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3754 tcg_gen_sub_i64(t2
, t3
, t2
);
3755 tcg_temp_free_i64(t3
);
3756 gen_move_low32(cpu_LO
[acc
], t2
);
3757 gen_move_high32(cpu_HI
[acc
], t2
);
3758 tcg_temp_free_i64(t2
);
3762 MIPS_INVAL("mul/div");
3763 generate_exception_end(ctx
, EXCP_RI
);
3771 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3772 int rd
, int rs
, int rt
)
3774 TCGv t0
= tcg_temp_new();
3775 TCGv t1
= tcg_temp_new();
3777 gen_load_gpr(t0
, rs
);
3778 gen_load_gpr(t1
, rt
);
3781 case OPC_VR54XX_MULS
:
3782 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3784 case OPC_VR54XX_MULSU
:
3785 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3787 case OPC_VR54XX_MACC
:
3788 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3790 case OPC_VR54XX_MACCU
:
3791 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3793 case OPC_VR54XX_MSAC
:
3794 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3796 case OPC_VR54XX_MSACU
:
3797 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3799 case OPC_VR54XX_MULHI
:
3800 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3802 case OPC_VR54XX_MULHIU
:
3803 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3805 case OPC_VR54XX_MULSHI
:
3806 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3808 case OPC_VR54XX_MULSHIU
:
3809 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3811 case OPC_VR54XX_MACCHI
:
3812 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3814 case OPC_VR54XX_MACCHIU
:
3815 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3817 case OPC_VR54XX_MSACHI
:
3818 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3820 case OPC_VR54XX_MSACHIU
:
3821 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3824 MIPS_INVAL("mul vr54xx");
3825 generate_exception_end(ctx
, EXCP_RI
);
3828 gen_store_gpr(t0
, rd
);
3835 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3845 gen_load_gpr(t0
, rs
);
3850 #if defined(TARGET_MIPS64)
3854 tcg_gen_not_tl(t0
, t0
);
3863 tcg_gen_ext32u_tl(t0
, t0
);
3864 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3865 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3867 #if defined(TARGET_MIPS64)
3872 tcg_gen_clzi_i64(t0
, t0
, 64);
/* Godson integer instructions */
static void gen_loongson_integer(DisasContext *ctx, uint32_t opc,
                                 int rd, int rs, int rt)
    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();
        t0 = tcg_temp_local_new();
        t1 = tcg_temp_local_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
    case OPC_DDIVU_G_2E:
    case OPC_DDIVU_G_2F:
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
    case OPC_DMODU_G_2E:
    case OPC_DMODU_G_2F:
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
/* Loongson multimedia instructions */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
    uint32_t opc, shift_max;

    opc = MASK_LMI(ctx->opcode);
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);

#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);

        tcg_gen_andc_i64(t0, t1, t0);

        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

            tcg_gen_shl_i64(t0, t0, t1);
            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same.  */
            tcg_gen_sar_i64(t0, t0, t1);
            /* We want to shift in zeros for SRL; zero-extend first.  */
            tcg_gen_ext32u_i64(t0, t0);
            tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);

            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);

            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
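/*
 * Traps.  Conditions that are constant at translation time (a register
 * compared with itself, or $zero compared with an immediate) either always
 * raise EXCP_TRAP or degenerate to a NOP; for the remaining cases the
 * inverted condition branches around the trap exception.
 */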
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:    /* rs == rs          */
        case OPC_TEQI:   /* r0 == 0           */
        case OPC_TGE:    /* rs >= rs          */
        case OPC_TGEI:   /* r0 >= 0           */
        case OPC_TGEU:   /* rs >= rs unsigned */
        case OPC_TGEIU:  /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:    /* rs < rs           */
        case OPC_TLTI:   /* r0 < 0            */
        case OPC_TLTU:   /* rs < rs unsigned  */
        case OPC_TLTIU:  /* r0 < 0  unsigned  */
        case OPC_TNE:    /* rs != rs          */
        case OPC_TNEI:   /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
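/*
 * Direct TB chaining (goto_tb) is only used when the destination lies in
 * the same guest page as the current TB and we are not single-stepping;
 * otherwise the jump goes through lookup_and_goto_ptr() or the debug
 * exception path.
 */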
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->base.singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb(ctx->base.tb, n);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        tcg_gen_lookup_and_goto_ptr();
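/*
 * gen_compute_branch() only evaluates the branch condition and records the
 * target and branch kind in ctx->hflags (MIPS_HFLAG_B/BC/BL/BR/BX); the
 * actual control transfer is emitted once the delay slot has been
 * translated.  Branch-likely forms that are statically not taken simply
 * skip the delay slot instruction.
 */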
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->base.pc_next + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->base.pc_next + insn_bytes) & (int32_t)0xF0000000) |
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->base.pc_next + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink],
                        ctx->base.pc_next + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
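/*
 * nanoMIPS variant of the branch helper: it reuses the pre-R6 opcode
 * enumerators for the nanoMIPS encodings and shares the bcond/btarget
 * mechanism with gen_compute_branch() above.
 */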
/* nanoMIPS Branches */
static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
                                  int rs, int rt, int32_t offset)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0   */
            /* Always take and link */
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            ctx->hflags |= MIPS_HFLAG_BR;
            tcg_gen_movi_tl(cpu_gpr[rt],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
            ctx->hflags |= MIPS_HFLAG_BC;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    if (insn_bytes == 2) {
        ctx->hflags |= MIPS_HFLAG_B16;
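/*
 * EXT/INS-style bitfield operations.  The lsb/msb instruction fields encode
 * the field position and size; out-of-range combinations take the reserved
 * instruction path.
 */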
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
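/*
 * Sign-extend and byte-swap operations (SEB, SEH, WSBH and the 64-bit
 * DSBH/DSHD): implemented as mask-and-shift sequences on a temporary, with
 * the result written to rd with the appropriate extension.
 */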
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
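/* LSA/DLSA: rd = (rs << (imm2 + 1)) + rt, with LSA sign-extending the
   32-bit result. */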
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
            gen_load_gpr(t0, rt);
            gen_load_gpr(t0, rs);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);

static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);

static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);

static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
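/*
 * The CP0 accessors below load/store 32-bit and 64-bit fields of
 * CPUMIPSState by offset.  MFHC0/MTHC0 operate on the upper halves of the
 * 64-bit registers; EntryLo needs a dedicated variant because its upper
 * half starts at bit 30 on MIPS64 and at bit 32 otherwise.
 */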
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
#define CP0_CHECK(c)                            \
        goto cp0_unimplemented;                 \
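/*
 * CP0_CHECK() guards registers that only exist with a given feature
 * (MT ASE, XPA, MAAR, ...); when the check fails the access branches to
 * the cp0_unimplemented path.
 */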
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfhc0", rn, reg, sel);

    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mthc0", rn, reg, sel);

    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
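/*
 * gen_mfc0() dispatches on the (reg, sel) pair: plain registers are read
 * directly from CPUMIPSState, registers with side effects go through
 * helpers, and unimplemented selections are logged and read as 0 (R6) or
 * ~0 via gen_mfc0_unimplemented().
 */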
5319 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5321 const char *rn
= "invalid";
5324 check_insn(ctx
, ISA_MIPS32
);
5330 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5334 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5335 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5339 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5340 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5344 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5345 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5350 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5354 goto cp0_unimplemented
;
5360 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5361 gen_helper_mfc0_random(arg
, cpu_env
);
5365 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5366 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5370 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5375 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5380 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5381 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5385 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5386 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5391 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5392 rn
= "VPEScheFBack";
5395 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5400 goto cp0_unimplemented
;
5407 TCGv_i64 tmp
= tcg_temp_new_i64();
5408 tcg_gen_ld_i64(tmp
, cpu_env
,
5409 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5410 #if defined(TARGET_MIPS64)
5412 /* Move RI/XI fields to bits 31:30 */
5413 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5414 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5417 gen_move_low32(arg
, tmp
);
5418 tcg_temp_free_i64(tmp
);
5423 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5424 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5428 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5429 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5433 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5434 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5438 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5439 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5443 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5444 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5448 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5449 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5453 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5454 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5458 goto cp0_unimplemented
;
5465 TCGv_i64 tmp
= tcg_temp_new_i64();
5466 tcg_gen_ld_i64(tmp
, cpu_env
,
5467 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5468 #if defined(TARGET_MIPS64)
5470 /* Move RI/XI fields to bits 31:30 */
5471 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5472 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5475 gen_move_low32(arg
, tmp
);
5476 tcg_temp_free_i64(tmp
);
5482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5483 rn
= "GlobalNumber";
5486 goto cp0_unimplemented
;
5492 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5493 tcg_gen_ext32s_tl(arg
, arg
);
5497 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5498 rn
= "ContextConfig";
5499 goto cp0_unimplemented
;
5501 CP0_CHECK(ctx
->ulri
);
5502 tcg_gen_ld_tl(arg
, cpu_env
,
5503 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5504 tcg_gen_ext32s_tl(arg
, arg
);
5508 goto cp0_unimplemented
;
5514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5518 check_insn(ctx
, ISA_MIPS32R2
);
5519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5524 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5525 tcg_gen_ext32s_tl(arg
, arg
);
5530 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5531 tcg_gen_ext32s_tl(arg
, arg
);
5536 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5537 tcg_gen_ext32s_tl(arg
, arg
);
5541 goto cp0_unimplemented
;
5547 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5551 check_insn(ctx
, ISA_MIPS32R2
);
5552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5556 check_insn(ctx
, ISA_MIPS32R2
);
5557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5561 check_insn(ctx
, ISA_MIPS32R2
);
5562 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5566 check_insn(ctx
, ISA_MIPS32R2
);
5567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5571 check_insn(ctx
, ISA_MIPS32R2
);
5572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5576 goto cp0_unimplemented
;
5582 check_insn(ctx
, ISA_MIPS32R2
);
5583 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5587 goto cp0_unimplemented
;
5593 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5594 tcg_gen_ext32s_tl(arg
, arg
);
5599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
5610 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
5614 goto cp0_unimplemented
;
5620 /* Mark as an IO operation because we read the time. */
5621 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5624 gen_helper_mfc0_count(arg
, cpu_env
);
5625 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5628 /* Break the TB to be able to take timer interrupts immediately
5629 after reading count. DISAS_STOP isn't sufficient, we need to
5630 ensure we break completely out of translated code. */
5631 gen_save_pc(ctx
->base
.pc_next
+ 4);
5632 ctx
->base
.is_jmp
= DISAS_EXIT
;
5635 /* 6,7 are implementation dependent */
5637 goto cp0_unimplemented
;
5643 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5644 tcg_gen_ext32s_tl(arg
, arg
);
5648 goto cp0_unimplemented
;
5654 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5657 /* 6,7 are implementation dependent */
5659 goto cp0_unimplemented
;
5665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5669 check_insn(ctx
, ISA_MIPS32R2
);
5670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5674 check_insn(ctx
, ISA_MIPS32R2
);
5675 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5679 check_insn(ctx
, ISA_MIPS32R2
);
5680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5684 goto cp0_unimplemented
;
5690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5694 goto cp0_unimplemented
;
5700 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5701 tcg_gen_ext32s_tl(arg
, arg
);
5705 goto cp0_unimplemented
;
5711 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5715 check_insn(ctx
, ISA_MIPS32R2
);
5716 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5717 tcg_gen_ext32s_tl(arg
, arg
);
5721 check_insn(ctx
, ISA_MIPS32R2
);
5722 CP0_CHECK(ctx
->cmgcr
);
5723 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5724 tcg_gen_ext32s_tl(arg
, arg
);
5728 goto cp0_unimplemented
;
5734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5742 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5750 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5754 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5757 /* 6,7 are implementation dependent */
5759 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5763 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5767 goto cp0_unimplemented
;
5773 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5777 CP0_CHECK(ctx
->mrp
);
5778 gen_helper_mfc0_maar(arg
, cpu_env
);
5782 CP0_CHECK(ctx
->mrp
);
5783 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5787 goto cp0_unimplemented
;
5800 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
5801 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5805 goto cp0_unimplemented
;
5818 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
5819 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5823 goto cp0_unimplemented
;
5829 #if defined(TARGET_MIPS64)
5830 check_insn(ctx
, ISA_MIPS3
);
5831 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5832 tcg_gen_ext32s_tl(arg
, arg
);
5837 goto cp0_unimplemented
;
5841 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5842 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5845 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5849 goto cp0_unimplemented
;
5853 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5854 rn
= "'Diagnostic"; /* implementation dependent */
5859 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5863 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5864 rn
= "TraceControl";
5865 goto cp0_unimplemented
;
5867 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5868 rn
= "TraceControl2";
5869 goto cp0_unimplemented
;
5871 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5872 rn
= "UserTraceData";
5873 goto cp0_unimplemented
;
5875 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5877 goto cp0_unimplemented
;
5879 goto cp0_unimplemented
;
5886 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5887 tcg_gen_ext32s_tl(arg
, arg
);
5891 goto cp0_unimplemented
;
5897 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5898 rn
= "Performance0";
5901 // gen_helper_mfc0_performance1(arg);
5902 rn
= "Performance1";
5903 goto cp0_unimplemented
;
5905 // gen_helper_mfc0_performance2(arg);
5906 rn
= "Performance2";
5907 goto cp0_unimplemented
;
5909 // gen_helper_mfc0_performance3(arg);
5910 rn
= "Performance3";
5911 goto cp0_unimplemented
;
5913 // gen_helper_mfc0_performance4(arg);
5914 rn
= "Performance4";
5915 goto cp0_unimplemented
;
5917 // gen_helper_mfc0_performance5(arg);
5918 rn
= "Performance5";
5919 goto cp0_unimplemented
;
5921 // gen_helper_mfc0_performance6(arg);
5922 rn
= "Performance6";
5923 goto cp0_unimplemented
;
5925 // gen_helper_mfc0_performance7(arg);
5926 rn
= "Performance7";
5927 goto cp0_unimplemented
;
5929 goto cp0_unimplemented
;
5935 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5939 goto cp0_unimplemented
;
5948 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5952 goto cp0_unimplemented
;
5962 TCGv_i64 tmp
= tcg_temp_new_i64();
5963 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5964 gen_move_low32(arg
, tmp
);
5965 tcg_temp_free_i64(tmp
);
5973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5977 goto cp0_unimplemented
;
5986 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5993 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5997 goto cp0_unimplemented
;
6003 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6004 tcg_gen_ext32s_tl(arg
, arg
);
6008 goto cp0_unimplemented
;
6015 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6024 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6025 tcg_gen_ld_tl(arg
, cpu_env
,
6026 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6027 tcg_gen_ext32s_tl(arg
, arg
);
6031 goto cp0_unimplemented
;
6035 goto cp0_unimplemented
;
6037 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
6041 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6042 gen_mfc0_unimplemented(ctx
, arg
);
6045 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6047 const char *rn
= "invalid";
6050 check_insn(ctx
, ISA_MIPS32
);
6052 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6060 gen_helper_mtc0_index(cpu_env
, arg
);
6064 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6065 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6069 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6074 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6084 goto cp0_unimplemented
;
6094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6095 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6100 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6105 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6110 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6114 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6115 tcg_gen_st_tl(arg
, cpu_env
,
6116 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6120 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6121 tcg_gen_st_tl(arg
, cpu_env
,
6122 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6123 rn
= "VPEScheFBack";
6126 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6127 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6131 goto cp0_unimplemented
;
6137 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6141 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6142 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6146 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6147 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6151 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6152 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6156 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6157 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6161 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6162 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6166 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6167 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6171 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6172 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6176 goto cp0_unimplemented
;
6182 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6188 rn
= "GlobalNumber";
6191 goto cp0_unimplemented
;
6197 gen_helper_mtc0_context(cpu_env
, arg
);
6201 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6202 rn
= "ContextConfig";
6203 goto cp0_unimplemented
;
6205 CP0_CHECK(ctx
->ulri
);
6206 tcg_gen_st_tl(arg
, cpu_env
,
6207 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6211 goto cp0_unimplemented
;
6217 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6221 check_insn(ctx
, ISA_MIPS32R2
);
6222 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6224 ctx
->base
.is_jmp
= DISAS_STOP
;
6228 gen_helper_mtc0_segctl0(cpu_env
, arg
);
6233 gen_helper_mtc0_segctl1(cpu_env
, arg
);
6238 gen_helper_mtc0_segctl2(cpu_env
, arg
);
6242 goto cp0_unimplemented
;
6248 gen_helper_mtc0_wired(cpu_env
, arg
);
6252 check_insn(ctx
, ISA_MIPS32R2
);
6253 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6257 check_insn(ctx
, ISA_MIPS32R2
);
6258 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6262 check_insn(ctx
, ISA_MIPS32R2
);
6263 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6267 check_insn(ctx
, ISA_MIPS32R2
);
6268 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6272 check_insn(ctx
, ISA_MIPS32R2
);
6273 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
6277 goto cp0_unimplemented
;
6283 check_insn(ctx
, ISA_MIPS32R2
);
6284 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6285 ctx
->base
.is_jmp
= DISAS_STOP
;
6289 goto cp0_unimplemented
;
6311 goto cp0_unimplemented
;
6317 gen_helper_mtc0_count(cpu_env
, arg
);
6320 /* 6,7 are implementation dependent */
6322 goto cp0_unimplemented
;
6328 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6332 goto cp0_unimplemented
;
6338 gen_helper_mtc0_compare(cpu_env
, arg
);
6341 /* 6,7 are implementation dependent */
6343 goto cp0_unimplemented
;
6349 save_cpu_state(ctx
, 1);
6350 gen_helper_mtc0_status(cpu_env
, arg
);
6351 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6352 gen_save_pc(ctx
->base
.pc_next
+ 4);
6353 ctx
->base
.is_jmp
= DISAS_EXIT
;
6357 check_insn(ctx
, ISA_MIPS32R2
);
6358 gen_helper_mtc0_intctl(cpu_env
, arg
);
6359 /* Stop translation as we may have switched the execution mode */
6360 ctx
->base
.is_jmp
= DISAS_STOP
;
6364 check_insn(ctx
, ISA_MIPS32R2
);
6365 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6366 /* Stop translation as we may have switched the execution mode */
6367 ctx
->base
.is_jmp
= DISAS_STOP
;
6371 check_insn(ctx
, ISA_MIPS32R2
);
6372 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6373 /* Stop translation as we may have switched the execution mode */
6374 ctx
->base
.is_jmp
= DISAS_STOP
;
6378 goto cp0_unimplemented
;
6384 save_cpu_state(ctx
, 1);
6385 gen_helper_mtc0_cause(cpu_env
, arg
);
6386 /* Stop translation as we may have triggered an interrupt.
6387 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6388 * translated code to check for pending interrupts. */
6389 gen_save_pc(ctx
->base
.pc_next
+ 4);
6390 ctx
->base
.is_jmp
= DISAS_EXIT
;
6394 goto cp0_unimplemented
;
6400 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6404 goto cp0_unimplemented
;
6414 check_insn(ctx
, ISA_MIPS32R2
);
6415 gen_helper_mtc0_ebase(cpu_env
, arg
);
6419 goto cp0_unimplemented
;
6425 gen_helper_mtc0_config0(cpu_env
, arg
);
6427 /* Stop translation as we may have switched the execution mode */
6428 ctx
->base
.is_jmp
= DISAS_STOP
;
6431 /* ignored, read only */
6435 gen_helper_mtc0_config2(cpu_env
, arg
);
6437 /* Stop translation as we may have switched the execution mode */
6438 ctx
->base
.is_jmp
= DISAS_STOP
;
6441 gen_helper_mtc0_config3(cpu_env
, arg
);
6443 /* Stop translation as we may have switched the execution mode */
6444 ctx
->base
.is_jmp
= DISAS_STOP
;
6447 gen_helper_mtc0_config4(cpu_env
, arg
);
6449 ctx
->base
.is_jmp
= DISAS_STOP
;
6452 gen_helper_mtc0_config5(cpu_env
, arg
);
6454 /* Stop translation as we may have switched the execution mode */
6455 ctx
->base
.is_jmp
= DISAS_STOP
;
6457 /* 6,7 are implementation dependent */
6467 rn
= "Invalid config selector";
6468 goto cp0_unimplemented
;
6474 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6478 CP0_CHECK(ctx
->mrp
);
6479 gen_helper_mtc0_maar(cpu_env
, arg
);
6483 CP0_CHECK(ctx
->mrp
);
6484 gen_helper_mtc0_maari(cpu_env
, arg
);
6488 goto cp0_unimplemented
;
6501 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6502 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6506 goto cp0_unimplemented
;
6519 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6520 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6524 goto cp0_unimplemented
;
6530 #if defined(TARGET_MIPS64)
6531 check_insn(ctx
, ISA_MIPS3
);
6532 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6537 goto cp0_unimplemented
;
6541 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6542 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6545 gen_helper_mtc0_framemask(cpu_env
, arg
);
6549 goto cp0_unimplemented
;
6554 rn
= "Diagnostic"; /* implementation dependent */
6559 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6560 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6561 gen_save_pc(ctx
->base
.pc_next
+ 4);
6562 ctx
->base
.is_jmp
= DISAS_EXIT
;
6566 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6567 rn
= "TraceControl";
6568 /* Stop translation as we may have switched the execution mode */
6569 ctx
->base
.is_jmp
= DISAS_STOP
;
6570 goto cp0_unimplemented
;
6572 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6573 rn
= "TraceControl2";
6574 /* Stop translation as we may have switched the execution mode */
6575 ctx
->base
.is_jmp
= DISAS_STOP
;
6576 goto cp0_unimplemented
;
6578 /* Stop translation as we may have switched the execution mode */
6579 ctx
->base
.is_jmp
= DISAS_STOP
;
6580 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6581 rn
= "UserTraceData";
6582 /* Stop translation as we may have switched the execution mode */
6583 ctx
->base
.is_jmp
= DISAS_STOP
;
6584 goto cp0_unimplemented
;
6586 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6587 /* Stop translation as we may have switched the execution mode */
6588 ctx
->base
.is_jmp
= DISAS_STOP
;
6590 goto cp0_unimplemented
;
6592 goto cp0_unimplemented
;
6599 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6603 goto cp0_unimplemented
;
6609 gen_helper_mtc0_performance0(cpu_env
, arg
);
6610 rn
= "Performance0";
6613 // gen_helper_mtc0_performance1(arg);
6614 rn
= "Performance1";
6615 goto cp0_unimplemented
;
6617 // gen_helper_mtc0_performance2(arg);
6618 rn
= "Performance2";
6619 goto cp0_unimplemented
;
6621 // gen_helper_mtc0_performance3(arg);
6622 rn
= "Performance3";
6623 goto cp0_unimplemented
;
6625 // gen_helper_mtc0_performance4(arg);
6626 rn
= "Performance4";
6627 goto cp0_unimplemented
;
6629 // gen_helper_mtc0_performance5(arg);
6630 rn
= "Performance5";
6631 goto cp0_unimplemented
;
6633 // gen_helper_mtc0_performance6(arg);
6634 rn
= "Performance6";
6635 goto cp0_unimplemented
;
6637 // gen_helper_mtc0_performance7(arg);
6638 rn
= "Performance7";
6639 goto cp0_unimplemented
;
6641 goto cp0_unimplemented
;
6647 gen_helper_mtc0_errctl(cpu_env
, arg
);
6648 ctx
->base
.is_jmp
= DISAS_STOP
;
6652 goto cp0_unimplemented
;
6665 goto cp0_unimplemented
;
6674 gen_helper_mtc0_taglo(cpu_env
, arg
);
6681 gen_helper_mtc0_datalo(cpu_env
, arg
);
6685 goto cp0_unimplemented
;
6694 gen_helper_mtc0_taghi(cpu_env
, arg
);
6701 gen_helper_mtc0_datahi(cpu_env
, arg
);
6706 goto cp0_unimplemented
;
6712 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6716 goto cp0_unimplemented
;
6723 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6732 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6733 tcg_gen_st_tl(arg
, cpu_env
,
6734 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6738 goto cp0_unimplemented
;
6742 goto cp0_unimplemented
;
6744 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6746 /* For simplicity assume that all writes can cause interrupts. */
6747 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6749 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6750 * translated code to check for pending interrupts. */
6751 gen_save_pc(ctx
->base
.pc_next
+ 4);
6752 ctx
->base
.is_jmp
= DISAS_EXIT
;
6757 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6760 #if defined(TARGET_MIPS64)
6761 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6763 const char *rn
= "invalid";
6766 check_insn(ctx
, ISA_MIPS64
);
6772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6776 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6777 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6781 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6782 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6786 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6787 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6796 goto cp0_unimplemented
;
6802 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6803 gen_helper_mfc0_random(arg
, cpu_env
);
6807 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6808 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6812 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6813 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6817 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6818 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6822 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6823 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6827 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6828 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6832 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6833 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6834 rn
= "VPEScheFBack";
6837 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6838 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6842 goto cp0_unimplemented
;
6848 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6852 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6853 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6857 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6858 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6863 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6868 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6873 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6878 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6883 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6887 goto cp0_unimplemented
;
6893 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6898 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6899 rn
= "GlobalNumber";
6902 goto cp0_unimplemented
;
6908 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6912 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6913 rn
= "ContextConfig";
6914 goto cp0_unimplemented
;
6916 CP0_CHECK(ctx
->ulri
);
6917 tcg_gen_ld_tl(arg
, cpu_env
,
6918 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6922 goto cp0_unimplemented
;
6928 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6932 check_insn(ctx
, ISA_MIPS32R2
);
6933 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6938 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6943 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6948 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6952 goto cp0_unimplemented
;
6958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6962 check_insn(ctx
, ISA_MIPS32R2
);
6963 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6967 check_insn(ctx
, ISA_MIPS32R2
);
6968 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6972 check_insn(ctx
, ISA_MIPS32R2
);
6973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6977 check_insn(ctx
, ISA_MIPS32R2
);
6978 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6982 check_insn(ctx
, ISA_MIPS32R2
);
6983 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6987 goto cp0_unimplemented
;
6993 check_insn(ctx
, ISA_MIPS32R2
);
6994 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6998 goto cp0_unimplemented
;
7004 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7009 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7014 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7019 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7020 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7024 goto cp0_unimplemented
;
7030 /* Mark as an IO operation because we read the time. */
7031 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7034 gen_helper_mfc0_count(arg
, cpu_env
);
7035 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7038 /* Break the TB to be able to take timer interrupts immediately
7039 after reading count. DISAS_STOP isn't sufficient, we need to
7040 ensure we break completely out of translated code. */
7041 gen_save_pc(ctx
->base
.pc_next
+ 4);
7042 ctx
->base
.is_jmp
= DISAS_EXIT
;
7045 /* 6,7 are implementation dependent */
7047 goto cp0_unimplemented
;
7053 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7057 goto cp0_unimplemented
;
7063 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7066 /* 6,7 are implementation dependent */
7068 goto cp0_unimplemented
;
7074 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7078 check_insn(ctx
, ISA_MIPS32R2
);
7079 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7083 check_insn(ctx
, ISA_MIPS32R2
);
7084 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7088 check_insn(ctx
, ISA_MIPS32R2
);
7089 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7093 goto cp0_unimplemented
;
7099 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7103 goto cp0_unimplemented
;
7109 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7113 goto cp0_unimplemented
;
7119 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7123 check_insn(ctx
, ISA_MIPS32R2
);
7124 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7128 check_insn(ctx
, ISA_MIPS32R2
);
7129 CP0_CHECK(ctx
->cmgcr
);
7130 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7134 goto cp0_unimplemented
;
7140 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7152 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7156 gen_mfc0_load32(arg
            , offsetof(CPUMIPSState, CP0_Config4));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
        goto cp0_unimplemented;
        gen_helper_dmfc0_lladdr(arg, cpu_env);
        CP0_CHECK(ctx->mrp);
        gen_helper_dmfc0_maar(arg, cpu_env);
        CP0_CHECK(ctx->mrp);
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_1e0i(dmfc0_watchlo, arg, sel);
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_1e0i(mfc0_watchhi, arg, sel);
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS3);
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
        goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
        gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
        // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
        rn = "TraceControl";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
        rn = "TraceControl2";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
        rn = "UserTraceData";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
        rn = "Performance0";
        // gen_helper_dmfc0_performance1(arg);
        rn = "Performance1";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance2(arg);
        rn = "Performance2";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance3(arg);
        rn = "Performance3";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance4(arg);
        rn = "Performance4";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance5(arg);
        rn = "Performance5";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance6(arg);
        rn = "Performance6";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance7(arg);
        rn = "Performance7";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
        goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
        CP0_CHECK(ctx->kscrexist & (1 << sel));
        tcg_gen_ld_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("dmfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
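/*
 * dmtc0: move the full 64-bit GPR value into the CP0 register selected
 * by (reg, sel).  Writes that may change hflags or raise an interrupt
 * force the translation loop to stop (DISAS_STOP / DISAS_EXIT below).
 */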
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    check_insn(ctx, ISA_MIPS64);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_helper_mtc0_index(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_mvpcontrol(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        goto cp0_unimplemented;
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_vpecontrol(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_vpeconf0(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_vpeconf1(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_yqmask(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
        rn = "VPEScheFBack";
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_vpeopt(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_dmtc0_entrylo0(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tcstatus(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tcbind(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tcrestart(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tchalt(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tccontext(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tcschedule(cpu_env, arg);
        CP0_CHECK(ctx->insn_flags & ASE_MT);
        gen_helper_mtc0_tcschefback(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_dmtc0_entrylo1(cpu_env, arg);
        rn = "GlobalNumber";
        goto cp0_unimplemented;
        gen_helper_mtc0_context(cpu_env, arg);
        // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
        rn = "ContextConfig";
        goto cp0_unimplemented;
        CP0_CHECK(ctx->ulri);
        tcg_gen_st_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        goto cp0_unimplemented;
        gen_helper_mtc0_pagemask(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_pagegrain(cpu_env, arg);
        gen_helper_mtc0_segctl0(cpu_env, arg);
        gen_helper_mtc0_segctl1(cpu_env, arg);
        gen_helper_mtc0_segctl2(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_wired(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf0(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf1(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf2(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf3(cpu_env, arg);
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf4(cpu_env, arg);
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_hwrena(cpu_env, arg);
        ctx->base.is_jmp = DISAS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
        goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        gen_helper_mtc0_entryhi(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
        goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        save_cpu_state(ctx, 1);
        gen_helper_mtc0_status(cpu_env, arg);
        /* DISAS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_intctl(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsctl(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        check_insn(ctx, ISA_MIPS32R2);
        gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        goto cp0_unimplemented;
        save_cpu_state(ctx, 1);
        gen_helper_mtc0_cause(cpu_env, arg);
        /* Stop translation as we may have triggered an interrupt.
         * DISAS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_ebase(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_config0(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        /* ignored, read only */
        gen_helper_mtc0_config2(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        gen_helper_mtc0_config3(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        /* currently ignored */
        gen_helper_mtc0_config5(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        /* 6,7 are implementation dependent */
        rn = "Invalid config selector";
        goto cp0_unimplemented;
        gen_helper_mtc0_lladdr(cpu_env, arg);
        CP0_CHECK(ctx->mrp);
        gen_helper_mtc0_maar(cpu_env, arg);
        CP0_CHECK(ctx->mrp);
        gen_helper_mtc0_maari(cpu_env, arg);
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_0e1i(mtc0_watchlo, arg, sel);
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_0e1i(mtc0_watchhi, arg, sel);
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS3);
        gen_helper_mtc0_xcontext(cpu_env, arg);
        goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
        gen_helper_mtc0_framemask(cpu_env, arg);
        goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
        gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
        /* DISAS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        rn = "TraceControl";
        goto cp0_unimplemented;
        // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        rn = "TraceControl2";
        goto cp0_unimplemented;
        // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        rn = "UserTraceData";
        goto cp0_unimplemented;
        // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
        goto cp0_unimplemented;
        gen_helper_mtc0_performance0(cpu_env, arg);
        rn = "Performance0";
        // gen_helper_mtc0_performance1(cpu_env, arg);
        rn = "Performance1";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance2(cpu_env, arg);
        rn = "Performance2";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance3(cpu_env, arg);
        rn = "Performance3";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance4(cpu_env, arg);
        rn = "Performance4";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance5(cpu_env, arg);
        rn = "Performance5";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance6(cpu_env, arg);
        rn = "Performance6";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance7(cpu_env, arg);
        rn = "Performance7";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_helper_mtc0_errctl(cpu_env, arg);
        ctx->base.is_jmp = DISAS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_helper_mtc0_taglo(cpu_env, arg);
        gen_helper_mtc0_datalo(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_taghi(cpu_env, arg);
        gen_helper_mtc0_datahi(cpu_env, arg);
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
        goto cp0_unimplemented;
        gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
        CP0_CHECK(ctx->kscrexist & (1 << sel));
        tcg_gen_st_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("dmtc0", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts. */
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        /* DISAS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
    qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
#endif /* TARGET_MIPS64 */
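/*
 * MT ASE: MFTR reads a register that belongs to the thread context (TC)
 * selected by VPEControl.TargTC.  If the target TC is not accessible
 * from the current VPE, t0 is forced to -1 below.
 */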
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
        gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    trace_mips_translate_tr("mftr", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
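/*
 * MT ASE: MTTR is the store counterpart of MFTR; the GPR value loaded
 * into t0 is written to the selected register of the target thread
 * context.
 */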
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();
    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
        gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->base.is_jmp = DISAS_STOP;
    /* COP2: Not implemented. */
    trace_mips_translate_tr("mttr", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
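/*
 * Dispatch the privileged COP0 class: CP0 register moves (MFC0/MTC0 and
 * their 64-bit, high-half and MT variants), TLB maintenance operations,
 * ERET/DERET and WAIT.  check_cp0_enabled() raises a coprocessor
 * unusable exception when CP0 access is not permitted.
 */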
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";
    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_cp0_enabled(ctx);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_cp0_enabled(ctx);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->base.is_jmp = DISAS_EXIT;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            if (!(ctx->hflags & MIPS_HFLAG_DM)) {
                generate_exception_end(ctx, EXCP_RI);
                gen_helper_deret(cpu_env);
                ctx->base.is_jmp = DISAS_EXIT;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        ctx->base.pc_next += 4;
        save_cpu_state(ctx, 1);
        ctx->base.pc_next -= 4;
        gen_helper_wait(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
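/*
 * FP condition-code branches only record the condition here: bcond is
 * set from the FCC bit(s) held in fpu_fcr31 and the branch kind is
 * noted in hflags; the branch itself is emitted after the delay slot.
 */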
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();
    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->base.pc_next + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
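/*
 * R6 BC1EQZ/BC1NEZ test bit 0 of FPR ft instead of an FCC bit, and have
 * no branch-likely forms; a branch in a forbidden slot is a Reserved
 * Instruction.
 */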
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
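/*
 * FOP() builds the key used to decode COP1 arithmetic: the fmt field
 * (instruction bits 25..21) in the upper part and the 6-bit function
 * field (bits 5..0) in the lower part.
 */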
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),
    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
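/*
 * gen_cp1 handles the GPR<->FPR/FCR move instructions (MFC1, MTC1,
 * CFC1, CTC1 and their 64-bit and high-half variants).  CTC1 may
 * change hflags, so translation stops afterwards.
 */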
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        TCGv_i32 fs_tmp = tcg_const_i32(fs);
        gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
        tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->base.is_jmp = DISAS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
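/* MOVF/MOVT: conditionally copy a GPR depending on an FP condition code. */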
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
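/*
 * R6 SEL.fmt / SELEQZ.fmt / SELNEZ.fmt: select between the inputs based
 * on bit 0 of the condition operand, implemented with movcond.
 */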
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
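/*
 * gen_farith translates the COP1 arithmetic operations; op1 encodes the
 * format and function (see FOP above), and func is the raw 6-bit
 * function field used for the legacy C.cond.fmt comparisons.
 */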
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
9402 TCGv_i32 fp0
= tcg_temp_new_i32();
9403 TCGv_i32 fp1
= tcg_temp_new_i32();
9405 gen_load_fpr32(ctx
, fp0
, fs
);
9406 gen_load_fpr32(ctx
, fp1
, ft
);
9407 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9408 tcg_temp_free_i32(fp1
);
9409 gen_store_fpr32(ctx
, fp0
, fd
);
9410 tcg_temp_free_i32(fp0
);
9415 TCGv_i32 fp0
= tcg_temp_new_i32();
9416 TCGv_i32 fp1
= tcg_temp_new_i32();
9418 gen_load_fpr32(ctx
, fp0
, fs
);
9419 gen_load_fpr32(ctx
, fp1
, ft
);
9420 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9421 tcg_temp_free_i32(fp1
);
9422 gen_store_fpr32(ctx
, fp0
, fd
);
9423 tcg_temp_free_i32(fp0
);
9428 TCGv_i32 fp0
= tcg_temp_new_i32();
9429 TCGv_i32 fp1
= tcg_temp_new_i32();
9431 gen_load_fpr32(ctx
, fp0
, fs
);
9432 gen_load_fpr32(ctx
, fp1
, ft
);
9433 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9434 tcg_temp_free_i32(fp1
);
9435 gen_store_fpr32(ctx
, fp0
, fd
);
9436 tcg_temp_free_i32(fp0
);
9441 TCGv_i32 fp0
= tcg_temp_new_i32();
9443 gen_load_fpr32(ctx
, fp0
, fs
);
9444 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9445 gen_store_fpr32(ctx
, fp0
, fd
);
9446 tcg_temp_free_i32(fp0
);
9451 TCGv_i32 fp0
= tcg_temp_new_i32();
9453 gen_load_fpr32(ctx
, fp0
, fs
);
9455 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9457 gen_helper_float_abs_s(fp0
, fp0
);
9459 gen_store_fpr32(ctx
, fp0
, fd
);
9460 tcg_temp_free_i32(fp0
);
9465 TCGv_i32 fp0
= tcg_temp_new_i32();
9467 gen_load_fpr32(ctx
, fp0
, fs
);
9468 gen_store_fpr32(ctx
, fp0
, fd
);
9469 tcg_temp_free_i32(fp0
);
9474 TCGv_i32 fp0
= tcg_temp_new_i32();
9476 gen_load_fpr32(ctx
, fp0
, fs
);
9478 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9480 gen_helper_float_chs_s(fp0
, fp0
);
9482 gen_store_fpr32(ctx
, fp0
, fd
);
9483 tcg_temp_free_i32(fp0
);
9487 check_cp1_64bitmode(ctx
);
9489 TCGv_i32 fp32
= tcg_temp_new_i32();
9490 TCGv_i64 fp64
= tcg_temp_new_i64();
9492 gen_load_fpr32(ctx
, fp32
, fs
);
9494 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9496 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9498 tcg_temp_free_i32(fp32
);
9499 gen_store_fpr64(ctx
, fp64
, fd
);
9500 tcg_temp_free_i64(fp64
);
9504 check_cp1_64bitmode(ctx
);
9506 TCGv_i32 fp32
= tcg_temp_new_i32();
9507 TCGv_i64 fp64
= tcg_temp_new_i64();
9509 gen_load_fpr32(ctx
, fp32
, fs
);
9511 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9513 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9515 tcg_temp_free_i32(fp32
);
9516 gen_store_fpr64(ctx
, fp64
, fd
);
9517 tcg_temp_free_i64(fp64
);
9521 check_cp1_64bitmode(ctx
);
9523 TCGv_i32 fp32
= tcg_temp_new_i32();
9524 TCGv_i64 fp64
= tcg_temp_new_i64();
9526 gen_load_fpr32(ctx
, fp32
, fs
);
9528 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9530 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9532 tcg_temp_free_i32(fp32
);
9533 gen_store_fpr64(ctx
, fp64
, fd
);
9534 tcg_temp_free_i64(fp64
);
9538 check_cp1_64bitmode(ctx
);
9540 TCGv_i32 fp32
= tcg_temp_new_i32();
9541 TCGv_i64 fp64
= tcg_temp_new_i64();
9543 gen_load_fpr32(ctx
, fp32
, fs
);
9545 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9547 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9549 tcg_temp_free_i32(fp32
);
9550 gen_store_fpr64(ctx
, fp64
, fd
);
9551 tcg_temp_free_i64(fp64
);
9556 TCGv_i32 fp0
= tcg_temp_new_i32();
9558 gen_load_fpr32(ctx
, fp0
, fs
);
9560 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9562 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9564 gen_store_fpr32(ctx
, fp0
, fd
);
9565 tcg_temp_free_i32(fp0
);
9570 TCGv_i32 fp0
= tcg_temp_new_i32();
9572 gen_load_fpr32(ctx
, fp0
, fs
);
9574 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9576 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9578 gen_store_fpr32(ctx
, fp0
, fd
);
9579 tcg_temp_free_i32(fp0
);
9584 TCGv_i32 fp0
= tcg_temp_new_i32();
9586 gen_load_fpr32(ctx
, fp0
, fs
);
9588 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9590 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9592 gen_store_fpr32(ctx
, fp0
, fd
);
9593 tcg_temp_free_i32(fp0
);
9598 TCGv_i32 fp0
= tcg_temp_new_i32();
9600 gen_load_fpr32(ctx
, fp0
, fs
);
9602 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9604 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9606 gen_store_fpr32(ctx
, fp0
, fd
);
9607 tcg_temp_free_i32(fp0
);
9611 check_insn(ctx
, ISA_MIPS32R6
);
9612 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9615 check_insn(ctx
, ISA_MIPS32R6
);
9616 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9619 check_insn(ctx
, ISA_MIPS32R6
);
9620 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9623 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9624 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9627 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9629 TCGLabel
*l1
= gen_new_label();
9633 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9635 fp0
= tcg_temp_new_i32();
9636 gen_load_fpr32(ctx
, fp0
, fs
);
9637 gen_store_fpr32(ctx
, fp0
, fd
);
9638 tcg_temp_free_i32(fp0
);
9643 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9645 TCGLabel
*l1
= gen_new_label();
9649 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9650 fp0
= tcg_temp_new_i32();
9651 gen_load_fpr32(ctx
, fp0
, fs
);
9652 gen_store_fpr32(ctx
, fp0
, fd
);
9653 tcg_temp_free_i32(fp0
);
9660 TCGv_i32 fp0
= tcg_temp_new_i32();
9662 gen_load_fpr32(ctx
, fp0
, fs
);
9663 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9664 gen_store_fpr32(ctx
, fp0
, fd
);
9665 tcg_temp_free_i32(fp0
);
9670 TCGv_i32 fp0
= tcg_temp_new_i32();
9672 gen_load_fpr32(ctx
, fp0
, fs
);
9673 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9674 gen_store_fpr32(ctx
, fp0
, fd
);
9675 tcg_temp_free_i32(fp0
);
9679 check_insn(ctx
, ISA_MIPS32R6
);
9681 TCGv_i32 fp0
= tcg_temp_new_i32();
9682 TCGv_i32 fp1
= tcg_temp_new_i32();
9683 TCGv_i32 fp2
= tcg_temp_new_i32();
9684 gen_load_fpr32(ctx
, fp0
, fs
);
9685 gen_load_fpr32(ctx
, fp1
, ft
);
9686 gen_load_fpr32(ctx
, fp2
, fd
);
9687 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9688 gen_store_fpr32(ctx
, fp2
, fd
);
9689 tcg_temp_free_i32(fp2
);
9690 tcg_temp_free_i32(fp1
);
9691 tcg_temp_free_i32(fp0
);
9695 check_insn(ctx
, ISA_MIPS32R6
);
9697 TCGv_i32 fp0
= tcg_temp_new_i32();
9698 TCGv_i32 fp1
= tcg_temp_new_i32();
9699 TCGv_i32 fp2
= tcg_temp_new_i32();
9700 gen_load_fpr32(ctx
, fp0
, fs
);
9701 gen_load_fpr32(ctx
, fp1
, ft
);
9702 gen_load_fpr32(ctx
, fp2
, fd
);
9703 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9704 gen_store_fpr32(ctx
, fp2
, fd
);
9705 tcg_temp_free_i32(fp2
);
9706 tcg_temp_free_i32(fp1
);
9707 tcg_temp_free_i32(fp0
);
9711 check_insn(ctx
, ISA_MIPS32R6
);
9713 TCGv_i32 fp0
= tcg_temp_new_i32();
9714 gen_load_fpr32(ctx
, fp0
, fs
);
9715 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9716 gen_store_fpr32(ctx
, fp0
, fd
);
9717 tcg_temp_free_i32(fp0
);
9721 check_insn(ctx
, ISA_MIPS32R6
);
9723 TCGv_i32 fp0
= tcg_temp_new_i32();
9724 gen_load_fpr32(ctx
, fp0
, fs
);
9725 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9726 gen_store_fpr32(ctx
, fp0
, fd
);
9727 tcg_temp_free_i32(fp0
);
9730 case OPC_MIN_S
: /* OPC_RECIP2_S */
9731 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9733 TCGv_i32 fp0
= tcg_temp_new_i32();
9734 TCGv_i32 fp1
= tcg_temp_new_i32();
9735 TCGv_i32 fp2
= tcg_temp_new_i32();
9736 gen_load_fpr32(ctx
, fp0
, fs
);
9737 gen_load_fpr32(ctx
, fp1
, ft
);
9738 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9739 gen_store_fpr32(ctx
, fp2
, fd
);
9740 tcg_temp_free_i32(fp2
);
9741 tcg_temp_free_i32(fp1
);
9742 tcg_temp_free_i32(fp0
);
9745 check_cp1_64bitmode(ctx
);
9747 TCGv_i32 fp0
= tcg_temp_new_i32();
9748 TCGv_i32 fp1
= tcg_temp_new_i32();
9750 gen_load_fpr32(ctx
, fp0
, fs
);
9751 gen_load_fpr32(ctx
, fp1
, ft
);
9752 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9753 tcg_temp_free_i32(fp1
);
9754 gen_store_fpr32(ctx
, fp0
, fd
);
9755 tcg_temp_free_i32(fp0
);
9759 case OPC_MINA_S
: /* OPC_RECIP1_S */
9760 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9762 TCGv_i32 fp0
= tcg_temp_new_i32();
9763 TCGv_i32 fp1
= tcg_temp_new_i32();
9764 TCGv_i32 fp2
= tcg_temp_new_i32();
9765 gen_load_fpr32(ctx
, fp0
, fs
);
9766 gen_load_fpr32(ctx
, fp1
, ft
);
9767 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9768 gen_store_fpr32(ctx
, fp2
, fd
);
9769 tcg_temp_free_i32(fp2
);
9770 tcg_temp_free_i32(fp1
);
9771 tcg_temp_free_i32(fp0
);
9774 check_cp1_64bitmode(ctx
);
9776 TCGv_i32 fp0
= tcg_temp_new_i32();
9778 gen_load_fpr32(ctx
, fp0
, fs
);
9779 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9780 gen_store_fpr32(ctx
, fp0
, fd
);
9781 tcg_temp_free_i32(fp0
);
9785 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9786 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9788 TCGv_i32 fp0
= tcg_temp_new_i32();
9789 TCGv_i32 fp1
= tcg_temp_new_i32();
9790 gen_load_fpr32(ctx
, fp0
, fs
);
9791 gen_load_fpr32(ctx
, fp1
, ft
);
9792 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9793 gen_store_fpr32(ctx
, fp1
, fd
);
9794 tcg_temp_free_i32(fp1
);
9795 tcg_temp_free_i32(fp0
);
9798 check_cp1_64bitmode(ctx
);
9800 TCGv_i32 fp0
= tcg_temp_new_i32();
9802 gen_load_fpr32(ctx
, fp0
, fs
);
9803 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9804 gen_store_fpr32(ctx
, fp0
, fd
);
9805 tcg_temp_free_i32(fp0
);
9809 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9810 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9812 TCGv_i32 fp0
= tcg_temp_new_i32();
9813 TCGv_i32 fp1
= tcg_temp_new_i32();
9814 gen_load_fpr32(ctx
, fp0
, fs
);
9815 gen_load_fpr32(ctx
, fp1
, ft
);
9816 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9817 gen_store_fpr32(ctx
, fp1
, fd
);
9818 tcg_temp_free_i32(fp1
);
9819 tcg_temp_free_i32(fp0
);
9822 check_cp1_64bitmode(ctx
);
9824 TCGv_i32 fp0
= tcg_temp_new_i32();
9825 TCGv_i32 fp1
= tcg_temp_new_i32();
9827 gen_load_fpr32(ctx
, fp0
, fs
);
9828 gen_load_fpr32(ctx
, fp1
, ft
);
9829 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9830 tcg_temp_free_i32(fp1
);
9831 gen_store_fpr32(ctx
, fp0
, fd
);
9832 tcg_temp_free_i32(fp0
);
9837 check_cp1_registers(ctx
, fd
);
9839 TCGv_i32 fp32
= tcg_temp_new_i32();
9840 TCGv_i64 fp64
= tcg_temp_new_i64();
9842 gen_load_fpr32(ctx
, fp32
, fs
);
9843 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9844 tcg_temp_free_i32(fp32
);
9845 gen_store_fpr64(ctx
, fp64
, fd
);
9846 tcg_temp_free_i64(fp64
);
9851 TCGv_i32 fp0
= tcg_temp_new_i32();
9853 gen_load_fpr32(ctx
, fp0
, fs
);
9855 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9857 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9859 gen_store_fpr32(ctx
, fp0
, fd
);
9860 tcg_temp_free_i32(fp0
);
9864 check_cp1_64bitmode(ctx
);
9866 TCGv_i32 fp32
= tcg_temp_new_i32();
9867 TCGv_i64 fp64
= tcg_temp_new_i64();
9869 gen_load_fpr32(ctx
, fp32
, fs
);
9871 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9873 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9875 tcg_temp_free_i32(fp32
);
9876 gen_store_fpr64(ctx
, fp64
, fd
);
9877 tcg_temp_free_i64(fp64
);
9883 TCGv_i64 fp64
= tcg_temp_new_i64();
9884 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9885 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9887 gen_load_fpr32(ctx
, fp32_0
, fs
);
9888 gen_load_fpr32(ctx
, fp32_1
, ft
);
9889 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9890 tcg_temp_free_i32(fp32_1
);
9891 tcg_temp_free_i32(fp32_0
);
9892 gen_store_fpr64(ctx
, fp64
, fd
);
9893 tcg_temp_free_i64(fp64
);
9905 case OPC_CMP_NGLE_S
:
9912 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9913 if (ctx
->opcode
& (1 << 6)) {
9914 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9916 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9920 check_cp1_registers(ctx
, fs
| ft
| fd
);
9922 TCGv_i64 fp0
= tcg_temp_new_i64();
9923 TCGv_i64 fp1
= tcg_temp_new_i64();
9925 gen_load_fpr64(ctx
, fp0
, fs
);
9926 gen_load_fpr64(ctx
, fp1
, ft
);
9927 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9928 tcg_temp_free_i64(fp1
);
9929 gen_store_fpr64(ctx
, fp0
, fd
);
9930 tcg_temp_free_i64(fp0
);
9934 check_cp1_registers(ctx
, fs
| ft
| fd
);
9936 TCGv_i64 fp0
= tcg_temp_new_i64();
9937 TCGv_i64 fp1
= tcg_temp_new_i64();
9939 gen_load_fpr64(ctx
, fp0
, fs
);
9940 gen_load_fpr64(ctx
, fp1
, ft
);
9941 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9942 tcg_temp_free_i64(fp1
);
9943 gen_store_fpr64(ctx
, fp0
, fd
);
9944 tcg_temp_free_i64(fp0
);
9948 check_cp1_registers(ctx
, fs
| ft
| fd
);
9950 TCGv_i64 fp0
= tcg_temp_new_i64();
9951 TCGv_i64 fp1
= tcg_temp_new_i64();
9953 gen_load_fpr64(ctx
, fp0
, fs
);
9954 gen_load_fpr64(ctx
, fp1
, ft
);
9955 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9956 tcg_temp_free_i64(fp1
);
9957 gen_store_fpr64(ctx
, fp0
, fd
);
9958 tcg_temp_free_i64(fp0
);
9962 check_cp1_registers(ctx
, fs
| ft
| fd
);
9964 TCGv_i64 fp0
= tcg_temp_new_i64();
9965 TCGv_i64 fp1
= tcg_temp_new_i64();
9967 gen_load_fpr64(ctx
, fp0
, fs
);
9968 gen_load_fpr64(ctx
, fp1
, ft
);
9969 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9970 tcg_temp_free_i64(fp1
);
9971 gen_store_fpr64(ctx
, fp0
, fd
);
9972 tcg_temp_free_i64(fp0
);
9976 check_cp1_registers(ctx
, fs
| fd
);
9978 TCGv_i64 fp0
= tcg_temp_new_i64();
9980 gen_load_fpr64(ctx
, fp0
, fs
);
9981 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9982 gen_store_fpr64(ctx
, fp0
, fd
);
9983 tcg_temp_free_i64(fp0
);
9987 check_cp1_registers(ctx
, fs
| fd
);
9989 TCGv_i64 fp0
= tcg_temp_new_i64();
9991 gen_load_fpr64(ctx
, fp0
, fs
);
9993 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9995 gen_helper_float_abs_d(fp0
, fp0
);
9997 gen_store_fpr64(ctx
, fp0
, fd
);
9998 tcg_temp_free_i64(fp0
);
10002 check_cp1_registers(ctx
, fs
| fd
);
10004 TCGv_i64 fp0
= tcg_temp_new_i64();
10006 gen_load_fpr64(ctx
, fp0
, fs
);
10007 gen_store_fpr64(ctx
, fp0
, fd
);
10008 tcg_temp_free_i64(fp0
);
10012 check_cp1_registers(ctx
, fs
| fd
);
10014 TCGv_i64 fp0
= tcg_temp_new_i64();
10016 gen_load_fpr64(ctx
, fp0
, fs
);
10017 if (ctx
->abs2008
) {
10018 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10020 gen_helper_float_chs_d(fp0
, fp0
);
10022 gen_store_fpr64(ctx
, fp0
, fd
);
10023 tcg_temp_free_i64(fp0
);
10026 case OPC_ROUND_L_D
:
10027 check_cp1_64bitmode(ctx
);
10029 TCGv_i64 fp0
= tcg_temp_new_i64();
10031 gen_load_fpr64(ctx
, fp0
, fs
);
10032 if (ctx
->nan2008
) {
10033 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10035 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10037 gen_store_fpr64(ctx
, fp0
, fd
);
10038 tcg_temp_free_i64(fp0
);
10041 case OPC_TRUNC_L_D
:
10042 check_cp1_64bitmode(ctx
);
10044 TCGv_i64 fp0
= tcg_temp_new_i64();
10046 gen_load_fpr64(ctx
, fp0
, fs
);
10047 if (ctx
->nan2008
) {
10048 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10050 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10052 gen_store_fpr64(ctx
, fp0
, fd
);
10053 tcg_temp_free_i64(fp0
);
10057 check_cp1_64bitmode(ctx
);
10059 TCGv_i64 fp0
= tcg_temp_new_i64();
10061 gen_load_fpr64(ctx
, fp0
, fs
);
10062 if (ctx
->nan2008
) {
10063 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10065 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10067 gen_store_fpr64(ctx
, fp0
, fd
);
10068 tcg_temp_free_i64(fp0
);
10071 case OPC_FLOOR_L_D
:
10072 check_cp1_64bitmode(ctx
);
10074 TCGv_i64 fp0
= tcg_temp_new_i64();
10076 gen_load_fpr64(ctx
, fp0
, fs
);
10077 if (ctx
->nan2008
) {
10078 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10080 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10082 gen_store_fpr64(ctx
, fp0
, fd
);
10083 tcg_temp_free_i64(fp0
);
10086 case OPC_ROUND_W_D
:
10087 check_cp1_registers(ctx
, fs
);
10089 TCGv_i32 fp32
= tcg_temp_new_i32();
10090 TCGv_i64 fp64
= tcg_temp_new_i64();
10092 gen_load_fpr64(ctx
, fp64
, fs
);
10093 if (ctx
->nan2008
) {
10094 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10096 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10098 tcg_temp_free_i64(fp64
);
10099 gen_store_fpr32(ctx
, fp32
, fd
);
10100 tcg_temp_free_i32(fp32
);
10103 case OPC_TRUNC_W_D
:
10104 check_cp1_registers(ctx
, fs
);
10106 TCGv_i32 fp32
= tcg_temp_new_i32();
10107 TCGv_i64 fp64
= tcg_temp_new_i64();
10109 gen_load_fpr64(ctx
, fp64
, fs
);
10110 if (ctx
->nan2008
) {
10111 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10113 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10115 tcg_temp_free_i64(fp64
);
10116 gen_store_fpr32(ctx
, fp32
, fd
);
10117 tcg_temp_free_i32(fp32
);
10121 check_cp1_registers(ctx
, fs
);
10123 TCGv_i32 fp32
= tcg_temp_new_i32();
10124 TCGv_i64 fp64
= tcg_temp_new_i64();
10126 gen_load_fpr64(ctx
, fp64
, fs
);
10127 if (ctx
->nan2008
) {
10128 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10130 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10132 tcg_temp_free_i64(fp64
);
10133 gen_store_fpr32(ctx
, fp32
, fd
);
10134 tcg_temp_free_i32(fp32
);
10137 case OPC_FLOOR_W_D
:
10138 check_cp1_registers(ctx
, fs
);
10140 TCGv_i32 fp32
= tcg_temp_new_i32();
10141 TCGv_i64 fp64
= tcg_temp_new_i64();
10143 gen_load_fpr64(ctx
, fp64
, fs
);
10144 if (ctx
->nan2008
) {
10145 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10147 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10149 tcg_temp_free_i64(fp64
);
10150 gen_store_fpr32(ctx
, fp32
, fd
);
10151 tcg_temp_free_i32(fp32
);
10155 check_insn(ctx
, ISA_MIPS32R6
);
10156 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10159 check_insn(ctx
, ISA_MIPS32R6
);
10160 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10163 check_insn(ctx
, ISA_MIPS32R6
);
10164 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10167 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10168 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10171 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10173 TCGLabel
*l1
= gen_new_label();
10177 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10179 fp0
= tcg_temp_new_i64();
10180 gen_load_fpr64(ctx
, fp0
, fs
);
10181 gen_store_fpr64(ctx
, fp0
, fd
);
10182 tcg_temp_free_i64(fp0
);
10187 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10189 TCGLabel
*l1
= gen_new_label();
10193 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10194 fp0
= tcg_temp_new_i64();
10195 gen_load_fpr64(ctx
, fp0
, fs
);
10196 gen_store_fpr64(ctx
, fp0
, fd
);
10197 tcg_temp_free_i64(fp0
);
10203 check_cp1_registers(ctx
, fs
| fd
);
10205 TCGv_i64 fp0
= tcg_temp_new_i64();
10207 gen_load_fpr64(ctx
, fp0
, fs
);
10208 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
10209 gen_store_fpr64(ctx
, fp0
, fd
);
10210 tcg_temp_free_i64(fp0
);
10214 check_cp1_registers(ctx
, fs
| fd
);
10216 TCGv_i64 fp0
= tcg_temp_new_i64();
10218 gen_load_fpr64(ctx
, fp0
, fs
);
10219 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
10220 gen_store_fpr64(ctx
, fp0
, fd
);
10221 tcg_temp_free_i64(fp0
);
10225 check_insn(ctx
, ISA_MIPS32R6
);
10227 TCGv_i64 fp0
= tcg_temp_new_i64();
10228 TCGv_i64 fp1
= tcg_temp_new_i64();
10229 TCGv_i64 fp2
= tcg_temp_new_i64();
10230 gen_load_fpr64(ctx
, fp0
, fs
);
10231 gen_load_fpr64(ctx
, fp1
, ft
);
10232 gen_load_fpr64(ctx
, fp2
, fd
);
10233 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10234 gen_store_fpr64(ctx
, fp2
, fd
);
10235 tcg_temp_free_i64(fp2
);
10236 tcg_temp_free_i64(fp1
);
10237 tcg_temp_free_i64(fp0
);
10241 check_insn(ctx
, ISA_MIPS32R6
);
10243 TCGv_i64 fp0
= tcg_temp_new_i64();
10244 TCGv_i64 fp1
= tcg_temp_new_i64();
10245 TCGv_i64 fp2
= tcg_temp_new_i64();
10246 gen_load_fpr64(ctx
, fp0
, fs
);
10247 gen_load_fpr64(ctx
, fp1
, ft
);
10248 gen_load_fpr64(ctx
, fp2
, fd
);
10249 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10250 gen_store_fpr64(ctx
, fp2
, fd
);
10251 tcg_temp_free_i64(fp2
);
10252 tcg_temp_free_i64(fp1
);
10253 tcg_temp_free_i64(fp0
);
10257 check_insn(ctx
, ISA_MIPS32R6
);
10259 TCGv_i64 fp0
= tcg_temp_new_i64();
10260 gen_load_fpr64(ctx
, fp0
, fs
);
10261 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
10262 gen_store_fpr64(ctx
, fp0
, fd
);
10263 tcg_temp_free_i64(fp0
);
10267 check_insn(ctx
, ISA_MIPS32R6
);
10269 TCGv_i64 fp0
= tcg_temp_new_i64();
10270 gen_load_fpr64(ctx
, fp0
, fs
);
10271 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
10272 gen_store_fpr64(ctx
, fp0
, fd
);
10273 tcg_temp_free_i64(fp0
);
10276 case OPC_MIN_D
: /* OPC_RECIP2_D */
10277 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10279 TCGv_i64 fp0
= tcg_temp_new_i64();
10280 TCGv_i64 fp1
= tcg_temp_new_i64();
10281 gen_load_fpr64(ctx
, fp0
, fs
);
10282 gen_load_fpr64(ctx
, fp1
, ft
);
10283 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
10284 gen_store_fpr64(ctx
, fp1
, fd
);
10285 tcg_temp_free_i64(fp1
);
10286 tcg_temp_free_i64(fp0
);
10289 check_cp1_64bitmode(ctx
);
10291 TCGv_i64 fp0
= tcg_temp_new_i64();
10292 TCGv_i64 fp1
= tcg_temp_new_i64();
10294 gen_load_fpr64(ctx
, fp0
, fs
);
10295 gen_load_fpr64(ctx
, fp1
, ft
);
10296 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
10297 tcg_temp_free_i64(fp1
);
10298 gen_store_fpr64(ctx
, fp0
, fd
);
10299 tcg_temp_free_i64(fp0
);
10303 case OPC_MINA_D
: /* OPC_RECIP1_D */
10304 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10306 TCGv_i64 fp0
= tcg_temp_new_i64();
10307 TCGv_i64 fp1
= tcg_temp_new_i64();
10308 gen_load_fpr64(ctx
, fp0
, fs
);
10309 gen_load_fpr64(ctx
, fp1
, ft
);
10310 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
10311 gen_store_fpr64(ctx
, fp1
, fd
);
10312 tcg_temp_free_i64(fp1
);
10313 tcg_temp_free_i64(fp0
);
10316 check_cp1_64bitmode(ctx
);
10318 TCGv_i64 fp0
= tcg_temp_new_i64();
10320 gen_load_fpr64(ctx
, fp0
, fs
);
10321 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
10322 gen_store_fpr64(ctx
, fp0
, fd
);
10323 tcg_temp_free_i64(fp0
);
10327 case OPC_MAX_D
: /* OPC_RSQRT1_D */
10328 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10330 TCGv_i64 fp0
= tcg_temp_new_i64();
10331 TCGv_i64 fp1
= tcg_temp_new_i64();
10332 gen_load_fpr64(ctx
, fp0
, fs
);
10333 gen_load_fpr64(ctx
, fp1
, ft
);
10334 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
10335 gen_store_fpr64(ctx
, fp1
, fd
);
10336 tcg_temp_free_i64(fp1
);
10337 tcg_temp_free_i64(fp0
);
10340 check_cp1_64bitmode(ctx
);
10342 TCGv_i64 fp0
= tcg_temp_new_i64();
10344 gen_load_fpr64(ctx
, fp0
, fs
);
10345 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
10346 gen_store_fpr64(ctx
, fp0
, fd
);
10347 tcg_temp_free_i64(fp0
);
10351 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
10352 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10354 TCGv_i64 fp0
= tcg_temp_new_i64();
10355 TCGv_i64 fp1
= tcg_temp_new_i64();
10356 gen_load_fpr64(ctx
, fp0
, fs
);
10357 gen_load_fpr64(ctx
, fp1
, ft
);
10358 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
10359 gen_store_fpr64(ctx
, fp1
, fd
);
10360 tcg_temp_free_i64(fp1
);
10361 tcg_temp_free_i64(fp0
);
10364 check_cp1_64bitmode(ctx
);
10366 TCGv_i64 fp0
= tcg_temp_new_i64();
10367 TCGv_i64 fp1
= tcg_temp_new_i64();
10369 gen_load_fpr64(ctx
, fp0
, fs
);
10370 gen_load_fpr64(ctx
, fp1
, ft
);
10371 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
10372 tcg_temp_free_i64(fp1
);
10373 gen_store_fpr64(ctx
, fp0
, fd
);
10374 tcg_temp_free_i64(fp0
);
10381 case OPC_CMP_UEQ_D
:
10382 case OPC_CMP_OLT_D
:
10383 case OPC_CMP_ULT_D
:
10384 case OPC_CMP_OLE_D
:
10385 case OPC_CMP_ULE_D
:
10387 case OPC_CMP_NGLE_D
:
10388 case OPC_CMP_SEQ_D
:
10389 case OPC_CMP_NGL_D
:
10391 case OPC_CMP_NGE_D
:
10393 case OPC_CMP_NGT_D
:
10394 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10395 if (ctx
->opcode
& (1 << 6)) {
10396 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
10398 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
10402 check_cp1_registers(ctx
, fs
);
10404 TCGv_i32 fp32
= tcg_temp_new_i32();
10405 TCGv_i64 fp64
= tcg_temp_new_i64();
10407 gen_load_fpr64(ctx
, fp64
, fs
);
10408 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10409 tcg_temp_free_i64(fp64
);
10410 gen_store_fpr32(ctx
, fp32
, fd
);
10411 tcg_temp_free_i32(fp32
);
10415 check_cp1_registers(ctx
, fs
);
10417 TCGv_i32 fp32
= tcg_temp_new_i32();
10418 TCGv_i64 fp64
= tcg_temp_new_i64();
10420 gen_load_fpr64(ctx
, fp64
, fs
);
10421 if (ctx
->nan2008
) {
10422 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10424 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10426 tcg_temp_free_i64(fp64
);
10427 gen_store_fpr32(ctx
, fp32
, fd
);
10428 tcg_temp_free_i32(fp32
);
10432 check_cp1_64bitmode(ctx
);
10434 TCGv_i64 fp0
= tcg_temp_new_i64();
10436 gen_load_fpr64(ctx
, fp0
, fs
);
10437 if (ctx
->nan2008
) {
10438 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10440 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10442 gen_store_fpr64(ctx
, fp0
, fd
);
10443 tcg_temp_free_i64(fp0
);
10448 TCGv_i32 fp0
= tcg_temp_new_i32();
10450 gen_load_fpr32(ctx
, fp0
, fs
);
10451 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10452 gen_store_fpr32(ctx
, fp0
, fd
);
10453 tcg_temp_free_i32(fp0
);
10457 check_cp1_registers(ctx
, fd
);
10459 TCGv_i32 fp32
= tcg_temp_new_i32();
10460 TCGv_i64 fp64
= tcg_temp_new_i64();
10462 gen_load_fpr32(ctx
, fp32
, fs
);
10463 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10464 tcg_temp_free_i32(fp32
);
10465 gen_store_fpr64(ctx
, fp64
, fd
);
10466 tcg_temp_free_i64(fp64
);
10470 check_cp1_64bitmode(ctx
);
10472 TCGv_i32 fp32
= tcg_temp_new_i32();
10473 TCGv_i64 fp64
= tcg_temp_new_i64();
10475 gen_load_fpr64(ctx
, fp64
, fs
);
10476 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10477 tcg_temp_free_i64(fp64
);
10478 gen_store_fpr32(ctx
, fp32
, fd
);
10479 tcg_temp_free_i32(fp32
);
10483 check_cp1_64bitmode(ctx
);
10485 TCGv_i64 fp0
= tcg_temp_new_i64();
10487 gen_load_fpr64(ctx
, fp0
, fs
);
10488 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10489 gen_store_fpr64(ctx
, fp0
, fd
);
10490 tcg_temp_free_i64(fp0
);
10493 case OPC_CVT_PS_PW
:
10496 TCGv_i64 fp0
= tcg_temp_new_i64();
10498 gen_load_fpr64(ctx
, fp0
, fs
);
10499 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10500 gen_store_fpr64(ctx
, fp0
, fd
);
10501 tcg_temp_free_i64(fp0
);
10507 TCGv_i64 fp0
= tcg_temp_new_i64();
10508 TCGv_i64 fp1
= tcg_temp_new_i64();
10510 gen_load_fpr64(ctx
, fp0
, fs
);
10511 gen_load_fpr64(ctx
, fp1
, ft
);
10512 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10513 tcg_temp_free_i64(fp1
);
10514 gen_store_fpr64(ctx
, fp0
, fd
);
10515 tcg_temp_free_i64(fp0
);
10521 TCGv_i64 fp0
= tcg_temp_new_i64();
10522 TCGv_i64 fp1
= tcg_temp_new_i64();
10524 gen_load_fpr64(ctx
, fp0
, fs
);
10525 gen_load_fpr64(ctx
, fp1
, ft
);
10526 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10527 tcg_temp_free_i64(fp1
);
10528 gen_store_fpr64(ctx
, fp0
, fd
);
10529 tcg_temp_free_i64(fp0
);
10535 TCGv_i64 fp0
= tcg_temp_new_i64();
10536 TCGv_i64 fp1
= tcg_temp_new_i64();
10538 gen_load_fpr64(ctx
, fp0
, fs
);
10539 gen_load_fpr64(ctx
, fp1
, ft
);
10540 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10541 tcg_temp_free_i64(fp1
);
10542 gen_store_fpr64(ctx
, fp0
, fd
);
10543 tcg_temp_free_i64(fp0
);
10549 TCGv_i64 fp0
= tcg_temp_new_i64();
10551 gen_load_fpr64(ctx
, fp0
, fs
);
10552 gen_helper_float_abs_ps(fp0
, fp0
);
10553 gen_store_fpr64(ctx
, fp0
, fd
);
10554 tcg_temp_free_i64(fp0
);
10560 TCGv_i64 fp0
= tcg_temp_new_i64();
10562 gen_load_fpr64(ctx
, fp0
, fs
);
10563 gen_store_fpr64(ctx
, fp0
, fd
);
10564 tcg_temp_free_i64(fp0
);
10570 TCGv_i64 fp0
= tcg_temp_new_i64();
10572 gen_load_fpr64(ctx
, fp0
, fs
);
10573 gen_helper_float_chs_ps(fp0
, fp0
);
10574 gen_store_fpr64(ctx
, fp0
, fd
);
10575 tcg_temp_free_i64(fp0
);
10580 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10585 TCGLabel
*l1
= gen_new_label();
10589 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10590 fp0
= tcg_temp_new_i64();
10591 gen_load_fpr64(ctx
, fp0
, fs
);
10592 gen_store_fpr64(ctx
, fp0
, fd
);
10593 tcg_temp_free_i64(fp0
);
10600 TCGLabel
*l1
= gen_new_label();
10604 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10605 fp0
= tcg_temp_new_i64();
10606 gen_load_fpr64(ctx
, fp0
, fs
);
10607 gen_store_fpr64(ctx
, fp0
, fd
);
10608 tcg_temp_free_i64(fp0
);
10616 TCGv_i64 fp0
= tcg_temp_new_i64();
10617 TCGv_i64 fp1
= tcg_temp_new_i64();
10619 gen_load_fpr64(ctx
, fp0
, ft
);
10620 gen_load_fpr64(ctx
, fp1
, fs
);
10621 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10622 tcg_temp_free_i64(fp1
);
10623 gen_store_fpr64(ctx
, fp0
, fd
);
10624 tcg_temp_free_i64(fp0
);
10630 TCGv_i64 fp0
= tcg_temp_new_i64();
10631 TCGv_i64 fp1
= tcg_temp_new_i64();
10633 gen_load_fpr64(ctx
, fp0
, ft
);
10634 gen_load_fpr64(ctx
, fp1
, fs
);
10635 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10636 tcg_temp_free_i64(fp1
);
10637 gen_store_fpr64(ctx
, fp0
, fd
);
10638 tcg_temp_free_i64(fp0
);
10641 case OPC_RECIP2_PS
:
10644 TCGv_i64 fp0
= tcg_temp_new_i64();
10645 TCGv_i64 fp1
= tcg_temp_new_i64();
10647 gen_load_fpr64(ctx
, fp0
, fs
);
10648 gen_load_fpr64(ctx
, fp1
, ft
);
10649 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10650 tcg_temp_free_i64(fp1
);
10651 gen_store_fpr64(ctx
, fp0
, fd
);
10652 tcg_temp_free_i64(fp0
);
10655 case OPC_RECIP1_PS
:
10658 TCGv_i64 fp0
= tcg_temp_new_i64();
10660 gen_load_fpr64(ctx
, fp0
, fs
);
10661 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10662 gen_store_fpr64(ctx
, fp0
, fd
);
10663 tcg_temp_free_i64(fp0
);
10666 case OPC_RSQRT1_PS
:
10669 TCGv_i64 fp0
= tcg_temp_new_i64();
10671 gen_load_fpr64(ctx
, fp0
, fs
);
10672 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10673 gen_store_fpr64(ctx
, fp0
, fd
);
10674 tcg_temp_free_i64(fp0
);
10677 case OPC_RSQRT2_PS
:
10680 TCGv_i64 fp0
= tcg_temp_new_i64();
10681 TCGv_i64 fp1
= tcg_temp_new_i64();
10683 gen_load_fpr64(ctx
, fp0
, fs
);
10684 gen_load_fpr64(ctx
, fp1
, ft
);
10685 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10686 tcg_temp_free_i64(fp1
);
10687 gen_store_fpr64(ctx
, fp0
, fd
);
10688 tcg_temp_free_i64(fp0
);
10692 check_cp1_64bitmode(ctx
);
10694 TCGv_i32 fp0
= tcg_temp_new_i32();
10696 gen_load_fpr32h(ctx
, fp0
, fs
);
10697 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10698 gen_store_fpr32(ctx
, fp0
, fd
);
10699 tcg_temp_free_i32(fp0
);
10702 case OPC_CVT_PW_PS
:
10705 TCGv_i64 fp0
= tcg_temp_new_i64();
10707 gen_load_fpr64(ctx
, fp0
, fs
);
10708 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10709 gen_store_fpr64(ctx
, fp0
, fd
);
10710 tcg_temp_free_i64(fp0
);
10714 check_cp1_64bitmode(ctx
);
10716 TCGv_i32 fp0
= tcg_temp_new_i32();
10718 gen_load_fpr32(ctx
, fp0
, fs
);
10719 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10720 gen_store_fpr32(ctx
, fp0
, fd
);
10721 tcg_temp_free_i32(fp0
);
10727 TCGv_i32 fp0
= tcg_temp_new_i32();
10728 TCGv_i32 fp1
= tcg_temp_new_i32();
10730 gen_load_fpr32(ctx
, fp0
, fs
);
10731 gen_load_fpr32(ctx
, fp1
, ft
);
10732 gen_store_fpr32h(ctx
, fp0
, fd
);
10733 gen_store_fpr32(ctx
, fp1
, fd
);
10734 tcg_temp_free_i32(fp0
);
10735 tcg_temp_free_i32(fp1
);
10741 TCGv_i32 fp0
= tcg_temp_new_i32();
10742 TCGv_i32 fp1
= tcg_temp_new_i32();
10744 gen_load_fpr32(ctx
, fp0
, fs
);
10745 gen_load_fpr32h(ctx
, fp1
, ft
);
10746 gen_store_fpr32(ctx
, fp1
, fd
);
10747 gen_store_fpr32h(ctx
, fp0
, fd
);
10748 tcg_temp_free_i32(fp0
);
10749 tcg_temp_free_i32(fp1
);
10755 TCGv_i32 fp0
= tcg_temp_new_i32();
10756 TCGv_i32 fp1
= tcg_temp_new_i32();
10758 gen_load_fpr32h(ctx
, fp0
, fs
);
10759 gen_load_fpr32(ctx
, fp1
, ft
);
10760 gen_store_fpr32(ctx
, fp1
, fd
);
10761 gen_store_fpr32h(ctx
, fp0
, fd
);
10762 tcg_temp_free_i32(fp0
);
10763 tcg_temp_free_i32(fp1
);
10769 TCGv_i32 fp0
= tcg_temp_new_i32();
10770 TCGv_i32 fp1
= tcg_temp_new_i32();
10772 gen_load_fpr32h(ctx
, fp0
, fs
);
10773 gen_load_fpr32h(ctx
, fp1
, ft
);
10774 gen_store_fpr32(ctx
, fp1
, fd
);
10775 gen_store_fpr32h(ctx
, fp0
, fd
);
10776 tcg_temp_free_i32(fp0
);
10777 tcg_temp_free_i32(fp1
);
10781 case OPC_CMP_UN_PS
:
10782 case OPC_CMP_EQ_PS
:
10783 case OPC_CMP_UEQ_PS
:
10784 case OPC_CMP_OLT_PS
:
10785 case OPC_CMP_ULT_PS
:
10786 case OPC_CMP_OLE_PS
:
10787 case OPC_CMP_ULE_PS
:
10788 case OPC_CMP_SF_PS
:
10789 case OPC_CMP_NGLE_PS
:
10790 case OPC_CMP_SEQ_PS
:
10791 case OPC_CMP_NGL_PS
:
10792 case OPC_CMP_LT_PS
:
10793 case OPC_CMP_NGE_PS
:
10794 case OPC_CMP_LE_PS
:
10795 case OPC_CMP_NGT_PS
:
10796 if (ctx
->opcode
& (1 << 6)) {
10797 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10799 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10803 MIPS_INVAL("farith");
10804 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
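/*
 * Note on the indexed FPU accesses above: before the 64-bit access in what
 * are presumably the LUXC1/SUXC1 paths, the effective address is masked
 * with tcg_gen_andi_tl(t0, t0, ~0x7), so those variants always touch a
 * naturally aligned doubleword, while the plain indexed word/doubleword
 * forms use the base+index address unchanged.
 */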
10890 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10891 int fd
, int fr
, int fs
, int ft
)
10897 TCGv t0
= tcg_temp_local_new();
10898 TCGv_i32 fp
= tcg_temp_new_i32();
10899 TCGv_i32 fph
= tcg_temp_new_i32();
10900 TCGLabel
*l1
= gen_new_label();
10901 TCGLabel
*l2
= gen_new_label();
10903 gen_load_gpr(t0
, fr
);
10904 tcg_gen_andi_tl(t0
, t0
, 0x7);
10906 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10907 gen_load_fpr32(ctx
, fp
, fs
);
10908 gen_load_fpr32h(ctx
, fph
, fs
);
10909 gen_store_fpr32(ctx
, fp
, fd
);
10910 gen_store_fpr32h(ctx
, fph
, fd
);
10913 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10915 #ifdef TARGET_WORDS_BIGENDIAN
10916 gen_load_fpr32(ctx
, fp
, fs
);
10917 gen_load_fpr32h(ctx
, fph
, ft
);
10918 gen_store_fpr32h(ctx
, fp
, fd
);
10919 gen_store_fpr32(ctx
, fph
, fd
);
10921 gen_load_fpr32h(ctx
, fph
, fs
);
10922 gen_load_fpr32(ctx
, fp
, ft
);
10923 gen_store_fpr32(ctx
, fph
, fd
);
10924 gen_store_fpr32h(ctx
, fp
, fd
);
10927 tcg_temp_free_i32(fp
);
10928 tcg_temp_free_i32(fph
);
10934 TCGv_i32 fp0
= tcg_temp_new_i32();
10935 TCGv_i32 fp1
= tcg_temp_new_i32();
10936 TCGv_i32 fp2
= tcg_temp_new_i32();
10938 gen_load_fpr32(ctx
, fp0
, fs
);
10939 gen_load_fpr32(ctx
, fp1
, ft
);
10940 gen_load_fpr32(ctx
, fp2
, fr
);
10941 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10942 tcg_temp_free_i32(fp0
);
10943 tcg_temp_free_i32(fp1
);
10944 gen_store_fpr32(ctx
, fp2
, fd
);
10945 tcg_temp_free_i32(fp2
);
10950 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10952 TCGv_i64 fp0
= tcg_temp_new_i64();
10953 TCGv_i64 fp1
= tcg_temp_new_i64();
10954 TCGv_i64 fp2
= tcg_temp_new_i64();
10956 gen_load_fpr64(ctx
, fp0
, fs
);
10957 gen_load_fpr64(ctx
, fp1
, ft
);
10958 gen_load_fpr64(ctx
, fp2
, fr
);
10959 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10960 tcg_temp_free_i64(fp0
);
10961 tcg_temp_free_i64(fp1
);
10962 gen_store_fpr64(ctx
, fp2
, fd
);
10963 tcg_temp_free_i64(fp2
);
10969 TCGv_i64 fp0
= tcg_temp_new_i64();
10970 TCGv_i64 fp1
= tcg_temp_new_i64();
10971 TCGv_i64 fp2
= tcg_temp_new_i64();
10973 gen_load_fpr64(ctx
, fp0
, fs
);
10974 gen_load_fpr64(ctx
, fp1
, ft
);
10975 gen_load_fpr64(ctx
, fp2
, fr
);
10976 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10977 tcg_temp_free_i64(fp0
);
10978 tcg_temp_free_i64(fp1
);
10979 gen_store_fpr64(ctx
, fp2
, fd
);
10980 tcg_temp_free_i64(fp2
);
10986 TCGv_i32 fp0
= tcg_temp_new_i32();
10987 TCGv_i32 fp1
= tcg_temp_new_i32();
10988 TCGv_i32 fp2
= tcg_temp_new_i32();
10990 gen_load_fpr32(ctx
, fp0
, fs
);
10991 gen_load_fpr32(ctx
, fp1
, ft
);
10992 gen_load_fpr32(ctx
, fp2
, fr
);
10993 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10994 tcg_temp_free_i32(fp0
);
10995 tcg_temp_free_i32(fp1
);
10996 gen_store_fpr32(ctx
, fp2
, fd
);
10997 tcg_temp_free_i32(fp2
);
11002 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11004 TCGv_i64 fp0
= tcg_temp_new_i64();
11005 TCGv_i64 fp1
= tcg_temp_new_i64();
11006 TCGv_i64 fp2
= tcg_temp_new_i64();
11008 gen_load_fpr64(ctx
, fp0
, fs
);
11009 gen_load_fpr64(ctx
, fp1
, ft
);
11010 gen_load_fpr64(ctx
, fp2
, fr
);
11011 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11012 tcg_temp_free_i64(fp0
);
11013 tcg_temp_free_i64(fp1
);
11014 gen_store_fpr64(ctx
, fp2
, fd
);
11015 tcg_temp_free_i64(fp2
);
11021 TCGv_i64 fp0
= tcg_temp_new_i64();
11022 TCGv_i64 fp1
= tcg_temp_new_i64();
11023 TCGv_i64 fp2
= tcg_temp_new_i64();
11025 gen_load_fpr64(ctx
, fp0
, fs
);
11026 gen_load_fpr64(ctx
, fp1
, ft
);
11027 gen_load_fpr64(ctx
, fp2
, fr
);
11028 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11029 tcg_temp_free_i64(fp0
);
11030 tcg_temp_free_i64(fp1
);
11031 gen_store_fpr64(ctx
, fp2
, fd
);
11032 tcg_temp_free_i64(fp2
);
11038 TCGv_i32 fp0
= tcg_temp_new_i32();
11039 TCGv_i32 fp1
= tcg_temp_new_i32();
11040 TCGv_i32 fp2
= tcg_temp_new_i32();
11042 gen_load_fpr32(ctx
, fp0
, fs
);
11043 gen_load_fpr32(ctx
, fp1
, ft
);
11044 gen_load_fpr32(ctx
, fp2
, fr
);
11045 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11046 tcg_temp_free_i32(fp0
);
11047 tcg_temp_free_i32(fp1
);
11048 gen_store_fpr32(ctx
, fp2
, fd
);
11049 tcg_temp_free_i32(fp2
);
11054 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11056 TCGv_i64 fp0
= tcg_temp_new_i64();
11057 TCGv_i64 fp1
= tcg_temp_new_i64();
11058 TCGv_i64 fp2
= tcg_temp_new_i64();
11060 gen_load_fpr64(ctx
, fp0
, fs
);
11061 gen_load_fpr64(ctx
, fp1
, ft
);
11062 gen_load_fpr64(ctx
, fp2
, fr
);
11063 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11064 tcg_temp_free_i64(fp0
);
11065 tcg_temp_free_i64(fp1
);
11066 gen_store_fpr64(ctx
, fp2
, fd
);
11067 tcg_temp_free_i64(fp2
);
11073 TCGv_i64 fp0
= tcg_temp_new_i64();
11074 TCGv_i64 fp1
= tcg_temp_new_i64();
11075 TCGv_i64 fp2
= tcg_temp_new_i64();
11077 gen_load_fpr64(ctx
, fp0
, fs
);
11078 gen_load_fpr64(ctx
, fp1
, ft
);
11079 gen_load_fpr64(ctx
, fp2
, fr
);
11080 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11081 tcg_temp_free_i64(fp0
);
11082 tcg_temp_free_i64(fp1
);
11083 gen_store_fpr64(ctx
, fp2
, fd
);
11084 tcg_temp_free_i64(fp2
);
11090 TCGv_i32 fp0
= tcg_temp_new_i32();
11091 TCGv_i32 fp1
= tcg_temp_new_i32();
11092 TCGv_i32 fp2
= tcg_temp_new_i32();
11094 gen_load_fpr32(ctx
, fp0
, fs
);
11095 gen_load_fpr32(ctx
, fp1
, ft
);
11096 gen_load_fpr32(ctx
, fp2
, fr
);
11097 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11098 tcg_temp_free_i32(fp0
);
11099 tcg_temp_free_i32(fp1
);
11100 gen_store_fpr32(ctx
, fp2
, fd
);
11101 tcg_temp_free_i32(fp2
);
11106 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11108 TCGv_i64 fp0
= tcg_temp_new_i64();
11109 TCGv_i64 fp1
= tcg_temp_new_i64();
11110 TCGv_i64 fp2
= tcg_temp_new_i64();
11112 gen_load_fpr64(ctx
, fp0
, fs
);
11113 gen_load_fpr64(ctx
, fp1
, ft
);
11114 gen_load_fpr64(ctx
, fp2
, fr
);
11115 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11116 tcg_temp_free_i64(fp0
);
11117 tcg_temp_free_i64(fp1
);
11118 gen_store_fpr64(ctx
, fp2
, fd
);
11119 tcg_temp_free_i64(fp2
);
11125 TCGv_i64 fp0
= tcg_temp_new_i64();
11126 TCGv_i64 fp1
= tcg_temp_new_i64();
11127 TCGv_i64 fp2
= tcg_temp_new_i64();
11129 gen_load_fpr64(ctx
, fp0
, fs
);
11130 gen_load_fpr64(ctx
, fp1
, ft
);
11131 gen_load_fpr64(ctx
, fp2
, fr
);
11132 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11133 tcg_temp_free_i64(fp0
);
11134 tcg_temp_free_i64(fp1
);
11135 gen_store_fpr64(ctx
, fp2
, fd
);
11136 tcg_temp_free_i64(fp2
);
11140 MIPS_INVAL("flt3_arith");
11141 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_helper_rdhwr_cc(t0, cpu_env);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_store_gpr(t0, rt);
        /* Break the TB to be able to take timer interrupts immediately
           after reading count. DISAS_STOP isn't sufficient, we need to ensure
           we break completely out of translated code. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        check_insn(ctx, ISA_MIPS32R6);
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
            gen_helper_rdhwr_performance(t0, cpu_env);
            gen_store_gpr(t0, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
            generate_exception_end(ctx, EXCP_RI);
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
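/*
 * Quick reference for gen_rdhwr() above: the UserLocal value is read
 * directly from active_tc.CP0_UserLocal, unconditionally in user-only
 * builds and, in system mode, only when CP0 access or HWRENA.ULR allows
 * it; the other hardware registers go through the rdhwr helpers, and
 * reading CC under icount forces a TB exit so that pending timer
 * interrupts are taken right after the read.
 */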
static inline void clear_branch_hflags(DisasContext *ctx)
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->base.is_jmp == DISAS_NEXT) {
        save_cpu_state(ctx, 0);
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);

static void gen_branch(DisasContext *ctx, int insn_bytes)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BC:
            /* Conditional branch */
                TCGLabel *l1 = gen_new_label();
                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
                gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
                tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->base.singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            tcg_gen_lookup_and_goto_ptr();
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
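/*
 * Summary of the branch-completion path above: gen_branch() runs after the
 * delay-slot instruction has been translated, uses the BMASK bits kept in
 * ctx->hflags to distinguish forbidden-slot fall-through, unconditional,
 * branch-likely, conditional and register branches, clears those bits via
 * clear_branch_hflags(), and then ends the TB either with goto_tb chaining
 * or with tcg_gen_lookup_and_goto_ptr() for indirect targets.
 */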
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands and calculate btarget */
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_BEQZC, OPC_BNEZC */
        gen_load_gpr(t0, rs);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();
            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
            ctx->hflags |= MIPS_HFLAG_BR;
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();
                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);
                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                tcg_temp_free(input_overflow);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
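/*
 * The BOVC/BNVC sequence above computes 32-bit signed-add overflow without
 * branching on intermediate results: t2/t3 hold the sign-extended operands,
 * input_overflow records operands that were not already canonical 32-bit
 * values, and the (sum ^ b) & ~(a ^ b) sign test on the extended sum sets
 * t4, which is OR-ed with input_overflow before the final compare with zero
 * decides whether the branch is taken.
 */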
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
        generate_exception_end(ctx, EXCP_RI);
        gen_base_offset_addr(ctx, t0, 29, 12);
        gen_load_gpr(t1, 7);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 8);
        gen_load_gpr(t1, 6);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 4);
        gen_load_gpr(t1, 5);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 0);
        gen_load_gpr(t1, 4);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \

        DECR_AND_STORE(31);
        DECR_AND_STORE(30);
        DECR_AND_STORE(23);
        DECR_AND_STORE(22);
        DECR_AND_STORE(21);
        DECR_AND_STORE(20);
        DECR_AND_STORE(19);
        DECR_AND_STORE(18);
        DECR_AND_STORE(17);
        DECR_AND_STORE(16);
        generate_exception_end(ctx, EXCP_RI);
#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);

static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                            \
        tcg_gen_movi_tl(t2, -4);                           \
        gen_op_addr_add(ctx, t0, t0, t2);                  \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
        gen_store_gpr(t1, reg);                            \

        generate_exception_end(ctx, EXCP_RI);
#undef DECR_AND_LOAD

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
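/*
 * Frame layout used by the SAVE/RESTORE helpers above: the a0-a3 home area
 * is written at offsets 12, 8, 4 and 0 from the incoming $29 first, then
 * DECR_AND_STORE walks downward pushing ra and the requested s-registers in
 * a fixed order; DECR_AND_LOAD performs the mirror-image walk starting from
 * sp + framesize, and both helpers finish by adjusting $29 by the decoded
 * framesize with a single gen_op_addr_add().
 */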
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    t0 = tcg_temp_new();
    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);

static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
#if defined(TARGET_MIPS64)
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_ld(ctx, OPC_LD, ry, 29, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_st(ctx, OPC_SD, ry, 29, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : (ctx->opcode & 0xff) << 3;
        gen_st(ctx, OPC_SD, 31, 29, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
        gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
            generate_exception_end(ctx, EXCP_RI);
            offset = extended ? offset : offset << 3;
            gen_ld(ctx, OPC_LDPC, ry, 0, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
        gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_addiupc(ctx, ry, offset, 1, extended);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
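/*
 * In the I64 decoder above, the non-extended encodings scale their
 * immediate to the access size (offset << 3 for the doubleword loads,
 * stores and stack adjustments, offset << 2 for the PC/SP-relative word
 * forms), whereas the EXTEND-prefixed encodings pass the already assembled
 * 16-bit immediate through unchanged.
 */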
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int extend = cpu_lduw_code(env, ctx->base.pc_next + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));
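/*
 * Layout of the immediate assembled above: after the EXTEND prefix has been
 * shifted into the upper half of ctx->opcode, imm[15:11] comes from prefix
 * bits 4..0, imm[10:5] from prefix bits 10..5, and imm[4:0] from the low
 * five bits of the base instruction; the int16_t cast keeps the result in
 * signed 16-bit range.
 */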
12032 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
12035 case M16_OPC_ADDIUSP
:
12036 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
12038 case M16_OPC_ADDIUPC
:
12039 gen_addiupc(ctx
, rx
, imm
, 0, 1);
12042 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
12043 /* No delay slot, so just process as a normal instruction */
12046 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
12047 /* No delay slot, so just process as a normal instruction */
12049 case M16_OPC_BNEQZ
:
12050 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
12051 /* No delay slot, so just process as a normal instruction */
12053 case M16_OPC_SHIFT
:
12054 switch (ctx
->opcode
& 0x3) {
12056 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
12059 #if defined(TARGET_MIPS64)
12060 check_mips_64(ctx
);
12061 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
12063 generate_exception_end(ctx
, EXCP_RI
);
12067 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
12070 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
12074 #if defined(TARGET_MIPS64)
12076 check_insn(ctx
, ISA_MIPS3
);
12077 check_mips_64(ctx
);
12078 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
12082 imm
= ctx
->opcode
& 0xf;
12083 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
12084 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
12085 imm
= (int16_t) (imm
<< 1) >> 1;
12086 if ((ctx
->opcode
>> 4) & 0x1) {
12087 #if defined(TARGET_MIPS64)
12088 check_mips_64(ctx
);
12089 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
12091 generate_exception_end(ctx
, EXCP_RI
);
12094 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
12097 case M16_OPC_ADDIU8
:
12098 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
12101 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
12103 case M16_OPC_SLTIU
:
12104 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
12109 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
12112 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
12115 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
12118 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
12121 check_insn(ctx
, ISA_MIPS32
);
12123 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
12124 int aregs
= (ctx
->opcode
>> 16) & 0xf;
12125 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
12126 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
12127 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
12128 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
12129 | (ctx
->opcode
& 0xf)) << 3;
12131 if (ctx
->opcode
& (1 << 7)) {
12132 gen_mips16_save(ctx
, xsregs
, aregs
,
12133 do_ra
, do_s0
, do_s1
,
12136 gen_mips16_restore(ctx
, xsregs
, aregs
,
12137 do_ra
, do_s0
, do_s1
,
12143 generate_exception_end(ctx
, EXCP_RI
);
12148 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
12151 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
12153 #if defined(TARGET_MIPS64)
12155 check_insn(ctx
, ISA_MIPS3
);
12156 check_mips_64(ctx
);
12157 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
12161 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12164 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
12167 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
12170 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
12173 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12176 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
12179 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
12181 #if defined(TARGET_MIPS64)
12183 check_insn(ctx
, ISA_MIPS3
);
12184 check_mips_64(ctx
);
12185 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
12189 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12192 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
12195 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
12198 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
12200 #if defined(TARGET_MIPS64)
12202 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
12206 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
#ifdef CONFIG_USER_ONLY
    return semihosting_enabled() && sdbbp_code == 1;

static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int op, cnvt_op, op1, offset;

    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;
12241 case M16_OPC_ADDIUSP
:
12243 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
12245 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
12248 case M16_OPC_ADDIUPC
:
12249 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
12252 offset
= (ctx
->opcode
& 0x7ff) << 1;
12253 offset
= (int16_t)(offset
<< 4) >> 4;
12254 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
12255 /* No delay slot, so just process as a normal instruction */
12258 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
12259 offset
= (((ctx
->opcode
& 0x1f) << 21)
12260 | ((ctx
->opcode
>> 5) & 0x1f) << 16
12262 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
12263 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
12267 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
12268 ((int8_t)ctx
->opcode
) << 1, 0);
12269 /* No delay slot, so just process as a normal instruction */
12271 case M16_OPC_BNEQZ
:
12272 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
12273 ((int8_t)ctx
->opcode
) << 1, 0);
12274 /* No delay slot, so just process as a normal instruction */
12276 case M16_OPC_SHIFT
:
12277 switch (ctx
->opcode
& 0x3) {
12279 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
12282 #if defined(TARGET_MIPS64)
12283 check_insn(ctx
, ISA_MIPS3
);
12284 check_mips_64(ctx
);
12285 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
12287 generate_exception_end(ctx
, EXCP_RI
);
12291 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
12294 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
12298 #if defined(TARGET_MIPS64)
12300 check_insn(ctx
, ISA_MIPS3
);
12301 check_mips_64(ctx
);
12302 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
12307 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
12309 if ((ctx
->opcode
>> 4) & 1) {
12310 #if defined(TARGET_MIPS64)
12311 check_insn(ctx
, ISA_MIPS3
);
12312 check_mips_64(ctx
);
12313 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
12315 generate_exception_end(ctx
, EXCP_RI
);
12318 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
12322 case M16_OPC_ADDIU8
:
12324 int16_t imm
= (int8_t) ctx
->opcode
;
12326 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
12331 int16_t imm
= (uint8_t) ctx
->opcode
;
12332 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
12335 case M16_OPC_SLTIU
:
12337 int16_t imm
= (uint8_t) ctx
->opcode
;
12338 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
12345 funct
= (ctx
->opcode
>> 8) & 0x7;
12348 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
12349 ((int8_t)ctx
->opcode
) << 1, 0);
12352 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
12353 ((int8_t)ctx
->opcode
) << 1, 0);
12356 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
12359 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
12360 ((int8_t)ctx
->opcode
) << 3);
12363 check_insn(ctx
, ISA_MIPS32
);
12365 int do_ra
= ctx
->opcode
& (1 << 6);
12366 int do_s0
= ctx
->opcode
& (1 << 5);
12367 int do_s1
= ctx
->opcode
& (1 << 4);
12368 int framesize
= ctx
->opcode
& 0xf;
12370 if (framesize
== 0) {
12373 framesize
= framesize
<< 3;
12376 if (ctx
->opcode
& (1 << 7)) {
12377 gen_mips16_save(ctx
, 0, 0,
12378 do_ra
, do_s0
, do_s1
, framesize
);
12380 gen_mips16_restore(ctx
, 0, 0,
12381 do_ra
, do_s0
, do_s1
, framesize
);
12387 int rz
= xlat(ctx
->opcode
& 0x7);
12389 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
12390 ((ctx
->opcode
>> 5) & 0x7);
12391 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
12395 reg32
= ctx
->opcode
& 0x1f;
12396 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
12399 generate_exception_end(ctx
, EXCP_RI
);
12406 int16_t imm
= (uint8_t) ctx
->opcode
;
12408 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12413 int16_t imm
= (uint8_t) ctx
->opcode
;
12414 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12417 #if defined(TARGET_MIPS64)
12419 check_insn(ctx
, ISA_MIPS3
);
12420 check_mips_64(ctx
);
12421 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12425 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12428 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12431 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12434 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12437 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12440 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12443 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12445 #if defined (TARGET_MIPS64)
12447 check_insn(ctx
, ISA_MIPS3
);
12448 check_mips_64(ctx
);
12449 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12453 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12456 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12459 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12462 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12466 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12469 switch (ctx
->opcode
& 0x3) {
12471 mips32_op
= OPC_ADDU
;
12474 mips32_op
= OPC_SUBU
;
12476 #if defined(TARGET_MIPS64)
12478 mips32_op
= OPC_DADDU
;
12479 check_insn(ctx
, ISA_MIPS3
);
12480 check_mips_64(ctx
);
12483 mips32_op
= OPC_DSUBU
;
12484 check_insn(ctx
, ISA_MIPS3
);
12485 check_mips_64(ctx
);
12489 generate_exception_end(ctx
, EXCP_RI
);
12493 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12502 int nd
= (ctx
->opcode
>> 7) & 0x1;
12503 int link
= (ctx
->opcode
>> 6) & 0x1;
12504 int ra
= (ctx
->opcode
>> 5) & 0x1;
12507 check_insn(ctx
, ISA_MIPS32
);
12516 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12521 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12522 gen_helper_do_semihosting(cpu_env
);
12524 /* XXX: not clear which exception should be raised
12525 * when in debug mode...
12527 check_insn(ctx
, ISA_MIPS32
);
12528 generate_exception_end(ctx
, EXCP_DBp
);
12532 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12535 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12538 generate_exception_end(ctx
, EXCP_BREAK
);
12541 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12544 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12547 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12549 #if defined (TARGET_MIPS64)
12551 check_insn(ctx
, ISA_MIPS3
);
12552 check_mips_64(ctx
);
12553 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12557 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12560 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12563 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12566 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12569 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12572 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12575 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12578 check_insn(ctx
, ISA_MIPS32
);
12580 case RR_RY_CNVT_ZEB
:
12581 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12583 case RR_RY_CNVT_ZEH
:
12584 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12586 case RR_RY_CNVT_SEB
:
12587 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12589 case RR_RY_CNVT_SEH
:
12590 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12592 #if defined (TARGET_MIPS64)
12593 case RR_RY_CNVT_ZEW
:
12594 check_insn(ctx
, ISA_MIPS64
);
12595 check_mips_64(ctx
);
12596 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12598 case RR_RY_CNVT_SEW
:
12599 check_insn(ctx
, ISA_MIPS64
);
12600 check_mips_64(ctx
);
12601 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12605 generate_exception_end(ctx
, EXCP_RI
);
12610 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12612 #if defined (TARGET_MIPS64)
12614 check_insn(ctx
, ISA_MIPS3
);
12615 check_mips_64(ctx
);
12616 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12619 check_insn(ctx
, ISA_MIPS3
);
12620 check_mips_64(ctx
);
12621 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12624 check_insn(ctx
, ISA_MIPS3
);
12625 check_mips_64(ctx
);
12626 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12629 check_insn(ctx
, ISA_MIPS3
);
12630 check_mips_64(ctx
);
12631 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12635 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12638 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12641 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12644 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12646 #if defined (TARGET_MIPS64)
12648 check_insn(ctx
, ISA_MIPS3
);
12649 check_mips_64(ctx
);
12650 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12653 check_insn(ctx
, ISA_MIPS3
);
12654 check_mips_64(ctx
);
12655 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12658 check_insn(ctx
, ISA_MIPS3
);
12659 check_mips_64(ctx
);
12660 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12663 check_insn(ctx
, ISA_MIPS3
);
12664 check_mips_64(ctx
);
12665 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12669 generate_exception_end(ctx
, EXCP_RI
);
12673 case M16_OPC_EXTEND
:
12674 decode_extended_mips16_opc(env
, ctx
);
12677 #if defined(TARGET_MIPS64)
12679 funct
= (ctx
->opcode
>> 8) & 0x7;
12680 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12684 generate_exception_end(ctx
, EXCP_RI
);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */
    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */
    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36, /* MIPS64 */
    LD32 = 0x37, /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */
    /* These opcodes are distinguished only by bits 9..6; those bits are
     * what are recorded below. */
    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */
/*
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers VolumeIV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *      Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
 */
    /* begin of microMIPS32 DSP */
    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */
    /* end of microMIPS32 DSP */
    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */
/* POOL32C encoding of minor opcode field (bits 15..12) */
/* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
/* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c, /* MIPS3D */
/* POOL32Fxf encoding of minor opcode extension field */
/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */
/* POOL16A encoding of minor opcode field */
/* POOL16B encoding of minor opcode field */
/* POOL16C encoding of minor opcode field */
/* R6 POOL16C encoding of minor opcode field (bits 0..5) */
/* POOL16D encoding of minor opcode field */
/* POOL16E encoding of minor opcode field */

static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width)) >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
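/*
 * Example use of the helpers above: gen_addius5() below calls
 * SIMM(ctx->opcode, 1, 4), i.e. bits 4..1 interpreted as a two's-complement
 * value in [-8, 7]; ZIMM with the same arguments would instead yield the
 * raw field value in [0, 15].
 */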
static void gen_addiur1sp(DisasContext *ctx)
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);

static void gen_addiur2(DisasContext *ctx)
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);

static void gen_addiusp(DisasContext *ctx)
    int encoded = ZIMM(ctx->opcode, 1, 9);

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
        decoded = encoded - 768;

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
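/*
 * Decode table implemented by gen_addiusp() above: encodings 0 and 1 select
 * 256 and 257, 2..255 are used as-is, 256..509 map to encoded - 512 and the
 * remaining values to encoded - 768; the selected word count is then scaled
 * by 4 and added to $29.
 */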
static void gen_addius5(DisasContext *ctx)
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);

static void gen_andi16(DisasContext *ctx)
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);
    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
        gen_helper_lwm(cpu_env, t0, t1, t2);
        gen_helper_swm(cpu_env, t0, t1, t2);
#ifdef TARGET_MIPS64
        gen_helper_ldm(cpu_env, t0, t1, t2);
        gen_helper_sdm(cpu_env, t0, t1, t2);
    tcg_temp_free_i32(t2);
*ctx
)
13416 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
13417 int rs
= mmreg(ctx
->opcode
& 0x7);
13419 switch (((ctx
->opcode
) >> 4) & 0x3f) {
13424 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13430 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13436 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13442 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13449 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13450 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13452 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13461 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13462 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13464 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13471 int reg
= ctx
->opcode
& 0x1f;
13473 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13479 int reg
= ctx
->opcode
& 0x1f;
13480 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13481 /* Let normal delay slot handling in our caller take us
13482 to the branch target. */
13487 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13488 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13492 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13493 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13497 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13501 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13504 generate_exception_end(ctx
, EXCP_BREAK
);
13507 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13508 gen_helper_do_semihosting(cpu_env
);
13510 /* XXX: not clear which exception should be raised
13511 * when in debug mode...
13513 check_insn(ctx
, ISA_MIPS32
);
13514 generate_exception_end(ctx
, EXCP_DBp
);
13517 case JRADDIUSP
+ 0:
13518 case JRADDIUSP
+ 1:
13520 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13521 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13522 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13523 /* Let normal delay slot handling in our caller take us
13524 to the branch target. */
13528 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
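/*
 * Illustrative note (not from the original source): with the lookup tables
 * above, a MOVEP with enc_dest == 0 writes the pair $5/$6, while enc_rs == 1
 * and enc_rt == 4 read $17 and $16, so the emitted TCG ops amount to
 * "$5 = $17; $6 = $16".  An enc_rs or enc_rt of 0 selects the hard-wired
 * zero source and is emitted as a movi of 0 instead of a register move.
 */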
static void gen_pool16c_r6_insn(DisasContext *ctx)
{
    int rt = mmreg((ctx->opcode >> 7) & 0x7);
    int rs = mmreg((ctx->opcode >> 4) & 0x7);

    switch (ctx->opcode & 0xf) {
    case R6_NOT16:
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case R6_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case R6_LWM16:
        {
            int lwm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
            int offset = extract32(ctx->opcode, 4, 4);
            gen_ldst_multiple(ctx, LWM32, lwm_converted, 29, offset << 2);
        }
        break;
    case R6_JRC16: /* JRCADDIUSP */
        if ((ctx->opcode >> 4) & 1) {
            /* JRCADDIUSP */
            int imm = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
        } else {
            /* JRC16 */
            rs = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, rs, 0, 0, 0);
        }
        break;
    case MOVEP:
    case MOVEP_05:
    case MOVEP_06:
    case MOVEP_07:
    case MOVEP_0C:
    case MOVEP_0D:
    case MOVEP_0E:
    case MOVEP_0F:
        {
            int enc_dest = uMIPS_RD(ctx->opcode);
            int enc_rt = uMIPS_RS2(ctx->opcode);
            int enc_rs = (ctx->opcode & 3) | ((ctx->opcode >> 1) & 4);
            gen_movep(ctx, enc_dest, enc_rt, enc_rs);
        }
        break;
    case R6_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case R6_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    case R6_SWM16:
        {
            int swm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
            int offset = extract32(ctx->opcode, 4, 4);
            gen_ldst_multiple(ctx, SWM32, swm_converted, 29, offset << 2);
        }
        break;
    case JALRC16: /* BREAK16, SDBBP16 */
        switch (ctx->opcode & 0x3f) {
        case JALRC16:
        case JALRC16 + 0x20:
            /* JALRC16, JALRC16_HB */
            gen_compute_branch(ctx, OPC_JALR, 2, (ctx->opcode >> 5) & 0x1f,
                               31, 0, 0);
            break;
        case R6_BREAK16:
            /* BREAK16 */
            generate_exception(ctx, EXCP_BREAK);
            break;
        case R6_SDBBP16:
            /* SDBBP16 */
            if (is_uhi(extract32(ctx->opcode, 6, 4))) {
                gen_helper_do_semihosting(cpu_env);
            } else {
                if (ctx->hflags & MIPS_HFLAG_SBRI) {
                    generate_exception(ctx, EXCP_RI);
                } else {
                    generate_exception(ctx, EXCP_DBp);
                }
            }
            break;
        }
        break;
    default:
        generate_exception(ctx, EXCP_RI);
        break;
    }
}
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
    case LWP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd+1);
        break;
    case SWP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        break;
#ifdef TARGET_MIPS64
    case LDP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd+1);
        break;
    case SDP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
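/*
 * Illustrative note (not from the original source): for the 32-bit pair ops
 * the second access simply follows the first word in memory, so a microMIPS
 * LWP rd, 8(base) behaves like "lw rd, 8(base); lw rd+1, 12(base)".  That is
 * why rd == 31 (no rd+1 exists) and, for the load forms, rd == base (the
 * first load would clobber the base address) are rejected with a Reserved
 * Instruction exception above.
 */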
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
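/*
 * Illustrative note (not from the original source): the stype mapping above
 * means e.g. SYNC 0x11 (acquire) only orders earlier loads against later
 * loads and stores, so the emitted barrier is equivalent to
 *
 *     tcg_gen_mb(TCG_BAR_SC | TCG_MO_LD_LD | TCG_MO_LD_ST);
 *
 * while unrecognized stype values fall back to the conservative full barrier.
 */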
13758 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13760 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13761 int minor
= (ctx
->opcode
>> 12) & 0xf;
13762 uint32_t mips32_op
;
13764 switch (extension
) {
13766 mips32_op
= OPC_TEQ
;
13769 mips32_op
= OPC_TGE
;
13772 mips32_op
= OPC_TGEU
;
13775 mips32_op
= OPC_TLT
;
13778 mips32_op
= OPC_TLTU
;
13781 mips32_op
= OPC_TNE
;
13783 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13785 #ifndef CONFIG_USER_ONLY
13788 check_cp0_enabled(ctx
);
13790 /* Treat as NOP. */
13793 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13797 check_cp0_enabled(ctx
);
13799 TCGv t0
= tcg_temp_new();
13801 gen_load_gpr(t0
, rt
);
13802 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13808 switch (minor
& 3) {
13810 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13813 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13816 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13819 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13822 goto pool32axf_invalid
;
13826 switch (minor
& 3) {
13828 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13831 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13834 goto pool32axf_invalid
;
13840 check_insn(ctx
, ISA_MIPS32R6
);
13841 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13844 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13847 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13850 mips32_op
= OPC_CLO
;
13853 mips32_op
= OPC_CLZ
;
13855 check_insn(ctx
, ISA_MIPS32
);
13856 gen_cl(ctx
, mips32_op
, rt
, rs
);
13859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13860 gen_rdhwr(ctx
, rt
, rs
, 0);
13863 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13866 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13867 mips32_op
= OPC_MULT
;
13870 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13871 mips32_op
= OPC_MULTU
;
13874 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13875 mips32_op
= OPC_DIV
;
13878 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13879 mips32_op
= OPC_DIVU
;
13882 check_insn(ctx
, ISA_MIPS32
);
13883 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13886 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13887 mips32_op
= OPC_MADD
;
13890 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13891 mips32_op
= OPC_MADDU
;
13894 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13895 mips32_op
= OPC_MSUB
;
13898 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13899 mips32_op
= OPC_MSUBU
;
13901 check_insn(ctx
, ISA_MIPS32
);
13902 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13905 goto pool32axf_invalid
;
13916 generate_exception_err(ctx
, EXCP_CpU
, 2);
13919 goto pool32axf_invalid
;
13924 case JALR
: /* JALRC */
13925 case JALR_HB
: /* JALRC_HB */
13926 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13927 /* JALRC, JALRC_HB */
13928 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13930 /* JALR, JALR_HB */
13931 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13932 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13937 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13938 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13939 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13942 goto pool32axf_invalid
;
13948 check_cp0_enabled(ctx
);
13949 check_insn(ctx
, ISA_MIPS32R2
);
13950 gen_load_srsgpr(rs
, rt
);
13953 check_cp0_enabled(ctx
);
13954 check_insn(ctx
, ISA_MIPS32R2
);
13955 gen_store_srsgpr(rs
, rt
);
13958 goto pool32axf_invalid
;
13961 #ifndef CONFIG_USER_ONLY
13965 mips32_op
= OPC_TLBP
;
13968 mips32_op
= OPC_TLBR
;
13971 mips32_op
= OPC_TLBWI
;
13974 mips32_op
= OPC_TLBWR
;
13977 mips32_op
= OPC_TLBINV
;
13980 mips32_op
= OPC_TLBINVF
;
13983 mips32_op
= OPC_WAIT
;
13986 mips32_op
= OPC_DERET
;
13989 mips32_op
= OPC_ERET
;
13991 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13994 goto pool32axf_invalid
;
14000 check_cp0_enabled(ctx
);
14002 TCGv t0
= tcg_temp_new();
14004 save_cpu_state(ctx
, 1);
14005 gen_helper_di(t0
, cpu_env
);
14006 gen_store_gpr(t0
, rs
);
14007 /* Stop translation as we may have switched the execution mode */
14008 ctx
->base
.is_jmp
= DISAS_STOP
;
14013 check_cp0_enabled(ctx
);
14015 TCGv t0
= tcg_temp_new();
14017 save_cpu_state(ctx
, 1);
14018 gen_helper_ei(t0
, cpu_env
);
14019 gen_store_gpr(t0
, rs
);
14020 /* DISAS_STOP isn't sufficient, we need to ensure we break out
14021 of translated code to check for pending interrupts. */
14022 gen_save_pc(ctx
->base
.pc_next
+ 4);
14023 ctx
->base
.is_jmp
= DISAS_EXIT
;
14028 goto pool32axf_invalid
;
14035 gen_sync(extract32(ctx
->opcode
, 16, 5));
14038 generate_exception_end(ctx
, EXCP_SYSCALL
);
14041 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
14042 gen_helper_do_semihosting(cpu_env
);
14044 check_insn(ctx
, ISA_MIPS32
);
14045 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14046 generate_exception_end(ctx
, EXCP_RI
);
14048 generate_exception_end(ctx
, EXCP_DBp
);
14053 goto pool32axf_invalid
;
14057 switch (minor
& 3) {
14059 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
14062 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
14065 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
14068 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
14071 goto pool32axf_invalid
;
14075 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14078 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
14081 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
14084 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
14087 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
14090 goto pool32axf_invalid
;
14095 MIPS_INVAL("pool32axf");
14096 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */
static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
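/*
 * Illustrative note (not from the original source): "extension" above is the
 * 10-bit minor opcode of the instruction, so a selector such as
 * FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D) expands to (FMT_SD_D << 8) | RSQRT_FMT
 * and is compared directly against that field; the two-bit formats of
 * FLOAT_2BIT_FMT occupy bits 7..8 of the same field instead.
 */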
14130 switch (extension
) {
14131 case FLOAT_1BIT_FMT(CFC1
, 0):
14132 mips32_op
= OPC_CFC1
;
14134 case FLOAT_1BIT_FMT(CTC1
, 0):
14135 mips32_op
= OPC_CTC1
;
14137 case FLOAT_1BIT_FMT(MFC1
, 0):
14138 mips32_op
= OPC_MFC1
;
14140 case FLOAT_1BIT_FMT(MTC1
, 0):
14141 mips32_op
= OPC_MTC1
;
14143 case FLOAT_1BIT_FMT(MFHC1
, 0):
14144 mips32_op
= OPC_MFHC1
;
14146 case FLOAT_1BIT_FMT(MTHC1
, 0):
14147 mips32_op
= OPC_MTHC1
;
14149 gen_cp1(ctx
, mips32_op
, rt
, rs
);
14152 /* Reciprocal square root */
14153 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
14154 mips32_op
= OPC_RSQRT_S
;
14156 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
14157 mips32_op
= OPC_RSQRT_D
;
14161 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
14162 mips32_op
= OPC_SQRT_S
;
14164 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
14165 mips32_op
= OPC_SQRT_D
;
14169 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
14170 mips32_op
= OPC_RECIP_S
;
14172 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
14173 mips32_op
= OPC_RECIP_D
;
14177 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
14178 mips32_op
= OPC_FLOOR_L_S
;
14180 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
14181 mips32_op
= OPC_FLOOR_L_D
;
14183 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
14184 mips32_op
= OPC_FLOOR_W_S
;
14186 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
14187 mips32_op
= OPC_FLOOR_W_D
;
14191 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
14192 mips32_op
= OPC_CEIL_L_S
;
14194 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
14195 mips32_op
= OPC_CEIL_L_D
;
14197 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
14198 mips32_op
= OPC_CEIL_W_S
;
14200 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
14201 mips32_op
= OPC_CEIL_W_D
;
14205 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
14206 mips32_op
= OPC_TRUNC_L_S
;
14208 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
14209 mips32_op
= OPC_TRUNC_L_D
;
14211 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
14212 mips32_op
= OPC_TRUNC_W_S
;
14214 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
14215 mips32_op
= OPC_TRUNC_W_D
;
14219 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
14220 mips32_op
= OPC_ROUND_L_S
;
14222 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
14223 mips32_op
= OPC_ROUND_L_D
;
14225 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
14226 mips32_op
= OPC_ROUND_W_S
;
14228 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
14229 mips32_op
= OPC_ROUND_W_D
;
14232 /* Integer to floating-point conversion */
14233 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
14234 mips32_op
= OPC_CVT_L_S
;
14236 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
14237 mips32_op
= OPC_CVT_L_D
;
14239 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
14240 mips32_op
= OPC_CVT_W_S
;
14242 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
14243 mips32_op
= OPC_CVT_W_D
;
14246 /* Paired-foo conversions */
14247 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
14248 mips32_op
= OPC_CVT_S_PL
;
14250 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
14251 mips32_op
= OPC_CVT_S_PU
;
14253 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
14254 mips32_op
= OPC_CVT_PW_PS
;
14256 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
14257 mips32_op
= OPC_CVT_PS_PW
;
14260 /* Floating-point moves */
14261 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
14262 mips32_op
= OPC_MOV_S
;
14264 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
14265 mips32_op
= OPC_MOV_D
;
14267 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
14268 mips32_op
= OPC_MOV_PS
;
14271 /* Absolute value */
14272 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
14273 mips32_op
= OPC_ABS_S
;
14275 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
14276 mips32_op
= OPC_ABS_D
;
14278 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
14279 mips32_op
= OPC_ABS_PS
;
14283 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
14284 mips32_op
= OPC_NEG_S
;
14286 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
14287 mips32_op
= OPC_NEG_D
;
14289 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
14290 mips32_op
= OPC_NEG_PS
;
14293 /* Reciprocal square root step */
14294 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
14295 mips32_op
= OPC_RSQRT1_S
;
14297 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
14298 mips32_op
= OPC_RSQRT1_D
;
14300 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
14301 mips32_op
= OPC_RSQRT1_PS
;
14304 /* Reciprocal step */
14305 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
14306 mips32_op
= OPC_RECIP1_S
;
14308 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
14309 mips32_op
= OPC_RECIP1_S
;
14311 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
14312 mips32_op
= OPC_RECIP1_PS
;
14315 /* Conversions from double */
14316 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
14317 mips32_op
= OPC_CVT_D_S
;
14319 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
14320 mips32_op
= OPC_CVT_D_W
;
14322 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
14323 mips32_op
= OPC_CVT_D_L
;
14326 /* Conversions from single */
14327 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
14328 mips32_op
= OPC_CVT_S_D
;
14330 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
14331 mips32_op
= OPC_CVT_S_W
;
14333 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
14334 mips32_op
= OPC_CVT_S_L
;
14336 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
14339 /* Conditional moves on floating-point codes */
14340 case COND_FLOAT_MOV(MOVT
, 0):
14341 case COND_FLOAT_MOV(MOVT
, 1):
14342 case COND_FLOAT_MOV(MOVT
, 2):
14343 case COND_FLOAT_MOV(MOVT
, 3):
14344 case COND_FLOAT_MOV(MOVT
, 4):
14345 case COND_FLOAT_MOV(MOVT
, 5):
14346 case COND_FLOAT_MOV(MOVT
, 6):
14347 case COND_FLOAT_MOV(MOVT
, 7):
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
14351 case COND_FLOAT_MOV(MOVF
, 0):
14352 case COND_FLOAT_MOV(MOVF
, 1):
14353 case COND_FLOAT_MOV(MOVF
, 2):
14354 case COND_FLOAT_MOV(MOVF
, 3):
14355 case COND_FLOAT_MOV(MOVF
, 4):
14356 case COND_FLOAT_MOV(MOVF
, 5):
14357 case COND_FLOAT_MOV(MOVF
, 6):
14358 case COND_FLOAT_MOV(MOVF
, 7):
14359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14360 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
14363 MIPS_INVAL("pool32fxf");
14364 generate_exception_end(ctx
, EXCP_RI
);
14369 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
14373 int rt
, rs
, rd
, rr
;
14375 uint32_t op
, minor
, minor2
, mips32_op
;
14376 uint32_t cond
, fmt
, cc
;
14378 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
14379 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
14381 rt
= (ctx
->opcode
>> 21) & 0x1f;
14382 rs
= (ctx
->opcode
>> 16) & 0x1f;
14383 rd
= (ctx
->opcode
>> 11) & 0x1f;
14384 rr
= (ctx
->opcode
>> 6) & 0x1f;
14385 imm
= (int16_t) ctx
->opcode
;
14387 op
= (ctx
->opcode
>> 26) & 0x3f;
14390 minor
= ctx
->opcode
& 0x3f;
14393 minor
= (ctx
->opcode
>> 6) & 0xf;
14396 mips32_op
= OPC_SLL
;
14399 mips32_op
= OPC_SRA
;
14402 mips32_op
= OPC_SRL
;
14405 mips32_op
= OPC_ROTR
;
14407 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
14410 check_insn(ctx
, ISA_MIPS32R6
);
14411 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
14414 check_insn(ctx
, ISA_MIPS32R6
);
14415 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
14418 check_insn(ctx
, ISA_MIPS32R6
);
14419 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
14422 goto pool32a_invalid
;
14426 minor
= (ctx
->opcode
>> 6) & 0xf;
14430 mips32_op
= OPC_ADD
;
14433 mips32_op
= OPC_ADDU
;
14436 mips32_op
= OPC_SUB
;
14439 mips32_op
= OPC_SUBU
;
14442 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14443 mips32_op
= OPC_MUL
;
14445 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14449 mips32_op
= OPC_SLLV
;
14452 mips32_op
= OPC_SRLV
;
14455 mips32_op
= OPC_SRAV
;
14458 mips32_op
= OPC_ROTRV
;
14460 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14462 /* Logical operations */
14464 mips32_op
= OPC_AND
;
14467 mips32_op
= OPC_OR
;
14470 mips32_op
= OPC_NOR
;
14473 mips32_op
= OPC_XOR
;
14475 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14477 /* Set less than */
14479 mips32_op
= OPC_SLT
;
14482 mips32_op
= OPC_SLTU
;
14484 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14487 goto pool32a_invalid
;
14491 minor
= (ctx
->opcode
>> 6) & 0xf;
14493 /* Conditional moves */
14494 case MOVN
: /* MUL */
14495 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14497 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14500 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14503 case MOVZ
: /* MUH */
14504 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14506 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14509 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14513 check_insn(ctx
, ISA_MIPS32R6
);
14514 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14517 check_insn(ctx
, ISA_MIPS32R6
);
14518 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14520 case LWXS
: /* DIV */
14521 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14523 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14526 gen_ldxs(ctx
, rs
, rt
, rd
);
14530 check_insn(ctx
, ISA_MIPS32R6
);
14531 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14534 check_insn(ctx
, ISA_MIPS32R6
);
14535 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14538 check_insn(ctx
, ISA_MIPS32R6
);
14539 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14542 goto pool32a_invalid
;
14546 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14549 check_insn(ctx
, ISA_MIPS32R6
);
14550 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14551 extract32(ctx
->opcode
, 9, 2));
14554 check_insn(ctx
, ISA_MIPS32R6
);
14555 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
14558 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14561 gen_pool32axf(env
, ctx
, rt
, rs
);
14564 generate_exception_end(ctx
, EXCP_BREAK
);
14567 check_insn(ctx
, ISA_MIPS32R6
);
14568 generate_exception_end(ctx
, EXCP_RI
);
14572 MIPS_INVAL("pool32a");
14573 generate_exception_end(ctx
, EXCP_RI
);
14578 minor
= (ctx
->opcode
>> 12) & 0xf;
14581 check_cp0_enabled(ctx
);
14582 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14583 gen_cache_operation(ctx
, rt
, rs
, imm
);
14588 /* COP2: Not implemented. */
14589 generate_exception_err(ctx
, EXCP_CpU
, 2);
14591 #ifdef TARGET_MIPS64
14594 check_insn(ctx
, ISA_MIPS3
);
14595 check_mips_64(ctx
);
14600 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14602 #ifdef TARGET_MIPS64
14605 check_insn(ctx
, ISA_MIPS3
);
14606 check_mips_64(ctx
);
14611 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14614 MIPS_INVAL("pool32b");
14615 generate_exception_end(ctx
, EXCP_RI
);
14620 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14621 minor
= ctx
->opcode
& 0x3f;
14622 check_cp1_enabled(ctx
);
14625 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14626 mips32_op
= OPC_ALNV_PS
;
14629 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14630 mips32_op
= OPC_MADD_S
;
14633 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14634 mips32_op
= OPC_MADD_D
;
14637 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14638 mips32_op
= OPC_MADD_PS
;
14641 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14642 mips32_op
= OPC_MSUB_S
;
14645 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14646 mips32_op
= OPC_MSUB_D
;
14649 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14650 mips32_op
= OPC_MSUB_PS
;
14653 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14654 mips32_op
= OPC_NMADD_S
;
14657 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14658 mips32_op
= OPC_NMADD_D
;
14661 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14662 mips32_op
= OPC_NMADD_PS
;
14665 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14666 mips32_op
= OPC_NMSUB_S
;
14669 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14670 mips32_op
= OPC_NMSUB_D
;
14673 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14674 mips32_op
= OPC_NMSUB_PS
;
14676 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14678 case CABS_COND_FMT
:
14679 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14680 cond
= (ctx
->opcode
>> 6) & 0xf;
14681 cc
= (ctx
->opcode
>> 13) & 0x7;
14682 fmt
= (ctx
->opcode
>> 10) & 0x3;
14685 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14688 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14691 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14694 goto pool32f_invalid
;
14698 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14699 cond
= (ctx
->opcode
>> 6) & 0xf;
14700 cc
= (ctx
->opcode
>> 13) & 0x7;
14701 fmt
= (ctx
->opcode
>> 10) & 0x3;
14704 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14707 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14710 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14713 goto pool32f_invalid
;
14717 check_insn(ctx
, ISA_MIPS32R6
);
14718 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14721 check_insn(ctx
, ISA_MIPS32R6
);
14722 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14725 gen_pool32fxf(ctx
, rt
, rs
);
14729 switch ((ctx
->opcode
>> 6) & 0x7) {
14731 mips32_op
= OPC_PLL_PS
;
14734 mips32_op
= OPC_PLU_PS
;
14737 mips32_op
= OPC_PUL_PS
;
14740 mips32_op
= OPC_PUU_PS
;
14743 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14744 mips32_op
= OPC_CVT_PS_S
;
14746 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14749 goto pool32f_invalid
;
14753 check_insn(ctx
, ISA_MIPS32R6
);
14754 switch ((ctx
->opcode
>> 9) & 0x3) {
14756 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14759 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14762 goto pool32f_invalid
;
14767 switch ((ctx
->opcode
>> 6) & 0x7) {
14769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14770 mips32_op
= OPC_LWXC1
;
14773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14774 mips32_op
= OPC_SWXC1
;
14777 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14778 mips32_op
= OPC_LDXC1
;
14781 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14782 mips32_op
= OPC_SDXC1
;
14785 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14786 mips32_op
= OPC_LUXC1
;
14789 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14790 mips32_op
= OPC_SUXC1
;
14792 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14795 goto pool32f_invalid
;
14799 check_insn(ctx
, ISA_MIPS32R6
);
14800 switch ((ctx
->opcode
>> 9) & 0x3) {
14802 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14805 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14808 goto pool32f_invalid
;
14813 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14814 fmt
= (ctx
->opcode
>> 9) & 0x3;
14815 switch ((ctx
->opcode
>> 6) & 0x7) {
14819 mips32_op
= OPC_RSQRT2_S
;
14822 mips32_op
= OPC_RSQRT2_D
;
14825 mips32_op
= OPC_RSQRT2_PS
;
14828 goto pool32f_invalid
;
14834 mips32_op
= OPC_RECIP2_S
;
14837 mips32_op
= OPC_RECIP2_D
;
14840 mips32_op
= OPC_RECIP2_PS
;
14843 goto pool32f_invalid
;
14847 mips32_op
= OPC_ADDR_PS
;
14850 mips32_op
= OPC_MULR_PS
;
14852 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14855 goto pool32f_invalid
;
14859 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14860 cc
= (ctx
->opcode
>> 13) & 0x7;
14861 fmt
= (ctx
->opcode
>> 9) & 0x3;
14862 switch ((ctx
->opcode
>> 6) & 0x7) {
14863 case MOVF_FMT
: /* RINT_FMT */
14864 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14868 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14871 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14874 goto pool32f_invalid
;
14880 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14883 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14887 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14890 goto pool32f_invalid
;
14894 case MOVT_FMT
: /* CLASS_FMT */
14895 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14899 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14902 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14905 goto pool32f_invalid
;
14911 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14914 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14918 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14921 goto pool32f_invalid
;
14926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14929 goto pool32f_invalid
;
14932 #define FINSN_3ARG_SDPS(prfx) \
14933 switch ((ctx->opcode >> 8) & 0x3) { \
14935 mips32_op = OPC_##prfx##_S; \
14938 mips32_op = OPC_##prfx##_D; \
14940 case FMT_SDPS_PS: \
14942 mips32_op = OPC_##prfx##_PS; \
14945 goto pool32f_invalid; \
14948 check_insn(ctx
, ISA_MIPS32R6
);
14949 switch ((ctx
->opcode
>> 9) & 0x3) {
14951 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14954 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14957 goto pool32f_invalid
;
14961 check_insn(ctx
, ISA_MIPS32R6
);
14962 switch ((ctx
->opcode
>> 9) & 0x3) {
14964 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14967 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14970 goto pool32f_invalid
;
14974 /* regular FP ops */
14975 switch ((ctx
->opcode
>> 6) & 0x3) {
14977 FINSN_3ARG_SDPS(ADD
);
14980 FINSN_3ARG_SDPS(SUB
);
14983 FINSN_3ARG_SDPS(MUL
);
14986 fmt
= (ctx
->opcode
>> 8) & 0x3;
14988 mips32_op
= OPC_DIV_D
;
14989 } else if (fmt
== 0) {
14990 mips32_op
= OPC_DIV_S
;
14992 goto pool32f_invalid
;
14996 goto pool32f_invalid
;
15001 switch ((ctx
->opcode
>> 6) & 0x7) {
15002 case MOVN_FMT
: /* SELNEZ_FMT */
15003 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15005 switch ((ctx
->opcode
>> 9) & 0x3) {
15007 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
15010 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
15013 goto pool32f_invalid
;
15017 FINSN_3ARG_SDPS(MOVN
);
15021 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15022 FINSN_3ARG_SDPS(MOVN
);
15024 case MOVZ_FMT
: /* SELEQZ_FMT */
15025 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15027 switch ((ctx
->opcode
>> 9) & 0x3) {
15029 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
15032 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
15035 goto pool32f_invalid
;
15039 FINSN_3ARG_SDPS(MOVZ
);
15043 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15044 FINSN_3ARG_SDPS(MOVZ
);
15047 check_insn(ctx
, ISA_MIPS32R6
);
15048 switch ((ctx
->opcode
>> 9) & 0x3) {
15050 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
15053 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
15056 goto pool32f_invalid
;
15060 check_insn(ctx
, ISA_MIPS32R6
);
15061 switch ((ctx
->opcode
>> 9) & 0x3) {
15063 mips32_op
= OPC_MADDF_S
;
15066 mips32_op
= OPC_MADDF_D
;
15069 goto pool32f_invalid
;
15073 check_insn(ctx
, ISA_MIPS32R6
);
15074 switch ((ctx
->opcode
>> 9) & 0x3) {
15076 mips32_op
= OPC_MSUBF_S
;
15079 mips32_op
= OPC_MSUBF_D
;
15082 goto pool32f_invalid
;
15086 goto pool32f_invalid
;
15090 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15094 MIPS_INVAL("pool32f");
15095 generate_exception_end(ctx
, EXCP_RI
);
15099 generate_exception_err(ctx
, EXCP_CpU
, 1);
15103 minor
= (ctx
->opcode
>> 21) & 0x1f;
15106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15107 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
15110 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15111 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
15112 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15115 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15116 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
15117 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15120 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15121 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
15124 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15125 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
15126 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15129 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15130 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
15131 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15135 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
15138 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15139 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
15143 case TLTI
: /* BC1EQZC */
15144 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15146 check_cp1_enabled(ctx
);
15147 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
15150 mips32_op
= OPC_TLTI
;
15154 case TGEI
: /* BC1NEZC */
15155 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15157 check_cp1_enabled(ctx
);
15158 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
15161 mips32_op
= OPC_TGEI
;
15166 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15167 mips32_op
= OPC_TLTIU
;
15170 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15171 mips32_op
= OPC_TGEIU
;
15173 case TNEI
: /* SYNCI */
15174 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15176 /* Break the TB to be able to sync copied instructions
15178 ctx
->base
.is_jmp
= DISAS_STOP
;
15181 mips32_op
= OPC_TNEI
;
15186 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15187 mips32_op
= OPC_TEQI
;
15189 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
15194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15195 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
15196 4, rs
, 0, imm
<< 1, 0);
15197 /* Compact branches don't have a delay slot, so just let
15198 the normal delay slot handling take us to the branch
15202 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15203 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
15206 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15207 /* Break the TB to be able to sync copied instructions
15209 ctx
->base
.is_jmp
= DISAS_STOP
;
15213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15214 /* COP2: Not implemented. */
15215 generate_exception_err(ctx
, EXCP_CpU
, 2);
15218 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15219 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
15222 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15223 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
15226 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15227 mips32_op
= OPC_BC1FANY4
;
15230 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15231 mips32_op
= OPC_BC1TANY4
;
15234 check_insn(ctx
, ASE_MIPS3D
);
15237 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
15238 check_cp1_enabled(ctx
);
15239 gen_compute_branch1(ctx
, mips32_op
,
15240 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
15242 generate_exception_err(ctx
, EXCP_CpU
, 1);
15247 /* MIPS DSP: not implemented */
15250 MIPS_INVAL("pool32i");
15251 generate_exception_end(ctx
, EXCP_RI
);
15256 minor
= (ctx
->opcode
>> 12) & 0xf;
15257 offset
= sextract32(ctx
->opcode
, 0,
15258 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
15261 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15262 mips32_op
= OPC_LWL
;
15265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15266 mips32_op
= OPC_SWL
;
15269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15270 mips32_op
= OPC_LWR
;
15273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15274 mips32_op
= OPC_SWR
;
15276 #if defined(TARGET_MIPS64)
15278 check_insn(ctx
, ISA_MIPS3
);
15279 check_mips_64(ctx
);
15280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15281 mips32_op
= OPC_LDL
;
15284 check_insn(ctx
, ISA_MIPS3
);
15285 check_mips_64(ctx
);
15286 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15287 mips32_op
= OPC_SDL
;
15290 check_insn(ctx
, ISA_MIPS3
);
15291 check_mips_64(ctx
);
15292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15293 mips32_op
= OPC_LDR
;
15296 check_insn(ctx
, ISA_MIPS3
);
15297 check_mips_64(ctx
);
15298 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15299 mips32_op
= OPC_SDR
;
15302 check_insn(ctx
, ISA_MIPS3
);
15303 check_mips_64(ctx
);
15304 mips32_op
= OPC_LWU
;
15307 check_insn(ctx
, ISA_MIPS3
);
15308 check_mips_64(ctx
);
15309 mips32_op
= OPC_LLD
;
15313 mips32_op
= OPC_LL
;
15316 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
15319 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
15322 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
15324 #if defined(TARGET_MIPS64)
15326 check_insn(ctx
, ISA_MIPS3
);
15327 check_mips_64(ctx
);
15328 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
15333 MIPS_INVAL("pool32c ld-eva");
15334 generate_exception_end(ctx
, EXCP_RI
);
15337 check_cp0_enabled(ctx
);
15339 minor2
= (ctx
->opcode
>> 9) & 0x7;
15340 offset
= sextract32(ctx
->opcode
, 0, 9);
15343 mips32_op
= OPC_LBUE
;
15346 mips32_op
= OPC_LHUE
;
15349 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15350 mips32_op
= OPC_LWLE
;
15353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15354 mips32_op
= OPC_LWRE
;
15357 mips32_op
= OPC_LBE
;
15360 mips32_op
= OPC_LHE
;
15363 mips32_op
= OPC_LLE
;
15366 mips32_op
= OPC_LWE
;
15372 MIPS_INVAL("pool32c st-eva");
15373 generate_exception_end(ctx
, EXCP_RI
);
15376 check_cp0_enabled(ctx
);
15378 minor2
= (ctx
->opcode
>> 9) & 0x7;
15379 offset
= sextract32(ctx
->opcode
, 0, 9);
15382 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15383 mips32_op
= OPC_SWLE
;
15386 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15387 mips32_op
= OPC_SWRE
;
15390 /* Treat as no-op */
15391 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15392 /* hint codes 24-31 are reserved and signal RI */
15393 generate_exception(ctx
, EXCP_RI
);
15397 /* Treat as no-op */
15398 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15399 gen_cache_operation(ctx
, rt
, rs
, offset
);
15403 mips32_op
= OPC_SBE
;
15406 mips32_op
= OPC_SHE
;
15409 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
15412 mips32_op
= OPC_SWE
;
15417 /* Treat as no-op */
15418 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15419 /* hint codes 24-31 are reserved and signal RI */
15420 generate_exception(ctx
, EXCP_RI
);
15424 MIPS_INVAL("pool32c");
15425 generate_exception_end(ctx
, EXCP_RI
);
15429 case ADDI32
: /* AUI, LUI */
15430 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15432 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15435 mips32_op
= OPC_ADDI
;
15440 mips32_op
= OPC_ADDIU
;
15442 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15445 /* Logical operations */
15447 mips32_op
= OPC_ORI
;
15450 mips32_op
= OPC_XORI
;
15453 mips32_op
= OPC_ANDI
;
15455 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15458 /* Set less than immediate */
15460 mips32_op
= OPC_SLTI
;
15463 mips32_op
= OPC_SLTIU
;
15465 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15469 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15470 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15471 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15473 case JALS32
: /* BOVC, BEQC, BEQZALC */
15474 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15477 mips32_op
= OPC_BOVC
;
15478 } else if (rs
< rt
&& rs
== 0) {
15480 mips32_op
= OPC_BEQZALC
;
15483 mips32_op
= OPC_BEQC
;
15485 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15488 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15489 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15490 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15493 case BEQ32
: /* BC */
15494 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15496 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15497 sextract32(ctx
->opcode
<< 1, 0, 27));
15500 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15503 case BNE32
: /* BALC */
15504 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15506 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15507 sextract32(ctx
->opcode
<< 1, 0, 27));
15510 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15513 case J32
: /* BGTZC, BLTZC, BLTC */
15514 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15515 if (rs
== 0 && rt
!= 0) {
15517 mips32_op
= OPC_BGTZC
;
15518 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15520 mips32_op
= OPC_BLTZC
;
15523 mips32_op
= OPC_BLTC
;
15525 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15528 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15529 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15532 case JAL32
: /* BLEZC, BGEZC, BGEC */
15533 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15534 if (rs
== 0 && rt
!= 0) {
15536 mips32_op
= OPC_BLEZC
;
15537 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15539 mips32_op
= OPC_BGEZC
;
15542 mips32_op
= OPC_BGEC
;
15544 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15547 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15548 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15549 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15552 /* Floating point (COP1) */
15554 mips32_op
= OPC_LWC1
;
15557 mips32_op
= OPC_LDC1
;
15560 mips32_op
= OPC_SWC1
;
15563 mips32_op
= OPC_SDC1
;
15565 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15567 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15568 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15569 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15570 switch ((ctx
->opcode
>> 16) & 0x1f) {
15579 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15582 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
15585 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
15595 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15598 generate_exception(ctx
, EXCP_RI
);
15603 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15604 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15606 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15609 case BNVC
: /* BNEC, BNEZALC */
15610 check_insn(ctx
, ISA_MIPS32R6
);
15613 mips32_op
= OPC_BNVC
;
15614 } else if (rs
< rt
&& rs
== 0) {
15616 mips32_op
= OPC_BNEZALC
;
15619 mips32_op
= OPC_BNEC
;
15621 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15623 case R6_BNEZC
: /* JIALC */
15624 check_insn(ctx
, ISA_MIPS32R6
);
15627 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15628 sextract32(ctx
->opcode
<< 1, 0, 22));
15631 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15634 case R6_BEQZC
: /* JIC */
15635 check_insn(ctx
, ISA_MIPS32R6
);
15638 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15639 sextract32(ctx
->opcode
<< 1, 0, 22));
15642 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15645 case BLEZALC
: /* BGEZALC, BGEUC */
15646 check_insn(ctx
, ISA_MIPS32R6
);
15647 if (rs
== 0 && rt
!= 0) {
15649 mips32_op
= OPC_BLEZALC
;
15650 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15652 mips32_op
= OPC_BGEZALC
;
15655 mips32_op
= OPC_BGEUC
;
15657 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15659 case BGTZALC
: /* BLTZALC, BLTUC */
15660 check_insn(ctx
, ISA_MIPS32R6
);
15661 if (rs
== 0 && rt
!= 0) {
15663 mips32_op
= OPC_BGTZALC
;
15664 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15666 mips32_op
= OPC_BLTZALC
;
15669 mips32_op
= OPC_BLTUC
;
15671 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15673 /* Loads and stores */
15675 mips32_op
= OPC_LB
;
15678 mips32_op
= OPC_LBU
;
15681 mips32_op
= OPC_LH
;
15684 mips32_op
= OPC_LHU
;
15687 mips32_op
= OPC_LW
;
15689 #ifdef TARGET_MIPS64
15691 check_insn(ctx
, ISA_MIPS3
);
15692 check_mips_64(ctx
);
15693 mips32_op
= OPC_LD
;
15696 check_insn(ctx
, ISA_MIPS3
);
15697 check_mips_64(ctx
);
15698 mips32_op
= OPC_SD
;
15702 mips32_op
= OPC_SB
;
15705 mips32_op
= OPC_SH
;
15708 mips32_op
= OPC_SW
;
15711 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15714 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15717 generate_exception_end(ctx
, EXCP_RI
);
15722 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15726 /* make sure instructions are on a halfword boundary */
15727 if (ctx
->base
.pc_next
& 0x1) {
15728 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15729 generate_exception_end(ctx
, EXCP_AdEL
);
15733 op
= (ctx
->opcode
>> 10) & 0x3f;
15734 /* Enforce properly-sized instructions in a delay slot */
15735 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15736 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15738 /* POOL32A, POOL32B, POOL32I, POOL32C */
15740 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15742 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15744 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15746 /* LB32, LH32, LWC132, LDC132, LW32 */
15747 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15748 generate_exception_end(ctx
, EXCP_RI
);
15753 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15755 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15757 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15758 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15759 generate_exception_end(ctx
, EXCP_RI
);
15769 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15770 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15771 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15774 switch (ctx
->opcode
& 0x1) {
15782 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15783 /* In the Release 6 the register number location in
15784 * the instruction encoding has changed.
15786 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15788 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15794 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15795 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15796 int amount
= (ctx
->opcode
>> 1) & 0x7;
15798 amount
= amount
== 0 ? 8 : amount
;
15800 switch (ctx
->opcode
& 0x1) {
15809 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15813 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15814 gen_pool16c_r6_insn(ctx
);
15816 gen_pool16c_insn(ctx
);
15821 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15822 int rb
= 28; /* GP */
15823 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15825 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15830 if (ctx
->opcode
& 1) {
15831 generate_exception_end(ctx
, EXCP_RI
);
15834 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15835 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15836 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15837 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15842 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15843 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15844 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15845 offset
= (offset
== 0xf ? -1 : offset
);
15847 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15852 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15853 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15854 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15856 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15861 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15862 int rb
= 29; /* SP */
15863 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15865 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15870 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15871 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15872 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15874 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15879 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15880 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15881 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15883 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15888 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15889 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15890 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15892 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15897 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15898 int rb
= 29; /* SP */
15899 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15901 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15906 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15907 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15908 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15910 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15915 int rd
= uMIPS_RD5(ctx
->opcode
);
15916 int rs
= uMIPS_RS5(ctx
->opcode
);
15918 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15925 switch (ctx
->opcode
& 0x1) {
15935 switch (ctx
->opcode
& 0x1) {
15940 gen_addiur1sp(ctx
);
15944 case B16
: /* BC16 */
15945 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15946 sextract32(ctx
->opcode
, 0, 10) << 1,
15947 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15949 case BNEZ16
: /* BNEZC16 */
15950 case BEQZ16
: /* BEQZC16 */
15951 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15952 mmreg(uMIPS_RD(ctx
->opcode
)),
15953 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15954 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15959 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15960 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15962 imm
= (imm
== 0x7f ? -1 : imm
);
15963 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15969 generate_exception_end(ctx
, EXCP_RI
);
15972 decode_micromips32_opc(env
, ctx
);
15985 /* MAJOR, P16, and P32 pools opcodes */
15989 NM_MOVE_BALC
= 0x02,
15997 NM_P16_SHIFT
= 0x0c,
16015 NM_P_LS_U12
= 0x21,
16025 NM_P16_ADDU
= 0x2c,
16039 NM_MOVEPREV
= 0x3f,
16042 /* POOL32A instruction pool */
16044 NM_POOL32A0
= 0x00,
16045 NM_SPECIAL2
= 0x01,
16048 NM_POOL32A5
= 0x05,
16049 NM_POOL32A7
= 0x07,
16052 /* P.GP.W instruction pool */
16054 NM_ADDIUGP_W
= 0x00,
16059 /* P48I instruction pool */
16063 NM_ADDIUGP48
= 0x02,
16064 NM_ADDIUPC48
= 0x03,
16069 /* P.U12 instruction pool */
16078 NM_ADDIUNEG
= 0x08,
16085 /* POOL32F instruction pool */
16087 NM_POOL32F_0
= 0x00,
16088 NM_POOL32F_3
= 0x03,
16089 NM_POOL32F_5
= 0x05,
16092 /* POOL32S instruction pool */
16094 NM_POOL32S_0
= 0x00,
16095 NM_POOL32S_4
= 0x04,
16098 /* P.LUI instruction pool */
16104 /* P.GP.BH instruction pool */
16109 NM_ADDIUGP_B
= 0x03,
16112 NM_P_GP_CP1
= 0x06,
16115 /* P.LS.U12 instruction pool */
16120 NM_P_PREFU12
= 0x03,
16133 /* P.LS.S9 instruction pool */
16139 NM_P_LS_UAWM
= 0x05,
16142 /* P.BAL instruction pool */
16148 /* P.J instruction pool */
16151 NM_JALRC_HB
= 0x01,
16152 NM_P_BALRSC
= 0x08,
16155 /* P.BR1 instruction pool */
16163 /* P.BR2 instruction pool */
16170 /* P.BRI instruction pool */
16182 /* P16.SHIFT instruction pool */
16188 /* POOL16C instruction pool */
16190 NM_POOL16C_0
= 0x00,
16194 /* P16.A1 instruction pool */
16196 NM_ADDIUR1SP
= 0x01,
16199 /* P16.A2 instruction pool */
16202 NM_P_ADDIURS5
= 0x01,
16205 /* P16.ADDU instruction pool */
16211 /* P16.SR instruction pool */
16214 NM_RESTORE_JRC16
= 0x01,
16217 /* P16.4X4 instruction pool */
16223 /* P16.LB instruction pool */
16230 /* P16.LH instruction pool */
16237 /* P.RI instruction pool */
16240 NM_P_SYSCALL
= 0x01,
16245 /* POOL32A0 instruction pool */
16280 NM_D_E_MT_VPE
= 0x56,
16288 /* POOL32A5 instruction pool */
16290 NM_CMP_EQ_PH
= 0x00,
16291 NM_CMP_LT_PH
= 0x08,
16292 NM_CMP_LE_PH
= 0x10,
16293 NM_CMPGU_EQ_QB
= 0x18,
16294 NM_CMPGU_LT_QB
= 0x20,
16295 NM_CMPGU_LE_QB
= 0x28,
16296 NM_CMPGDU_EQ_QB
= 0x30,
16297 NM_CMPGDU_LT_QB
= 0x38,
16298 NM_CMPGDU_LE_QB
= 0x40,
16299 NM_CMPU_EQ_QB
= 0x48,
16300 NM_CMPU_LT_QB
= 0x50,
16301 NM_CMPU_LE_QB
= 0x58,
16302 NM_ADDQ_S_W
= 0x60,
16303 NM_SUBQ_S_W
= 0x68,
16307 NM_ADDQ_S_PH
= 0x01,
16308 NM_ADDQH_R_PH
= 0x09,
16309 NM_ADDQH_R_W
= 0x11,
16310 NM_ADDU_S_QB
= 0x19,
16311 NM_ADDU_S_PH
= 0x21,
16312 NM_ADDUH_R_QB
= 0x29,
16313 NM_SHRAV_R_PH
= 0x31,
16314 NM_SHRAV_R_QB
= 0x39,
16315 NM_SUBQ_S_PH
= 0x41,
16316 NM_SUBQH_R_PH
= 0x49,
16317 NM_SUBQH_R_W
= 0x51,
16318 NM_SUBU_S_QB
= 0x59,
16319 NM_SUBU_S_PH
= 0x61,
16320 NM_SUBUH_R_QB
= 0x69,
16321 NM_SHLLV_S_PH
= 0x71,
16322 NM_PRECR_SRA_R_PH_W
= 0x79,
16324 NM_MULEU_S_PH_QBL
= 0x12,
16325 NM_MULEU_S_PH_QBR
= 0x1a,
16326 NM_MULQ_RS_PH
= 0x22,
16327 NM_MULQ_S_PH
= 0x2a,
16328 NM_MULQ_RS_W
= 0x32,
16329 NM_MULQ_S_W
= 0x3a,
16332 NM_SHRAV_R_W
= 0x5a,
16333 NM_SHRLV_PH
= 0x62,
16334 NM_SHRLV_QB
= 0x6a,
16335 NM_SHLLV_QB
= 0x72,
16336 NM_SHLLV_S_W
= 0x7a,
16340 NM_MULEQ_S_W_PHL
= 0x04,
16341 NM_MULEQ_S_W_PHR
= 0x0c,
16343 NM_MUL_S_PH
= 0x05,
16344 NM_PRECR_QB_PH
= 0x0d,
16345 NM_PRECRQ_QB_PH
= 0x15,
16346 NM_PRECRQ_PH_W
= 0x1d,
16347 NM_PRECRQ_RS_PH_W
= 0x25,
16348 NM_PRECRQU_S_QB_PH
= 0x2d,
16349 NM_PACKRL_PH
= 0x35,
16353 NM_SHRA_R_W
= 0x5e,
16354 NM_SHRA_R_PH
= 0x66,
16355 NM_SHLL_S_PH
= 0x76,
16356 NM_SHLL_S_W
= 0x7e,
16361 /* POOL32A7 instruction pool */
16366 NM_POOL32AXF
= 0x07,
16369 /* P.SR instruction pool */
16375 /* P.SHIFT instruction pool */
16383 /* P.ROTX instruction pool */
16388 /* P.INS instruction pool */
16393 /* P.EXT instruction pool */
16398 /* POOL32F_0 (fmt) instruction pool */
16403 NM_SELEQZ_S
= 0x07,
16404 NM_SELEQZ_D
= 0x47,
16408 NM_SELNEZ_S
= 0x0f,
16409 NM_SELNEZ_D
= 0x4f,
16424 /* POOL32F_3 instruction pool */
16428 NM_MINA_FMT
= 0x04,
16429 NM_MAXA_FMT
= 0x05,
16430 NM_POOL32FXF
= 0x07,
16433 /* POOL32F_5 instruction pool */
16435 NM_CMP_CONDN_S
= 0x00,
16436 NM_CMP_CONDN_D
= 0x02,
16439 /* P.GP.LH instruction pool */
16445 /* P.GP.SH instruction pool */
16450 /* P.GP.CP1 instruction pool */
16458 /* P.LS.S0 instruction pool */
16475 NM_P_PREFS9
= 0x03,
16481 /* P.LS.S1 instruction pool */
16483 NM_ASET_ACLR
= 0x02,
16491 /* P.LS.WM instruction pool */
16497 /* P.LS.UAWM instruction pool */
16503 /* P.BR3A instruction pool */
16509 NM_BPOSGE32C
= 0x04,
16512 /* P16.RI instruction pool */
16514 NM_P16_SYSCALL
= 0x01,
16519 /* POOL16C_0 instruction pool */
16521 NM_POOL16C_00
= 0x00,
16524 /* P16.JRC instruction pool */
16530 /* P.SYSCALL instruction pool */
16536 /* P.TRAP instruction pool */
16542 /* P.CMOVE instruction pool */
16548 /* POOL32Axf instruction pool */
16550 NM_POOL32AXF_1
= 0x01,
16551 NM_POOL32AXF_2
= 0x02,
16552 NM_POOL32AXF_4
= 0x04,
16553 NM_POOL32AXF_5
= 0x05,
16554 NM_POOL32AXF_7
= 0x07,
16557 /* POOL32Axf_1 instruction pool */
16559 NM_POOL32AXF_1_0
= 0x00,
16560 NM_POOL32AXF_1_1
= 0x01,
16561 NM_POOL32AXF_1_3
= 0x03,
16562 NM_POOL32AXF_1_4
= 0x04,
16563 NM_POOL32AXF_1_5
= 0x05,
16564 NM_POOL32AXF_1_7
= 0x07,
16567 /* POOL32Axf_2 instruction pool */
16569 NM_POOL32AXF_2_0_7
= 0x00,
16570 NM_POOL32AXF_2_8_15
= 0x01,
16571 NM_POOL32AXF_2_16_23
= 0x02,
16572 NM_POOL32AXF_2_24_31
= 0x03,
16575 /* POOL32Axf_7 instruction pool */
16577 NM_SHRA_R_QB
= 0x0,
16582 /* POOL32Axf_1_0 instruction pool */
16590 /* POOL32Axf_1_1 instruction pool */
16596 /* POOL32Axf_1_3 instruction pool */
16604 /* POOL32Axf_1_4 instruction pool */
16610 /* POOL32Axf_1_5 instruction pool */
16612 NM_MAQ_S_W_PHR
= 0x0,
16613 NM_MAQ_S_W_PHL
= 0x1,
16614 NM_MAQ_SA_W_PHR
= 0x2,
16615 NM_MAQ_SA_W_PHL
= 0x3,
16618 /* POOL32Axf_1_7 instruction pool */
16622 NM_EXTR_RS_W
= 0x2,
16626 /* POOL32Axf_2_0_7 instruction pool */
16629 NM_DPAQ_S_W_PH
= 0x1,
16631 NM_DPSQ_S_W_PH
= 0x3,
16638 /* POOL32Axf_2_8_15 instruction pool */
16640 NM_DPAX_W_PH
= 0x0,
16641 NM_DPAQ_SA_L_W
= 0x1,
16642 NM_DPSX_W_PH
= 0x2,
16643 NM_DPSQ_SA_L_W
= 0x3,
16646 NM_EXTRV_R_W
= 0x7,
16649 /* POOL32Axf_2_16_23 instruction pool */
16651 NM_DPAU_H_QBL
= 0x0,
16652 NM_DPAQX_S_W_PH
= 0x1,
16653 NM_DPSU_H_QBL
= 0x2,
16654 NM_DPSQX_S_W_PH
= 0x3,
16657 NM_MULSA_W_PH
= 0x6,
16658 NM_EXTRV_RS_W
= 0x7,
16661 /* POOL32Axf_2_24_31 instruction pool */
16663 NM_DPAU_H_QBR
= 0x0,
16664 NM_DPAQX_SA_W_PH
= 0x1,
16665 NM_DPSU_H_QBR
= 0x2,
16666 NM_DPSQX_SA_W_PH
= 0x3,
16669 NM_MULSAQ_S_W_PH
= 0x6,
16670 NM_EXTRV_S_H
= 0x7,
16673 /* POOL32Axf_{4, 5} instruction pool */
16692 /* nanoMIPS DSP instructions */
16693 NM_ABSQ_S_QB
= 0x00,
16694 NM_ABSQ_S_PH
= 0x08,
16695 NM_ABSQ_S_W
= 0x10,
16696 NM_PRECEQ_W_PHL
= 0x28,
16697 NM_PRECEQ_W_PHR
= 0x30,
16698 NM_PRECEQU_PH_QBL
= 0x38,
16699 NM_PRECEQU_PH_QBR
= 0x48,
16700 NM_PRECEU_PH_QBL
= 0x58,
16701 NM_PRECEU_PH_QBR
= 0x68,
16702 NM_PRECEQU_PH_QBLA
= 0x39,
16703 NM_PRECEQU_PH_QBRA
= 0x49,
16704 NM_PRECEU_PH_QBLA
= 0x59,
16705 NM_PRECEU_PH_QBRA
= 0x69,
16706 NM_REPLV_PH
= 0x01,
16707 NM_REPLV_QB
= 0x09,
16710 NM_RADDU_W_QB
= 0x78,
16716 /* PP.SR instruction pool */
16720 NM_RESTORE_JRC
= 0x03,
16723 /* P.SR.F instruction pool */
16726 NM_RESTOREF
= 0x01,
16729 /* P16.SYSCALL instruction pool */
16731 NM_SYSCALL16
= 0x00,
16732 NM_HYPCALL16
= 0x01,
16735 /* POOL16C_00 instruction pool */
16743 /* PP.LSX and PP.LSXS instruction pool */
16781 /* ERETx instruction pool */
16787 /* POOL32FxF_{0, 1} insturction pool */
16796 NM_CVT_S_PL
= 0x84,
16797 NM_CVT_S_PU
= 0xa4,
16799 NM_CVT_L_S
= 0x004,
16800 NM_CVT_L_D
= 0x104,
16801 NM_CVT_W_S
= 0x024,
16802 NM_CVT_W_D
= 0x124,
16804 NM_RSQRT_S
= 0x008,
16805 NM_RSQRT_D
= 0x108,
16810 NM_RECIP_S
= 0x048,
16811 NM_RECIP_D
= 0x148,
16813 NM_FLOOR_L_S
= 0x00c,
16814 NM_FLOOR_L_D
= 0x10c,
16816 NM_FLOOR_W_S
= 0x02c,
16817 NM_FLOOR_W_D
= 0x12c,
16819 NM_CEIL_L_S
= 0x04c,
16820 NM_CEIL_L_D
= 0x14c,
16821 NM_CEIL_W_S
= 0x06c,
16822 NM_CEIL_W_D
= 0x16c,
16823 NM_TRUNC_L_S
= 0x08c,
16824 NM_TRUNC_L_D
= 0x18c,
16825 NM_TRUNC_W_S
= 0x0ac,
16826 NM_TRUNC_W_D
= 0x1ac,
16827 NM_ROUND_L_S
= 0x0cc,
16828 NM_ROUND_L_D
= 0x1cc,
16829 NM_ROUND_W_S
= 0x0ec,
16830 NM_ROUND_W_D
= 0x1ec,
16838 NM_CVT_D_S
= 0x04d,
16839 NM_CVT_D_W
= 0x0cd,
16840 NM_CVT_D_L
= 0x14d,
16841 NM_CVT_S_D
= 0x06d,
16842 NM_CVT_S_W
= 0x0ed,
16843 NM_CVT_S_L
= 0x16d,
16846 /* P.LL instruction pool */
16852 /* P.SC instruction pool */
16858 /* P.DVP instruction pool */
/*
 * nanoMIPS decoding engine
 */

/* extraction utilities */

#define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
#define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = { 8, 9, 10, 11, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = { 8, 9, 10, 0, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
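/*
 * Illustrative note (not from the original source): with the gpr3 map above,
 * an encoded 3-bit register 0 selects $16 and 5 selects $5, e.g.
 *
 *     decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode))
 *
 * picks map[(opcode >> 7) & 0x7].  The gpr3.src.store variant only differs
 * in slot 0, which selects $zero instead of $16.
 */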
/* extraction utilities */

#define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
#define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)

static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}
static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
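/*
 * Illustrative note (not from the original source): for a nanoMIPS SAVE with
 * rt = 30, count = 2, u = 16 the loop above stores $30 at sp - 4 and $31 at
 * sp - 8 (the (rt & 0x10) term keeps the wrap-around inside the $16..$31
 * bank), and gen_adjust_sp() then drops the stack pointer by 16.  With gp
 * set, the last slot is written from $28 instead of the next register.
 */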
static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
    case NM_NOT16:
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case NM_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case NM_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case NM_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    }
}
17003 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
17005 int rt
= extract32(ctx
->opcode
, 21, 5);
17006 int rs
= extract32(ctx
->opcode
, 16, 5);
17007 int rd
= extract32(ctx
->opcode
, 11, 5);
17009 switch (extract32(ctx
->opcode
, 3, 7)) {
17011 switch (extract32(ctx
->opcode
, 10, 1)) {
17014 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
17018 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
17024 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
17028 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
17031 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
17034 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
17037 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
17040 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
17043 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
17046 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
17049 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
17053 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
17056 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
17059 switch (extract32(ctx
->opcode
, 10, 1)) {
17061 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
17064 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
17069 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
17072 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
17075 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
17078 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
17081 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
17086 #ifndef CONFIG_USER_ONLY
17087 TCGv t0
= tcg_temp_new();
17088 switch (extract32(ctx
->opcode
, 10, 1)) {
17091 check_cp0_enabled(ctx
);
17092 gen_helper_dvp(t0
, cpu_env
);
17093 gen_store_gpr(t0
, rt
);
17098 check_cp0_enabled(ctx
);
17099 gen_helper_evp(t0
, cpu_env
);
17100 gen_store_gpr(t0
, rt
);
17107 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);

            /* operands of same sign, result different sign */
            tcg_gen_setcondi_tl(TCG_COND_LT, t0, t1, 0);
            gen_store_gpr(t0, rd);
        }
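        /*
         * Illustrative check of the overflow test above: for 32-bit
         * inputs 0x7fffffff + 0x00000001 the sum is 0x80000000;
         * rs ^ rt leaves the sign bit clear (same-signed operands)
         * while sum ^ rt sets it, so the andc result is negative and
         * the destination register receives 1.
         */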
17134 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
17137 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
17140 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
17143 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
17146 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
17149 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
17152 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
17155 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
17157 #ifndef CONFIG_USER_ONLY
17159 check_cp0_enabled(ctx
);
17161 /* Treat as NOP. */
17164 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
17167 check_cp0_enabled(ctx
);
17169 TCGv t0
= tcg_temp_new();
17171 gen_load_gpr(t0
, rt
);
17172 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
17176 case NM_D_E_MT_VPE
:
17178 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
17179 TCGv t0
= tcg_temp_new();
17186 gen_helper_dmt(t0
);
17187 gen_store_gpr(t0
, rt
);
17188 } else if (rs
== 0) {
17191 gen_helper_dvpe(t0
, cpu_env
);
17192 gen_store_gpr(t0
, rt
);
17194 generate_exception_end(ctx
, EXCP_RI
);
17201 gen_helper_emt(t0
);
17202 gen_store_gpr(t0
, rt
);
17203 } else if (rs
== 0) {
17206 gen_helper_evpe(t0
, cpu_env
);
17207 gen_store_gpr(t0
, rt
);
17209 generate_exception_end(ctx
, EXCP_RI
);
17220 TCGv t0
= tcg_temp_new();
17221 TCGv t1
= tcg_temp_new();
17223 gen_load_gpr(t0
, rt
);
17224 gen_load_gpr(t1
, rs
);
17225 gen_helper_fork(t0
, t1
);
17232 check_cp0_enabled(ctx
);
17234 /* Treat as NOP. */
17237 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
17238 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
17242 check_cp0_enabled(ctx
);
17243 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
17244 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
17249 TCGv t0
= tcg_temp_new();
17251 gen_load_gpr(t0
, rs
);
17252 gen_helper_yield(t0
, cpu_env
, t0
);
17253 gen_store_gpr(t0
, rt
);
17259 generate_exception_end(ctx
, EXCP_RI
);
static void gen_pool32axf_1_5_nanomips_insn(DisasContext *ctx, uint32_t opc,
                                            int ret, int v1, int v2)
{
    TCGv_i32 t0;
    TCGv v0_t;
    TCGv v1_t;

    t0 = tcg_temp_new_i32();

    v0_t = tcg_temp_new();
    v1_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, v2 >> 3);

    gen_load_gpr(v0_t, ret);
    gen_load_gpr(v1_t, v1);

    switch (opc) {
    case NM_MAQ_S_W_PHR:
        gen_helper_maq_s_w_phr(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_S_W_PHL:
        gen_helper_maq_s_w_phl(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_SA_W_PHR:
        gen_helper_maq_sa_w_phr(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_SA_W_PHL:
        gen_helper_maq_sa_w_phl(t0, v1_t, v0_t, cpu_env);
        break;
    default:
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(t0);

    tcg_temp_free(v0_t);
    tcg_temp_free(v1_t);
}
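
/*
 * Note (illustrative): in the MAQ cases above, the i32 temporary built
 * from v2 >> 3 appears to carry the DSP accumulator index passed to the
 * helpers, while the two GPR operands hold the packed halfword vectors
 * being multiplied and accumulated.
 */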
17311 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17312 int ret
, int v1
, int v2
)
17315 TCGv t0
= tcg_temp_new();
17316 TCGv t1
= tcg_temp_new();
17317 TCGv v0_t
= tcg_temp_new();
17319 gen_load_gpr(v0_t
, v1
);
17322 case NM_POOL32AXF_1_0
:
17324 switch (extract32(ctx
->opcode
, 12, 2)) {
17326 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
17329 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
17332 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
17335 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
17339 case NM_POOL32AXF_1_1
:
17341 switch (extract32(ctx
->opcode
, 12, 2)) {
17343 tcg_gen_movi_tl(t0
, v2
);
17344 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
17347 tcg_gen_movi_tl(t0
, v2
>> 3);
17348 gen_helper_shilo(t0
, v0_t
, cpu_env
);
17351 generate_exception_end(ctx
, EXCP_RI
);
17355 case NM_POOL32AXF_1_3
:
17357 imm
= extract32(ctx
->opcode
, 14, 7);
17358 switch (extract32(ctx
->opcode
, 12, 2)) {
17360 tcg_gen_movi_tl(t0
, imm
);
17361 gen_helper_rddsp(t0
, t0
, cpu_env
);
17362 gen_store_gpr(t0
, ret
);
17365 gen_load_gpr(t0
, ret
);
17366 tcg_gen_movi_tl(t1
, imm
);
17367 gen_helper_wrdsp(t0
, t1
, cpu_env
);
17370 tcg_gen_movi_tl(t0
, v2
>> 3);
17371 tcg_gen_movi_tl(t1
, v1
);
17372 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
17373 gen_store_gpr(t0
, ret
);
17376 tcg_gen_movi_tl(t0
, v2
>> 3);
17377 tcg_gen_movi_tl(t1
, v1
);
17378 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
17379 gen_store_gpr(t0
, ret
);
17383 case NM_POOL32AXF_1_4
:
17385 tcg_gen_movi_tl(t0
, v2
>> 2);
17386 switch (extract32(ctx
->opcode
, 12, 1)) {
17388 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
17389 gen_store_gpr(t0
, ret
);
17392 gen_helper_shrl_qb(t0
, t0
, v0_t
);
17393 gen_store_gpr(t0
, ret
);
17397 case NM_POOL32AXF_1_5
:
17398 opc
= extract32(ctx
->opcode
, 12, 2);
17399 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
17401 case NM_POOL32AXF_1_7
:
17403 tcg_gen_movi_tl(t0
, v2
>> 3);
17404 tcg_gen_movi_tl(t1
, v1
);
17405 switch (extract32(ctx
->opcode
, 12, 2)) {
17407 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
17408 gen_store_gpr(t0
, ret
);
17411 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
17412 gen_store_gpr(t0
, ret
);
17415 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
17416 gen_store_gpr(t0
, ret
);
17419 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
17420 gen_store_gpr(t0
, ret
);
17425 generate_exception_end(ctx
, EXCP_RI
);
17431 tcg_temp_free(v0_t
);
17434 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
17435 TCGv v0
, TCGv v1
, int rd
)
17439 t0
= tcg_temp_new_i32();
17441 tcg_gen_movi_i32(t0
, rd
>> 3);
17444 case NM_POOL32AXF_2_0_7
:
17445 switch (extract32(ctx
->opcode
, 9, 3)) {
17448 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
17450 case NM_DPAQ_S_W_PH
:
17452 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17456 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
17458 case NM_DPSQ_S_W_PH
:
17460 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17463 generate_exception_end(ctx
, EXCP_RI
);
17467 case NM_POOL32AXF_2_8_15
:
17468 switch (extract32(ctx
->opcode
, 9, 3)) {
17471 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
17473 case NM_DPAQ_SA_L_W
:
17475 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
17479 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
17481 case NM_DPSQ_SA_L_W
:
17483 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
17486 generate_exception_end(ctx
, EXCP_RI
);
17490 case NM_POOL32AXF_2_16_23
:
17491 switch (extract32(ctx
->opcode
, 9, 3)) {
17492 case NM_DPAU_H_QBL
:
17494 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
17496 case NM_DPAQX_S_W_PH
:
17498 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
17500 case NM_DPSU_H_QBL
:
17502 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
17504 case NM_DPSQX_S_W_PH
:
17506 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
17508 case NM_MULSA_W_PH
:
17510 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
17513 generate_exception_end(ctx
, EXCP_RI
);
17517 case NM_POOL32AXF_2_24_31
:
17518 switch (extract32(ctx
->opcode
, 9, 3)) {
17519 case NM_DPAU_H_QBR
:
17521 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
17523 case NM_DPAQX_SA_W_PH
:
17525 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
17527 case NM_DPSU_H_QBR
:
17529 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
17531 case NM_DPSQX_SA_W_PH
:
17533 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
17535 case NM_MULSAQ_S_W_PH
:
17537 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17540 generate_exception_end(ctx
, EXCP_RI
);
17545 generate_exception_end(ctx
, EXCP_RI
);
17549 tcg_temp_free_i32(t0
);
17552 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17553 int rt
, int rs
, int rd
)
17556 TCGv t0
= tcg_temp_new();
17557 TCGv t1
= tcg_temp_new();
17558 TCGv v0_t
= tcg_temp_new();
17559 TCGv v1_t
= tcg_temp_new();
17561 gen_load_gpr(v0_t
, rt
);
17562 gen_load_gpr(v1_t
, rs
);
17565 case NM_POOL32AXF_2_0_7
:
17566 switch (extract32(ctx
->opcode
, 9, 3)) {
17568 case NM_DPAQ_S_W_PH
:
17570 case NM_DPSQ_S_W_PH
:
17571 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17576 gen_load_gpr(t0
, rs
);
17578 if (rd
!= 0 && rd
!= 2) {
17579 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
17580 tcg_gen_ext32u_tl(t0
, t0
);
17581 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
17582 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
17584 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
17590 int acc
= extract32(ctx
->opcode
, 14, 2);
17591 TCGv_i64 t2
= tcg_temp_new_i64();
17592 TCGv_i64 t3
= tcg_temp_new_i64();
17594 gen_load_gpr(t0
, rt
);
17595 gen_load_gpr(t1
, rs
);
17596 tcg_gen_ext_tl_i64(t2
, t0
);
17597 tcg_gen_ext_tl_i64(t3
, t1
);
17598 tcg_gen_mul_i64(t2
, t2
, t3
);
17599 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17600 tcg_gen_add_i64(t2
, t2
, t3
);
17601 tcg_temp_free_i64(t3
);
17602 gen_move_low32(cpu_LO
[acc
], t2
);
17603 gen_move_high32(cpu_HI
[acc
], t2
);
17604 tcg_temp_free_i64(t2
);
17610 int acc
= extract32(ctx
->opcode
, 14, 2);
17611 TCGv_i32 t2
= tcg_temp_new_i32();
17612 TCGv_i32 t3
= tcg_temp_new_i32();
17614 gen_load_gpr(t0
, rs
);
17615 gen_load_gpr(t1
, rt
);
17616 tcg_gen_trunc_tl_i32(t2
, t0
);
17617 tcg_gen_trunc_tl_i32(t3
, t1
);
17618 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
17619 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
17620 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
17621 tcg_temp_free_i32(t2
);
17622 tcg_temp_free_i32(t3
);
17627 gen_load_gpr(v1_t
, rs
);
17628 tcg_gen_movi_tl(t0
, rd
>> 3);
17629 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
17630 gen_store_gpr(t0
, ret
);
17634 case NM_POOL32AXF_2_8_15
:
17635 switch (extract32(ctx
->opcode
, 9, 3)) {
17637 case NM_DPAQ_SA_L_W
:
17639 case NM_DPSQ_SA_L_W
:
17640 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17645 int acc
= extract32(ctx
->opcode
, 14, 2);
17646 TCGv_i64 t2
= tcg_temp_new_i64();
17647 TCGv_i64 t3
= tcg_temp_new_i64();
17649 gen_load_gpr(t0
, rs
);
17650 gen_load_gpr(t1
, rt
);
17651 tcg_gen_ext32u_tl(t0
, t0
);
17652 tcg_gen_ext32u_tl(t1
, t1
);
17653 tcg_gen_extu_tl_i64(t2
, t0
);
17654 tcg_gen_extu_tl_i64(t3
, t1
);
17655 tcg_gen_mul_i64(t2
, t2
, t3
);
17656 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17657 tcg_gen_add_i64(t2
, t2
, t3
);
17658 tcg_temp_free_i64(t3
);
17659 gen_move_low32(cpu_LO
[acc
], t2
);
17660 gen_move_high32(cpu_HI
[acc
], t2
);
17661 tcg_temp_free_i64(t2
);
17667 int acc
= extract32(ctx
->opcode
, 14, 2);
17668 TCGv_i32 t2
= tcg_temp_new_i32();
17669 TCGv_i32 t3
= tcg_temp_new_i32();
17671 gen_load_gpr(t0
, rs
);
17672 gen_load_gpr(t1
, rt
);
17673 tcg_gen_trunc_tl_i32(t2
, t0
);
17674 tcg_gen_trunc_tl_i32(t3
, t1
);
17675 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
17676 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
17677 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
17678 tcg_temp_free_i32(t2
);
17679 tcg_temp_free_i32(t3
);
17684 tcg_gen_movi_tl(t0
, rd
>> 3);
17685 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
17686 gen_store_gpr(t0
, ret
);
17689 generate_exception_end(ctx
, EXCP_RI
);
17693 case NM_POOL32AXF_2_16_23
:
17694 switch (extract32(ctx
->opcode
, 9, 3)) {
17695 case NM_DPAU_H_QBL
:
17696 case NM_DPAQX_S_W_PH
:
17697 case NM_DPSU_H_QBL
:
17698 case NM_DPSQX_S_W_PH
:
17699 case NM_MULSA_W_PH
:
17700 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17704 tcg_gen_movi_tl(t0
, rd
>> 3);
17705 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
17706 gen_store_gpr(t0
, ret
);
17711 int acc
= extract32(ctx
->opcode
, 14, 2);
17712 TCGv_i64 t2
= tcg_temp_new_i64();
17713 TCGv_i64 t3
= tcg_temp_new_i64();
17715 gen_load_gpr(t0
, rs
);
17716 gen_load_gpr(t1
, rt
);
17717 tcg_gen_ext_tl_i64(t2
, t0
);
17718 tcg_gen_ext_tl_i64(t3
, t1
);
17719 tcg_gen_mul_i64(t2
, t2
, t3
);
17720 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17721 tcg_gen_sub_i64(t2
, t3
, t2
);
17722 tcg_temp_free_i64(t3
);
17723 gen_move_low32(cpu_LO
[acc
], t2
);
17724 gen_move_high32(cpu_HI
[acc
], t2
);
17725 tcg_temp_free_i64(t2
);
17728 case NM_EXTRV_RS_W
:
17730 tcg_gen_movi_tl(t0
, rd
>> 3);
17731 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
17732 gen_store_gpr(t0
, ret
);
17736 case NM_POOL32AXF_2_24_31
:
17737 switch (extract32(ctx
->opcode
, 9, 3)) {
17738 case NM_DPAU_H_QBR
:
17739 case NM_DPAQX_SA_W_PH
:
17740 case NM_DPSU_H_QBR
:
17741 case NM_DPSQX_SA_W_PH
:
17742 case NM_MULSAQ_S_W_PH
:
17743 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17747 tcg_gen_movi_tl(t0
, rd
>> 3);
17748 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
17749 gen_store_gpr(t0
, ret
);
17754 int acc
= extract32(ctx
->opcode
, 14, 2);
17755 TCGv_i64 t2
= tcg_temp_new_i64();
17756 TCGv_i64 t3
= tcg_temp_new_i64();
17758 gen_load_gpr(t0
, rs
);
17759 gen_load_gpr(t1
, rt
);
17760 tcg_gen_ext32u_tl(t0
, t0
);
17761 tcg_gen_ext32u_tl(t1
, t1
);
17762 tcg_gen_extu_tl_i64(t2
, t0
);
17763 tcg_gen_extu_tl_i64(t3
, t1
);
17764 tcg_gen_mul_i64(t2
, t2
, t3
);
17765 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17766 tcg_gen_sub_i64(t2
, t3
, t2
);
17767 tcg_temp_free_i64(t3
);
17768 gen_move_low32(cpu_LO
[acc
], t2
);
17769 gen_move_high32(cpu_HI
[acc
], t2
);
17770 tcg_temp_free_i64(t2
);
17775 tcg_gen_movi_tl(t0
, rd
>> 3);
17776 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
17777 gen_store_gpr(t0
, ret
);
17782 generate_exception_end(ctx
, EXCP_RI
);
17789 tcg_temp_free(v0_t
);
17790 tcg_temp_free(v1_t
);
17793 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17797 TCGv t0
= tcg_temp_new();
17798 TCGv v0_t
= tcg_temp_new();
17800 gen_load_gpr(v0_t
, rs
);
17805 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
17806 gen_store_gpr(v0_t
, ret
);
17810 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
17811 gen_store_gpr(v0_t
, ret
);
17815 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
17816 gen_store_gpr(v0_t
, ret
);
17818 case NM_PRECEQ_W_PHL
:
17820 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
17821 tcg_gen_ext32s_tl(v0_t
, v0_t
);
17822 gen_store_gpr(v0_t
, ret
);
17824 case NM_PRECEQ_W_PHR
:
17826 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
17827 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
17828 tcg_gen_ext32s_tl(v0_t
, v0_t
);
17829 gen_store_gpr(v0_t
, ret
);
17831 case NM_PRECEQU_PH_QBL
:
17833 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
17834 gen_store_gpr(v0_t
, ret
);
17836 case NM_PRECEQU_PH_QBR
:
17838 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
17839 gen_store_gpr(v0_t
, ret
);
17841 case NM_PRECEQU_PH_QBLA
:
17843 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
17844 gen_store_gpr(v0_t
, ret
);
17846 case NM_PRECEQU_PH_QBRA
:
17848 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
17849 gen_store_gpr(v0_t
, ret
);
17851 case NM_PRECEU_PH_QBL
:
17853 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
17854 gen_store_gpr(v0_t
, ret
);
17856 case NM_PRECEU_PH_QBR
:
17858 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
17859 gen_store_gpr(v0_t
, ret
);
17861 case NM_PRECEU_PH_QBLA
:
17863 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
17864 gen_store_gpr(v0_t
, ret
);
17866 case NM_PRECEU_PH_QBRA
:
17868 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
17869 gen_store_gpr(v0_t
, ret
);
17873 tcg_gen_ext16u_tl(v0_t
, v0_t
);
17874 tcg_gen_shli_tl(t0
, v0_t
, 16);
17875 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
17876 tcg_gen_ext32s_tl(v0_t
, v0_t
);
17877 gen_store_gpr(v0_t
, ret
);
17881 tcg_gen_ext8u_tl(v0_t
, v0_t
);
17882 tcg_gen_shli_tl(t0
, v0_t
, 8);
17883 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
17884 tcg_gen_shli_tl(t0
, v0_t
, 16);
17885 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
17886 tcg_gen_ext32s_tl(v0_t
, v0_t
);
17887 gen_store_gpr(v0_t
, ret
);
17891 gen_helper_bitrev(v0_t
, v0_t
);
17892 gen_store_gpr(v0_t
, ret
);
17897 TCGv tv0
= tcg_temp_new();
17899 gen_load_gpr(tv0
, rt
);
17900 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
17901 gen_store_gpr(v0_t
, ret
);
17902 tcg_temp_free(tv0
);
17905 case NM_RADDU_W_QB
:
17907 gen_helper_raddu_w_qb(v0_t
, v0_t
);
17908 gen_store_gpr(v0_t
, ret
);
17911 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
17915 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
17919 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
17922 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
17925 generate_exception_end(ctx
, EXCP_RI
);
17929 tcg_temp_free(v0_t
);
17933 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17934 int rt
, int rs
, int rd
)
17936 TCGv t0
= tcg_temp_new();
17937 TCGv rs_t
= tcg_temp_new();
17939 gen_load_gpr(rs_t
, rs
);
17944 tcg_gen_movi_tl(t0
, rd
>> 2);
17945 switch (extract32(ctx
->opcode
, 12, 1)) {
17948 gen_helper_shra_qb(t0
, t0
, rs_t
);
17949 gen_store_gpr(t0
, rt
);
17953 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
17954 gen_store_gpr(t0
, rt
);
17960 tcg_gen_movi_tl(t0
, rd
>> 1);
17961 gen_helper_shrl_ph(t0
, t0
, rs_t
);
17962 gen_store_gpr(t0
, rt
);
17968 target_long result
;
17969 imm
= extract32(ctx
->opcode
, 13, 8);
17970 result
= (uint32_t)imm
<< 24 |
17971 (uint32_t)imm
<< 16 |
17972 (uint32_t)imm
<< 8 |
17974 result
= (int32_t)result
;
17975 tcg_gen_movi_tl(t0
, result
);
17976 gen_store_gpr(t0
, rt
);
17980 generate_exception_end(ctx
, EXCP_RI
);
17984 tcg_temp_free(rs_t
);
17988 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
17990 int rt
= extract32(ctx
->opcode
, 21, 5);
17991 int rs
= extract32(ctx
->opcode
, 16, 5);
17992 int rd
= extract32(ctx
->opcode
, 11, 5);
17994 switch (extract32(ctx
->opcode
, 6, 3)) {
17995 case NM_POOL32AXF_1
:
17997 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
17998 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18001 case NM_POOL32AXF_2
:
18003 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
18004 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18007 case NM_POOL32AXF_4
:
18009 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
18010 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
18013 case NM_POOL32AXF_5
:
18014 switch (extract32(ctx
->opcode
, 9, 7)) {
18015 #ifndef CONFIG_USER_ONLY
18017 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
18020 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
18023 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
18026 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
18029 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
18032 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
18035 check_cp0_enabled(ctx
);
18037 TCGv t0
= tcg_temp_new();
18039 save_cpu_state(ctx
, 1);
18040 gen_helper_di(t0
, cpu_env
);
18041 gen_store_gpr(t0
, rt
);
18042 /* Stop translation as we may have switched the execution mode */
18043 ctx
->base
.is_jmp
= DISAS_STOP
;
18048 check_cp0_enabled(ctx
);
18050 TCGv t0
= tcg_temp_new();
18052 save_cpu_state(ctx
, 1);
18053 gen_helper_ei(t0
, cpu_env
);
18054 gen_store_gpr(t0
, rt
);
18055 /* Stop translation as we may have switched the execution mode */
18056 ctx
->base
.is_jmp
= DISAS_STOP
;
18061 gen_load_srsgpr(rs
, rt
);
18064 gen_store_srsgpr(rs
, rt
);
18067 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
18070 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
18073 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
18077 generate_exception_end(ctx
, EXCP_RI
);
18081 case NM_POOL32AXF_7
:
18083 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
18084 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18088 generate_exception_end(ctx
, EXCP_RI
);
/* Immediate Value Compact Branches */
static void gen_compute_imm_branch(DisasContext *ctx, uint32_t opc,
                                   int rt, int32_t imm, int32_t offset)
{
    TCGCond cond;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rt);
    tcg_gen_movi_tl(t1, imm);
    ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
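    /*
     * Note (illustrative): the branch target is computed up front as the
     * address of the next instruction plus the offset; the opcode switch
     * below only decides which comparison, if any, guards the branch.
     */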
18106 /* Load needed operands and calculate btarget */
18109 if (rt
== 0 && imm
== 0) {
18110 /* Unconditional branch */
18111 } else if (rt
== 0 && imm
!= 0) {
18116 cond
= TCG_COND_EQ
;
18122 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
18123 generate_exception_end(ctx
, EXCP_RI
);
18125 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
18126 /* Unconditional branch */
18127 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
18131 tcg_gen_shri_tl(t0
, t0
, imm
);
18132 tcg_gen_andi_tl(t0
, t0
, 1);
18133 tcg_gen_movi_tl(t1
, 0);
18135 if (opc
== NM_BBEQZC
) {
18136 cond
= TCG_COND_EQ
;
18138 cond
= TCG_COND_NE
;
18143 if (rt
== 0 && imm
== 0) {
18146 } else if (rt
== 0 && imm
!= 0) {
18147 /* Unconditional branch */
18150 cond
= TCG_COND_NE
;
18154 if (rt
== 0 && imm
== 0) {
18155 /* Unconditional branch */
18158 cond
= TCG_COND_GE
;
18163 cond
= TCG_COND_LT
;
18166 if (rt
== 0 && imm
== 0) {
18167 /* Unconditional branch */
18170 cond
= TCG_COND_GEU
;
18175 cond
= TCG_COND_LTU
;
18178 MIPS_INVAL("Immediate Value Compact branch");
18179 generate_exception_end(ctx
, EXCP_RI
);
    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        gen_goto_tb(ctx, 0, ctx->btarget);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();

        tcg_gen_brcond_tl(tcg_invert_cond(cond), t0, t1, fs);

        gen_goto_tb(ctx, 1, ctx->btarget);

        gen_set_label(fs);

        gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
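
/*
 * Note (illustrative): for the conditional case above, the generated code
 * jumps over the taken-path gen_goto_tb() when the inverted condition
 * holds, so the fall-through exit continues at PC + 4; both exits go
 * through gen_goto_tb(), keeping direct translation-block chaining
 * possible.
 */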
/* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
static void gen_compute_nanomips_pbalrsc_branch(DisasContext *ctx, int rs,
                                                int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);

    if (rt != 0) {
        tcg_gen_movi_tl(cpu_gpr[rt], ctx->base.pc_next + 4);
    }

    /* calculate btarget */
    tcg_gen_shli_tl(t0, t0, 1);
    tcg_gen_movi_tl(t1, ctx->base.pc_next + 4);
    gen_op_addr_add(ctx, btarget, t1, t0);

    /* unconditional branch to register */
    tcg_gen_mov_tl(cpu_PC, btarget);
    tcg_gen_lookup_and_goto_ptr();

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
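
/*
 * Note (illustrative): the target computed above is the address of the
 * next instruction plus GPR[rs] shifted left by one, and rt (when
 * non-zero) receives that next-instruction address as the link value.
 */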
18231 /* nanoMIPS Branches */
18232 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
18233 int rs
, int rt
, int32_t offset
)
18235 int bcond_compute
= 0;
18236 TCGv t0
= tcg_temp_new();
18237 TCGv t1
= tcg_temp_new();
18239 /* Load needed operands and calculate btarget */
18241 /* compact branch */
18244 gen_load_gpr(t0
, rs
);
18245 gen_load_gpr(t1
, rt
);
18247 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18251 if (rs
== 0 || rs
== rt
) {
18252 /* OPC_BLEZALC, OPC_BGEZALC */
18253 /* OPC_BGTZALC, OPC_BLTZALC */
18254 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
18256 gen_load_gpr(t0
, rs
);
18257 gen_load_gpr(t1
, rt
);
18259 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18262 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18266 /* OPC_BEQZC, OPC_BNEZC */
18267 gen_load_gpr(t0
, rs
);
18269 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18271 /* OPC_JIC, OPC_JIALC */
18272 TCGv tbase
= tcg_temp_new();
18273 TCGv toffset
= tcg_temp_new();
18275 gen_load_gpr(tbase
, rt
);
18276 tcg_gen_movi_tl(toffset
, offset
);
18277 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
18278 tcg_temp_free(tbase
);
18279 tcg_temp_free(toffset
);
18283 MIPS_INVAL("Compact branch/jump");
18284 generate_exception_end(ctx
, EXCP_RI
);
18288 if (bcond_compute
== 0) {
18289 /* Uncoditional compact branch */
18292 gen_goto_tb(ctx
, 0, ctx
->btarget
);
18295 MIPS_INVAL("Compact branch/jump");
18296 generate_exception_end(ctx
, EXCP_RI
);
18300 /* Conditional compact branch */
18301 TCGLabel
*fs
= gen_new_label();
18305 if (rs
== 0 && rt
!= 0) {
18307 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
18308 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18310 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
18313 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
18317 if (rs
== 0 && rt
!= 0) {
18319 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
18320 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18322 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
18325 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
18329 if (rs
== 0 && rt
!= 0) {
18331 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
18332 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18334 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
18337 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
18341 if (rs
== 0 && rt
!= 0) {
18343 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
18344 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18346 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
18349 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
18353 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
18356 MIPS_INVAL("Compact conditional branch/jump");
18357 generate_exception_end(ctx
, EXCP_RI
);
18361 /* Generating branch here as compact branches don't have delay slot */
18362 gen_goto_tb(ctx
, 1, ctx
->btarget
);
18365 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
/* nanoMIPS CP1 Branches */
static void gen_compute_branch_cp1_nm(DisasContext *ctx, uint32_t op,
                                      int32_t ft, int32_t offset)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    case OPC_BC1NEZ:
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

out:
    tcg_temp_free_i64(t0);
}
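
/*
 * Note (illustrative): both CP1 compact branches test bit 0 of the 64-bit
 * FPR ft; the EQZ form inverts that bit before it is copied into bcond,
 * so a non-zero bcond value always means "branch taken".
 */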
18410 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
18413 t0
= tcg_temp_new();
18414 t1
= tcg_temp_new();
18416 gen_load_gpr(t0
, rs
);
18417 gen_load_gpr(t1
, rt
);
18419 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
18420 /* PP.LSXS instructions require shifting */
18421 switch (extract32(ctx
->opcode
, 7, 4)) {
18426 tcg_gen_shli_tl(t0
, t0
, 1);
18433 tcg_gen_shli_tl(t0
, t0
, 2);
18437 tcg_gen_shli_tl(t0
, t0
, 3);
18441 gen_op_addr_add(ctx
, t0
, t0
, t1
);
18443 switch (extract32(ctx
->opcode
, 7, 4)) {
18445 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18447 gen_store_gpr(t0
, rd
);
18451 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18453 gen_store_gpr(t0
, rd
);
18457 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18459 gen_store_gpr(t0
, rd
);
18462 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18464 gen_store_gpr(t0
, rd
);
18468 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18470 gen_store_gpr(t0
, rd
);
18474 gen_load_gpr(t1
, rd
);
18475 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18481 gen_load_gpr(t1
, rd
);
18482 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18488 gen_load_gpr(t1
, rd
);
18489 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18493 /*case NM_LWC1XS:*/
18495 /*case NM_LDC1XS:*/
18497 /*case NM_SWC1XS:*/
18499 /*case NM_SDC1XS:*/
18500 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
18501 check_cp1_enabled(ctx
);
18502 switch (extract32(ctx
->opcode
, 7, 4)) {
18504 /*case NM_LWC1XS:*/
18505 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
18508 /*case NM_LDC1XS:*/
18509 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
18512 /*case NM_SWC1XS:*/
18513 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
18516 /*case NM_SDC1XS:*/
18517 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
18521 generate_exception_err(ctx
, EXCP_CpU
, 1);
18525 generate_exception_end(ctx
, EXCP_RI
);
18533 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
18537 rt
= extract32(ctx
->opcode
, 21, 5);
18538 rs
= extract32(ctx
->opcode
, 16, 5);
18539 rd
= extract32(ctx
->opcode
, 11, 5);
18541 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
18542 generate_exception_end(ctx
, EXCP_RI
);
18545 check_cp1_enabled(ctx
);
18546 switch (extract32(ctx
->opcode
, 0, 3)) {
18548 switch (extract32(ctx
->opcode
, 3, 7)) {
18550 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
18553 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
18556 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
18559 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
18562 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
18565 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
18568 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
18571 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
18574 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
18577 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
18580 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
18583 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
18586 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
18589 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
18592 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
18595 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
18598 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
18601 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
18604 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
18607 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
18610 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
18613 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
18616 generate_exception_end(ctx
, EXCP_RI
);
18621 switch (extract32(ctx
->opcode
, 3, 3)) {
18623 switch (extract32(ctx
->opcode
, 9, 1)) {
18625 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
18628 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
18633 switch (extract32(ctx
->opcode
, 9, 1)) {
18635 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
18638 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
18643 switch (extract32(ctx
->opcode
, 9, 1)) {
18645 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
18648 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
18653 switch (extract32(ctx
->opcode
, 9, 1)) {
18655 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
18658 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
18663 switch (extract32(ctx
->opcode
, 6, 8)) {
18665 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
18668 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
18671 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
18674 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
18677 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
18680 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
18683 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
18686 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
18689 switch (extract32(ctx
->opcode
, 6, 9)) {
18691 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
18694 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
18697 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
18700 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
18703 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
18706 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
18709 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
18712 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
18715 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
18718 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
18721 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
18724 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
18727 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
18730 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
18733 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
18736 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
18739 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
18742 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
18745 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
18748 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
18751 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
18754 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
18757 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
18760 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
18763 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
18766 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
18769 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
18772 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
18775 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
18778 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
18781 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
18784 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
18787 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
18790 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
18793 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
18796 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
18799 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
18802 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
18805 generate_exception_end(ctx
, EXCP_RI
);
18814 switch (extract32(ctx
->opcode
, 3, 3)) {
18815 case NM_CMP_CONDN_S
:
18816 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
18818 case NM_CMP_CONDN_D
:
18819 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
18822 generate_exception_end(ctx
, EXCP_RI
);
18827 generate_exception_end(ctx
, EXCP_RI
);
18832 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
18833 int rd
, int rs
, int rt
)
18836 TCGv t0
= tcg_temp_new();
18837 TCGv v1_t
= tcg_temp_new();
18838 TCGv v2_t
= tcg_temp_new();
18840 gen_load_gpr(v1_t
, rs
);
18841 gen_load_gpr(v2_t
, rt
);
18846 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
18850 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
18854 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
18856 case NM_CMPU_EQ_QB
:
18858 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
18860 case NM_CMPU_LT_QB
:
18862 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
18864 case NM_CMPU_LE_QB
:
18866 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
18868 case NM_CMPGU_EQ_QB
:
18870 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
18871 gen_store_gpr(v1_t
, ret
);
18873 case NM_CMPGU_LT_QB
:
18875 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
18876 gen_store_gpr(v1_t
, ret
);
18878 case NM_CMPGU_LE_QB
:
18880 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
18881 gen_store_gpr(v1_t
, ret
);
18883 case NM_CMPGDU_EQ_QB
:
18885 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
18886 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18887 gen_store_gpr(v1_t
, ret
);
18889 case NM_CMPGDU_LT_QB
:
18891 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
18892 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18893 gen_store_gpr(v1_t
, ret
);
18895 case NM_CMPGDU_LE_QB
:
18897 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
18898 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18899 gen_store_gpr(v1_t
, ret
);
18903 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
18904 gen_store_gpr(v1_t
, ret
);
18908 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18909 gen_store_gpr(v1_t
, ret
);
18913 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18914 gen_store_gpr(v1_t
, ret
);
18918 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18919 gen_store_gpr(v1_t
, ret
);
18923 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18924 gen_store_gpr(v1_t
, ret
);
18928 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
18929 gen_store_gpr(v1_t
, ret
);
18933 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
18934 gen_store_gpr(v1_t
, ret
);
18938 switch (extract32(ctx
->opcode
, 10, 1)) {
18941 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18942 gen_store_gpr(v1_t
, ret
);
18946 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18947 gen_store_gpr(v1_t
, ret
);
18951 case NM_ADDQH_R_PH
:
18953 switch (extract32(ctx
->opcode
, 10, 1)) {
18956 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
18957 gen_store_gpr(v1_t
, ret
);
18961 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
18962 gen_store_gpr(v1_t
, ret
);
18968 switch (extract32(ctx
->opcode
, 10, 1)) {
18971 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
18972 gen_store_gpr(v1_t
, ret
);
18976 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
18977 gen_store_gpr(v1_t
, ret
);
18983 switch (extract32(ctx
->opcode
, 10, 1)) {
18986 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18987 gen_store_gpr(v1_t
, ret
);
18991 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18992 gen_store_gpr(v1_t
, ret
);
18998 switch (extract32(ctx
->opcode
, 10, 1)) {
19001 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19002 gen_store_gpr(v1_t
, ret
);
19006 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19007 gen_store_gpr(v1_t
, ret
);
19011 case NM_ADDUH_R_QB
:
19013 switch (extract32(ctx
->opcode
, 10, 1)) {
19016 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
19017 gen_store_gpr(v1_t
, ret
);
19021 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
19022 gen_store_gpr(v1_t
, ret
);
19026 case NM_SHRAV_R_PH
:
19028 switch (extract32(ctx
->opcode
, 10, 1)) {
19031 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
19032 gen_store_gpr(v1_t
, ret
);
19036 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
19037 gen_store_gpr(v1_t
, ret
);
19041 case NM_SHRAV_R_QB
:
19043 switch (extract32(ctx
->opcode
, 10, 1)) {
19046 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
19047 gen_store_gpr(v1_t
, ret
);
19051 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
19052 gen_store_gpr(v1_t
, ret
);
19058 switch (extract32(ctx
->opcode
, 10, 1)) {
19061 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19062 gen_store_gpr(v1_t
, ret
);
19066 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19067 gen_store_gpr(v1_t
, ret
);
19071 case NM_SUBQH_R_PH
:
19073 switch (extract32(ctx
->opcode
, 10, 1)) {
19076 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
19077 gen_store_gpr(v1_t
, ret
);
19081 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
19082 gen_store_gpr(v1_t
, ret
);
19088 switch (extract32(ctx
->opcode
, 10, 1)) {
19091 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
19092 gen_store_gpr(v1_t
, ret
);
19096 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
19097 gen_store_gpr(v1_t
, ret
);
19103 switch (extract32(ctx
->opcode
, 10, 1)) {
19106 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19107 gen_store_gpr(v1_t
, ret
);
19111 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19112 gen_store_gpr(v1_t
, ret
);
19118 switch (extract32(ctx
->opcode
, 10, 1)) {
19121 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19122 gen_store_gpr(v1_t
, ret
);
19126 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19127 gen_store_gpr(v1_t
, ret
);
19131 case NM_SUBUH_R_QB
:
19133 switch (extract32(ctx
->opcode
, 10, 1)) {
19136 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
19137 gen_store_gpr(v1_t
, ret
);
19141 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
19142 gen_store_gpr(v1_t
, ret
);
19146 case NM_SHLLV_S_PH
:
19148 switch (extract32(ctx
->opcode
, 10, 1)) {
19151 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19152 gen_store_gpr(v1_t
, ret
);
19156 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19157 gen_store_gpr(v1_t
, ret
);
19161 case NM_PRECR_SRA_R_PH_W
:
19163 switch (extract32(ctx
->opcode
, 10, 1)) {
19165 /* PRECR_SRA_PH_W */
19167 TCGv_i32 sa_t
= tcg_const_i32(rd
);
19168 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
19170 gen_store_gpr(v1_t
, rt
);
19171 tcg_temp_free_i32(sa_t
);
19175 /* PRECR_SRA_R_PH_W */
19177 TCGv_i32 sa_t
= tcg_const_i32(rd
);
19178 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
19180 gen_store_gpr(v1_t
, rt
);
19181 tcg_temp_free_i32(sa_t
);
19186 case NM_MULEU_S_PH_QBL
:
19188 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
19189 gen_store_gpr(v1_t
, ret
);
19191 case NM_MULEU_S_PH_QBR
:
19193 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
19194 gen_store_gpr(v1_t
, ret
);
19196 case NM_MULQ_RS_PH
:
19198 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19199 gen_store_gpr(v1_t
, ret
);
19203 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19204 gen_store_gpr(v1_t
, ret
);
19208 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19209 gen_store_gpr(v1_t
, ret
);
19213 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19214 gen_store_gpr(v1_t
, ret
);
19218 gen_load_gpr(t0
, rs
);
19220 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
19222 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19226 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
19227 gen_store_gpr(v1_t
, ret
);
19231 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
19232 gen_store_gpr(v1_t
, ret
);
19236 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
19237 gen_store_gpr(v1_t
, ret
);
19241 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
19242 gen_store_gpr(v1_t
, ret
);
19246 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19247 gen_store_gpr(v1_t
, ret
);
19251 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19252 gen_store_gpr(v1_t
, ret
);
19257 TCGv tv0
= tcg_temp_new();
19258 TCGv tv1
= tcg_temp_new();
19259 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
19261 tcg_gen_movi_tl(tv0
, rd
>> 3);
19262 tcg_gen_movi_tl(tv1
, imm
);
19263 gen_helper_shilo(tv0
, tv1
, cpu_env
);
19266 case NM_MULEQ_S_W_PHL
:
19268 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
19269 gen_store_gpr(v1_t
, ret
);
19271 case NM_MULEQ_S_W_PHR
:
19273 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
19274 gen_store_gpr(v1_t
, ret
);
19278 switch (extract32(ctx
->opcode
, 10, 1)) {
19281 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19282 gen_store_gpr(v1_t
, ret
);
19286 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19287 gen_store_gpr(v1_t
, ret
);
19291 case NM_PRECR_QB_PH
:
19293 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
19294 gen_store_gpr(v1_t
, ret
);
19296 case NM_PRECRQ_QB_PH
:
19298 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
19299 gen_store_gpr(v1_t
, ret
);
19301 case NM_PRECRQ_PH_W
:
19303 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
19304 gen_store_gpr(v1_t
, ret
);
19306 case NM_PRECRQ_RS_PH_W
:
19308 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19309 gen_store_gpr(v1_t
, ret
);
19311 case NM_PRECRQU_S_QB_PH
:
19313 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19314 gen_store_gpr(v1_t
, ret
);
19318 tcg_gen_movi_tl(t0
, rd
);
19319 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
19320 gen_store_gpr(v1_t
, rt
);
19324 tcg_gen_movi_tl(t0
, rd
>> 1);
19325 switch (extract32(ctx
->opcode
, 10, 1)) {
19328 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
19330 gen_store_gpr(v1_t
, rt
);
19333 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
19334 gen_store_gpr(v1_t
, rt
);
19340 tcg_gen_movi_tl(t0
, rd
>> 1);
19341 switch (extract32(ctx
->opcode
, 10, 2)) {
19344 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
19345 gen_store_gpr(v1_t
, rt
);
19349 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
19350 gen_store_gpr(v1_t
, rt
);
19353 generate_exception_end(ctx
, EXCP_RI
);
19359 tcg_gen_movi_tl(t0
, rd
);
19360 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
19361 gen_store_gpr(v1_t
, rt
);
19367 imm
= sextract32(ctx
->opcode
, 11, 11);
19368 imm
= (int16_t)(imm
<< 6) >> 6;
19370 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
19375 generate_exception_end(ctx
, EXCP_RI
);
19380 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19388 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
19389 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
19391 rt
= extract32(ctx
->opcode
, 21, 5);
19392 rs
= extract32(ctx
->opcode
, 16, 5);
19393 rd
= extract32(ctx
->opcode
, 11, 5);
19395 op
= extract32(ctx
->opcode
, 26, 6);
19400 switch (extract32(ctx
->opcode
, 19, 2)) {
19403 generate_exception_end(ctx
, EXCP_RI
);
19406 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
19407 generate_exception_end(ctx
, EXCP_SYSCALL
);
19409 generate_exception_end(ctx
, EXCP_RI
);
19413 generate_exception_end(ctx
, EXCP_BREAK
);
19416 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
19417 gen_helper_do_semihosting(cpu_env
);
19419 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
19420 generate_exception_end(ctx
, EXCP_RI
);
19422 generate_exception_end(ctx
, EXCP_DBp
);
19429 imm
= extract32(ctx
->opcode
, 0, 16);
19431 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
19433 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
19435 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19440 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
19441 extract32(ctx
->opcode
, 1, 20) << 1;
19442 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19443 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19447 switch (ctx
->opcode
& 0x07) {
19449 gen_pool32a0_nanomips_insn(env
, ctx
);
19453 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
19454 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
19458 switch (extract32(ctx
->opcode
, 3, 3)) {
19460 gen_p_lsx(ctx
, rd
, rs
, rt
);
            /* In nanoMIPS, the shift field directly encodes the shift
             * amount, meaning that the supported shift values are in
             * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
            gen_lsa(ctx, OPC_LSA, rd, rs, rt,
                    extract32(ctx->opcode, 9, 2) - 1);
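            /*
             * Illustrative effect, assuming gen_lsa() applies a left
             * shift of (sa + 1) as for the MIPSR6 LSA encoding: an
             * encoded shift field of 0 is passed here as -1, so the
             * generated operation degenerates to rd = rs + rt, and an
             * encoded 3 yields rd = (rs << 3) + rt.
             */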
19470 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
19473 gen_pool32axf_nanomips_insn(env
, ctx
);
19476 generate_exception_end(ctx
, EXCP_RI
);
19481 generate_exception_end(ctx
, EXCP_RI
);
19486 switch (ctx
->opcode
& 0x03) {
19489 offset
= extract32(ctx
->opcode
, 0, 21);
19490 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
19494 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
19497 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
19500 generate_exception_end(ctx
, EXCP_RI
);
19506 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
19507 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
19508 switch (extract32(ctx
->opcode
, 16, 5)) {
19512 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
19518 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
19519 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19525 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
19531 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19534 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19541 t0
= tcg_temp_new();
19543 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19546 tcg_gen_movi_tl(t0
, addr
);
19547 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
19555 t0
= tcg_temp_new();
19556 t1
= tcg_temp_new();
19558 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19561 tcg_gen_movi_tl(t0
, addr
);
19562 gen_load_gpr(t1
, rt
);
19564 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
19571 generate_exception_end(ctx
, EXCP_RI
);
19577 switch (extract32(ctx
->opcode
, 12, 4)) {
19579 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19582 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19585 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19588 switch (extract32(ctx
->opcode
, 20, 1)) {
19590 switch (ctx
->opcode
& 3) {
19592 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
19593 extract32(ctx
->opcode
, 2, 1),
19594 extract32(ctx
->opcode
, 3, 9) << 3);
19597 case NM_RESTORE_JRC
:
19598 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
19599 extract32(ctx
->opcode
, 2, 1),
19600 extract32(ctx
->opcode
, 3, 9) << 3);
19601 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
19602 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
19606 generate_exception_end(ctx
, EXCP_RI
);
19611 generate_exception_end(ctx
, EXCP_RI
);
19616 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19619 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19623 TCGv t0
= tcg_temp_new();
19625 imm
= extract32(ctx
->opcode
, 0, 12);
19626 gen_load_gpr(t0
, rs
);
19627 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
19628 gen_store_gpr(t0
, rt
);
19634 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
19635 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
19639 int shift
= extract32(ctx
->opcode
, 0, 5);
19640 switch (extract32(ctx
->opcode
, 5, 4)) {
19642 if (rt
== 0 && shift
== 0) {
19644 } else if (rt
== 0 && shift
== 3) {
19645 /* EHB - treat as NOP */
19646 } else if (rt
== 0 && shift
== 5) {
19647 /* PAUSE - treat as NOP */
19648 } else if (rt
== 0 && shift
== 6) {
19650 gen_sync(extract32(ctx
->opcode
, 16, 5));
19653 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
19654 extract32(ctx
->opcode
, 0, 5));
19658 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
19659 extract32(ctx
->opcode
, 0, 5));
19662 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
19663 extract32(ctx
->opcode
, 0, 5));
19666 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
19667 extract32(ctx
->opcode
, 0, 5));
19675 TCGv t0
= tcg_temp_new();
19676 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
19677 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
19679 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
19681 gen_load_gpr(t0
, rs
);
19682 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
19685 tcg_temp_free_i32(shift
);
19686 tcg_temp_free_i32(shiftx
);
19687 tcg_temp_free_i32(stripe
);
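        /*
         * Note (illustrative): ROTX is handled entirely by the
         * gen_helper_rotx() call above; the 5-bit shift, 4-bit shiftx and
         * 1-bit stripe fields extracted from the opcode parameterise the
         * generalised rotate performed by the helper.
         */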
19691 switch (((ctx
->opcode
>> 10) & 2) |
19692 (extract32(ctx
->opcode
, 5, 1))) {
19695 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
19696 extract32(ctx
->opcode
, 6, 5));
19699 generate_exception_end(ctx
, EXCP_RI
);
19704 switch (((ctx
->opcode
>> 10) & 2) |
19705 (extract32(ctx
->opcode
, 5, 1))) {
19708 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
19709 extract32(ctx
->opcode
, 6, 5));
19712 generate_exception_end(ctx
, EXCP_RI
);
19717 generate_exception_end(ctx
, EXCP_RI
);
19722 gen_pool32f_nanomips_insn(ctx
);
19727 switch (extract32(ctx
->opcode
, 1, 1)) {
19730 tcg_gen_movi_tl(cpu_gpr
[rt
],
19731 sextract32(ctx
->opcode
, 0, 1) << 31 |
19732 extract32(ctx
->opcode
, 2, 10) << 21 |
19733 extract32(ctx
->opcode
, 12, 9) << 12);
19738 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
19739 extract32(ctx
->opcode
, 2, 10) << 21 |
19740 extract32(ctx
->opcode
, 12, 9) << 12;
19742 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19743 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19750 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
19752 switch (extract32(ctx
->opcode
, 18, 3)) {
19754 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
19757 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
19760 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
19764 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
19769 switch (ctx
->opcode
& 1) {
19771 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
19774 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
19780 switch (ctx
->opcode
& 1) {
19782 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
19785 generate_exception_end(ctx
, EXCP_RI
);
19791 switch (ctx
->opcode
& 0x3) {
19793 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
19796 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
19799 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
19802 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
19807 generate_exception_end(ctx
, EXCP_RI
);
19814 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
19816 switch (extract32(ctx
->opcode
, 12, 4)) {
19820 /* Break the TB to be able to sync copied instructions
19822 ctx
->base
.is_jmp
= DISAS_STOP
;
19825 /* Treat as NOP. */
19829 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
19832 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
19835 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
19838 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
19841 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
19844 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
19847 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
19850 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
19853 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
19856 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
19859 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
19862 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
19865 generate_exception_end(ctx
, EXCP_RI
);
19872 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
19873 extract32(ctx
->opcode
, 0, 8);
19875 switch (extract32(ctx
->opcode
, 8, 3)) {
19877 switch (extract32(ctx
->opcode
, 11, 4)) {
19879 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
19882 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
19885 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
19888 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
19891 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
19894 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
19897 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
19900 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
19903 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
19906 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
19909 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
19912 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
19917 /* Break the TB to be able to sync copied instructions
19919 ctx
->base
.is_jmp
= DISAS_STOP
;
19922 /* Treat as NOP. */
19926 generate_exception_end(ctx
, EXCP_RI
);
19931 switch (extract32(ctx
->opcode
, 11, 4)) {
19936 TCGv t0
= tcg_temp_new();
19937 TCGv t1
= tcg_temp_new();
19939 gen_base_offset_addr(ctx
, t0
, rs
, s
);
19941 switch (extract32(ctx
->opcode
, 11, 4)) {
19943 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
19945 gen_store_gpr(t0
, rt
);
19948 gen_load_gpr(t1
, rt
);
19949 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
19958 switch (ctx
->opcode
& 0x03) {
19960 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
19964 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
19969 switch (ctx
->opcode
& 0x03) {
19971 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
19975 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
19980 check_cp0_enabled(ctx
);
19981 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19982 gen_cache_operation(ctx
, rt
, rs
, s
);
19991 int count
= extract32(ctx
->opcode
, 12, 3);
19994 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
19995 extract32(ctx
->opcode
, 0, 8);
19996 TCGv va
= tcg_temp_new();
19997 TCGv t1
= tcg_temp_new();
19998 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
19999 NM_P_LS_UAWM
? MO_UNALN
: 0;
20001 count
= (count
== 0) ? 8 : count
;
20002 while (counter
!= count
) {
20003 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
20004 int this_offset
= offset
+ (counter
<< 2);
20006 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
20008 switch (extract32(ctx
->opcode
, 11, 1)) {
20010 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
20012 gen_store_gpr(t1
, this_rt
);
20013 if ((this_rt
== rs
) &&
20014 (counter
!= (count
- 1))) {
20015 /* UNPREDICTABLE */
20019 this_rt
= (rt
== 0) ? 0 : this_rt
;
20020 gen_load_gpr(t1
, this_rt
);
20021 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
20032 generate_exception_end(ctx
, EXCP_RI
);
        TCGv t0 = tcg_temp_new();
        int32_t s = sextract32(ctx->opcode, 0, 1) << 21 |
                    extract32(ctx->opcode, 1, 20) << 1;
        rd = (extract32(ctx->opcode, 24, 1)) == 0 ? 4 : 5;
        rt = decode_gpr_gpr4_zero(extract32(ctx->opcode, 25, 1) << 3 |
                                  extract32(ctx->opcode, 21, 3));
        gen_load_gpr(t0, rt);
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        gen_compute_branch_nm(ctx, OPC_BGEZAL, 4, 0, 0, s);
        int32_t s = sextract32(ctx->opcode, 0, 1) << 25 |
                    extract32(ctx->opcode, 1, 24) << 1;
        if ((extract32(ctx->opcode, 25, 1)) == 0) {
            gen_compute_branch_nm(ctx, OPC_BEQ, 4, 0, 0, s);
            gen_compute_branch_nm(ctx, OPC_BGEZAL, 4, 0, 0, s);
        switch (extract32(ctx->opcode, 12, 4)) {
            gen_compute_branch_nm(ctx, OPC_JALR, 4, rs, rt, 0);
            gen_compute_nanomips_pbalrsc_branch(ctx, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        int32_t s = sextract32(ctx->opcode, 0, 1) << 14 |
                    extract32(ctx->opcode, 1, 13) << 1;
        switch (extract32(ctx->opcode, 14, 2)) {
            gen_compute_branch_nm(ctx, OPC_BEQ, 4, rs, rt, s);
            s = sextract32(ctx->opcode, 0, 1) << 14 |
                extract32(ctx->opcode, 1, 13) << 1;
            check_cp1_enabled(ctx);
            switch (extract32(ctx->opcode, 16, 5)) {
                gen_compute_branch_cp1_nm(ctx, OPC_BC1EQZ, rt, s);
                gen_compute_branch_cp1_nm(ctx, OPC_BC1NEZ, rt, s);
            int32_t imm = extract32(ctx->opcode, 1, 13) |
                          extract32(ctx->opcode, 0, 1) << 13;
            gen_compute_branch_nm(ctx, OPC_BPOSGE32, 4, -1, -2,
            generate_exception_end(ctx, EXCP_RI);
        gen_compute_compact_branch_nm(ctx, OPC_BC, rs, rt, s);
        gen_compute_compact_branch_nm(ctx, OPC_BGEC, rs, rt, s);
        if (rs == rt || rt == 0) {
            gen_compute_compact_branch_nm(ctx, OPC_BC, 0, 0, s);
        } else if (rs == 0) {
            gen_compute_compact_branch_nm(ctx, OPC_BEQZC, rt, 0, s);
            gen_compute_compact_branch_nm(ctx, OPC_BGEUC, rs, rt, s);
        int32_t s = sextract32(ctx->opcode, 0, 1) << 14 |
                    extract32(ctx->opcode, 1, 13) << 1;
        switch (extract32(ctx->opcode, 14, 2)) {
            gen_compute_branch_nm(ctx, OPC_BNE, 4, rs, rt, s);
            if (rs != 0 && rt != 0 && rs == rt) {
                ctx->hflags |= MIPS_HFLAG_FBNSLOT;
                gen_compute_compact_branch_nm(ctx, OPC_BLTC, rs, rt, s);
            if (rs == 0 || rs == rt) {
                ctx->hflags |= MIPS_HFLAG_FBNSLOT;
                gen_compute_compact_branch_nm(ctx, OPC_BLTUC, rs, rt, s);
            generate_exception_end(ctx, EXCP_RI);
        int32_t s = sextract32(ctx->opcode, 0, 1) << 11 |
                    extract32(ctx->opcode, 1, 10) << 1;
        uint32_t u = extract32(ctx->opcode, 11, 7);
        gen_compute_imm_branch(ctx, extract32(ctx->opcode, 18, 3),
        generate_exception_end(ctx, EXCP_RI);
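        /*
         * Example of the branch-offset decoding used above: s is assembled
         * as sign_bit << 14 | imm[13:1] << 1, giving a halfword-aligned
         * signed displacement in the range [-16384, 16382]. With the sign
         * bit set and the 13-bit field equal to 1, s = -16384 + 2 = -16382.
         */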
static int decode_nanomips_opc(CPUMIPSState *env, DisasContext *ctx)
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
    int rd = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx->opcode));

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        TCGv tmp = tcg_const_tl(ctx->base.pc_next);
        tcg_gen_st_tl(tmp, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
        tcg_temp_free(tmp);
        generate_exception_end(ctx, EXCP_AdEL);

    op = extract32(ctx->opcode, 10, 6);
        rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
        rs = NANOMIPS_EXTRACT_RS5(ctx->opcode);
        gen_arith(ctx, OPC_ADDU, rt, rs, 0);
        switch (extract32(ctx->opcode, 3, 2)) {
        case NM_P16_SYSCALL:
            if (extract32(ctx->opcode, 2, 1) == 0) {
                generate_exception_end(ctx, EXCP_SYSCALL);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_end(ctx, EXCP_BREAK);
            if (is_uhi(extract32(ctx->opcode, 0, 3))) {
                gen_helper_do_semihosting(cpu_env);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
                generate_exception_end(ctx, EXCP_DBp);
            generate_exception_end(ctx, EXCP_RI);
        int shift = extract32(ctx->opcode, 0, 3);
        shift = (shift == 0) ? 8 : shift;
        switch (extract32(ctx->opcode, 3, 1)) {
        gen_shift_imm(ctx, opc, rt, rs, shift);
        switch (ctx->opcode & 1) {
            gen_pool16c_nanomips_insn(ctx);
            gen_ldxs(ctx, rt, rs, rd);
        switch (extract32(ctx->opcode, 6, 1)) {
            imm = extract32(ctx->opcode, 0, 6) << 2;
            gen_arith_imm(ctx, OPC_ADDIU, rt, 29, imm);
            generate_exception_end(ctx, EXCP_RI);
        switch (extract32(ctx->opcode, 3, 1)) {
            imm = extract32(ctx->opcode, 0, 3) << 2;
            gen_arith_imm(ctx, OPC_ADDIU, rt, rs, imm);
        case NM_P_ADDIURS5:
            rt = extract32(ctx->opcode, 5, 5);
            /* imm = sign_extend(s[3] . s[2:0], from_nbits = 4) */
            imm = (sextract32(ctx->opcode, 4, 1) << 3) |
                  (extract32(ctx->opcode, 0, 3));
            gen_arith_imm(ctx, OPC_ADDIU, rt, rt, imm);
        switch (ctx->opcode & 0x1) {
            gen_arith(ctx, OPC_ADDU, rd, rs, rt);
            gen_arith(ctx, OPC_SUBU, rd, rs, rt);
        rt = (extract32(ctx->opcode, 9, 1) << 3) |
             extract32(ctx->opcode, 5, 3);
        rs = (extract32(ctx->opcode, 4, 1) << 3) |
             extract32(ctx->opcode, 0, 3);
        rt = decode_gpr_gpr4(rt);
        rs = decode_gpr_gpr4(rs);
        switch ((extract32(ctx->opcode, 7, 2) & 0x2) |
                (extract32(ctx->opcode, 3, 1))) {
            gen_arith(ctx, OPC_ADDU, rt, rs, rt);
            gen_r6_muldiv(ctx, R6_OPC_MUL, rt, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        int imm = extract32(ctx->opcode, 0, 7);
        imm = (imm == 0x7f ? -1 : imm);
        tcg_gen_movi_tl(cpu_gpr[rt], imm);
        uint32_t u = extract32(ctx->opcode, 0, 4);
        u = (u == 12) ? 0xff :
            (u == 13) ? 0xffff : u;
        gen_logic_imm(ctx, OPC_ANDI, rt, rs, u);
        offset = extract32(ctx->opcode, 0, 2);
        switch (extract32(ctx->opcode, 2, 2)) {
            gen_ld(ctx, OPC_LB, rt, rs, offset);
            rt = decode_gpr_gpr3_src_store(
                     NANOMIPS_EXTRACT_RD(ctx->opcode));
            gen_st(ctx, OPC_SB, rt, rs, offset);
            gen_ld(ctx, OPC_LBU, rt, rs, offset);
            generate_exception_end(ctx, EXCP_RI);
        offset = extract32(ctx->opcode, 1, 2) << 1;
        switch ((extract32(ctx->opcode, 3, 1) << 1) | (ctx->opcode & 1)) {
            gen_ld(ctx, OPC_LH, rt, rs, offset);
            rt = decode_gpr_gpr3_src_store(
                     NANOMIPS_EXTRACT_RD(ctx->opcode));
            gen_st(ctx, OPC_SH, rt, rs, offset);
            gen_ld(ctx, OPC_LHU, rt, rs, offset);
            generate_exception_end(ctx, EXCP_RI);
        offset = extract32(ctx->opcode, 0, 4) << 2;
        gen_ld(ctx, OPC_LW, rt, rs, offset);
        rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
        offset = extract32(ctx->opcode, 0, 5) << 2;
        gen_ld(ctx, OPC_LW, rt, 29, offset);
        rt = (extract32(ctx->opcode, 9, 1) << 3) |
             extract32(ctx->opcode, 5, 3);
        rs = (extract32(ctx->opcode, 4, 1) << 3) |
             extract32(ctx->opcode, 0, 3);
        offset = (extract32(ctx->opcode, 3, 1) << 3) |
                 (extract32(ctx->opcode, 8, 1) << 2);
        rt = decode_gpr_gpr4(rt);
        rs = decode_gpr_gpr4(rs);
        gen_ld(ctx, OPC_LW, rt, rs, offset);
        rt = (extract32(ctx->opcode, 9, 1) << 3) |
             extract32(ctx->opcode, 5, 3);
        rs = (extract32(ctx->opcode, 4, 1) << 3) |
             extract32(ctx->opcode, 0, 3);
        offset = (extract32(ctx->opcode, 3, 1) << 3) |
                 (extract32(ctx->opcode, 8, 1) << 2);
        rt = decode_gpr_gpr4_zero(rt);
        rs = decode_gpr_gpr4(rs);
        gen_st(ctx, OPC_SW, rt, rs, offset);
        offset = extract32(ctx->opcode, 0, 7) << 2;
        gen_ld(ctx, OPC_LW, rt, 28, offset);
        rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
        offset = extract32(ctx->opcode, 0, 5) << 2;
        gen_st(ctx, OPC_SW, rt, 29, offset);
        rt = decode_gpr_gpr3_src_store(
                 NANOMIPS_EXTRACT_RD(ctx->opcode));
        rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
        offset = extract32(ctx->opcode, 0, 4) << 2;
        gen_st(ctx, OPC_SW, rt, rs, offset);
        rt = decode_gpr_gpr3_src_store(
                 NANOMIPS_EXTRACT_RD(ctx->opcode));
        offset = extract32(ctx->opcode, 0, 7) << 2;
        gen_st(ctx, OPC_SW, rt, 28, offset);
        gen_compute_branch_nm(ctx, OPC_BEQ, 2, 0, 0,
                              (sextract32(ctx->opcode, 0, 1) << 10) |
                              (extract32(ctx->opcode, 1, 9) << 1));
        gen_compute_branch_nm(ctx, OPC_BGEZAL, 2, 0, 0,
                              (sextract32(ctx->opcode, 0, 1) << 10) |
                              (extract32(ctx->opcode, 1, 9) << 1));
        gen_compute_branch_nm(ctx, OPC_BEQ, 2, rt, 0,
                              (sextract32(ctx->opcode, 0, 1) << 7) |
                              (extract32(ctx->opcode, 1, 6) << 1));
        gen_compute_branch_nm(ctx, OPC_BNE, 2, rt, 0,
                              (sextract32(ctx->opcode, 0, 1) << 7) |
                              (extract32(ctx->opcode, 1, 6) << 1));
        switch (ctx->opcode & 0xf) {
            switch (extract32(ctx->opcode, 4, 1)) {
                gen_compute_branch_nm(ctx, OPC_JR, 2,
                                      extract32(ctx->opcode, 5, 5), 0, 0);
                gen_compute_branch_nm(ctx, OPC_JALR, 2,
                                      extract32(ctx->opcode, 5, 5), 31, 0);
        uint32_t opc = extract32(ctx->opcode, 4, 3) <
                       extract32(ctx->opcode, 7, 3) ? OPC_BEQ : OPC_BNE;
        gen_compute_branch_nm(ctx, opc, 2, rs, rt,
                              extract32(ctx->opcode, 0, 4) << 1);
        int count = extract32(ctx->opcode, 0, 4);
        int u = extract32(ctx->opcode, 4, 4) << 4;
        rt = 30 + extract32(ctx->opcode, 9, 1);
        switch (extract32(ctx->opcode, 8, 1)) {
            gen_save(ctx, rt, count, 0, u);
        case NM_RESTORE_JRC16:
            gen_restore(ctx, rt, count, 0, u);
            gen_compute_branch_nm(ctx, OPC_JR, 2, 31, 0, 0);
        static const int gpr2reg1[] = {4, 5, 6, 7};
        static const int gpr2reg2[] = {5, 6, 7, 8};
        int rd2 = extract32(ctx->opcode, 3, 1) << 1 |
                  extract32(ctx->opcode, 8, 1);
        int r1 = gpr2reg1[rd2];
        int r2 = gpr2reg2[rd2];
        int r3 = extract32(ctx->opcode, 4, 1) << 3 |
                 extract32(ctx->opcode, 0, 3);
        int r4 = extract32(ctx->opcode, 9, 1) << 3 |
                 extract32(ctx->opcode, 5, 3);
        TCGv t0 = tcg_temp_new();
        TCGv t1 = tcg_temp_new();
        if (op == NM_MOVEP) {
            rs = decode_gpr_gpr4_zero(r3);
            rt = decode_gpr_gpr4_zero(r4);
            rd = decode_gpr_gpr4(r3);
            re = decode_gpr_gpr4(r4);
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        tcg_gen_mov_tl(cpu_gpr[re], t1);
        return decode_nanomips_32_48_opc(env, ctx);

/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
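/*
 * The effective address above is GPR[base] + GPR[offset], with the base == 0
 * and offset == 0 cases reading the other register directly (GPR 0 is
 * hard-wired to zero, so no add is needed). Together with the MO_UB, MO_TESW,
 * MO_TESL and MO_TEQ access sizes, this appears to implement the DSP ASE
 * indexed loads (LBUX, LHX, LWX and, on MIPS64, LDX).
 */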
20590 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
20591 int ret
, int v1
, int v2
)
20597 /* Treat as NOP. */
20601 v1_t
= tcg_temp_new();
20602 v2_t
= tcg_temp_new();
20604 gen_load_gpr(v1_t
, v1
);
20605 gen_load_gpr(v2_t
, v2
);
20608 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
20609 case OPC_MULT_G_2E
:
20613 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
20615 case OPC_ADDUH_R_QB
:
20616 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
20619 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20621 case OPC_ADDQH_R_PH
:
20622 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20625 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
20627 case OPC_ADDQH_R_W
:
20628 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
20631 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
20633 case OPC_SUBUH_R_QB
:
20634 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
20637 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20639 case OPC_SUBQH_R_PH
:
20640 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20643 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
20645 case OPC_SUBQH_R_W
:
20646 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
20650 case OPC_ABSQ_S_PH_DSP
:
20652 case OPC_ABSQ_S_QB
:
20654 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
20656 case OPC_ABSQ_S_PH
:
20658 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
20662 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
20664 case OPC_PRECEQ_W_PHL
:
20666 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
20667 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
20669 case OPC_PRECEQ_W_PHR
:
20671 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
20672 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
20673 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
20675 case OPC_PRECEQU_PH_QBL
:
20677 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
20679 case OPC_PRECEQU_PH_QBR
:
20681 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
20683 case OPC_PRECEQU_PH_QBLA
:
20685 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
20687 case OPC_PRECEQU_PH_QBRA
:
20689 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
20691 case OPC_PRECEU_PH_QBL
:
20693 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
20695 case OPC_PRECEU_PH_QBR
:
20697 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
20699 case OPC_PRECEU_PH_QBLA
:
20701 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
20703 case OPC_PRECEU_PH_QBRA
:
20705 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
20709 case OPC_ADDU_QB_DSP
:
20713 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20715 case OPC_ADDQ_S_PH
:
20717 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20721 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20725 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20727 case OPC_ADDU_S_QB
:
20729 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20733 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20735 case OPC_ADDU_S_PH
:
20737 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20741 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20743 case OPC_SUBQ_S_PH
:
20745 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20749 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20753 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20755 case OPC_SUBU_S_QB
:
20757 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20761 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20763 case OPC_SUBU_S_PH
:
20765 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20769 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20773 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20777 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
20779 case OPC_RADDU_W_QB
:
20781 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
20785 case OPC_CMPU_EQ_QB_DSP
:
20787 case OPC_PRECR_QB_PH
:
20789 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20791 case OPC_PRECRQ_QB_PH
:
20793 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
20795 case OPC_PRECR_SRA_PH_W
:
20798 TCGv_i32 sa_t
= tcg_const_i32(v2
);
20799 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
20801 tcg_temp_free_i32(sa_t
);
20804 case OPC_PRECR_SRA_R_PH_W
:
20807 TCGv_i32 sa_t
= tcg_const_i32(v2
);
20808 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
20810 tcg_temp_free_i32(sa_t
);
20813 case OPC_PRECRQ_PH_W
:
20815 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
20817 case OPC_PRECRQ_RS_PH_W
:
20819 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20821 case OPC_PRECRQU_S_QB_PH
:
20823 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20827 #ifdef TARGET_MIPS64
20828 case OPC_ABSQ_S_QH_DSP
:
20830 case OPC_PRECEQ_L_PWL
:
20832 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
20834 case OPC_PRECEQ_L_PWR
:
20836 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
20838 case OPC_PRECEQ_PW_QHL
:
20840 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
20842 case OPC_PRECEQ_PW_QHR
:
20844 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
20846 case OPC_PRECEQ_PW_QHLA
:
20848 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
20850 case OPC_PRECEQ_PW_QHRA
:
20852 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
20854 case OPC_PRECEQU_QH_OBL
:
20856 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
20858 case OPC_PRECEQU_QH_OBR
:
20860 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
20862 case OPC_PRECEQU_QH_OBLA
:
20864 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
20866 case OPC_PRECEQU_QH_OBRA
:
20868 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
20870 case OPC_PRECEU_QH_OBL
:
20872 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
20874 case OPC_PRECEU_QH_OBR
:
20876 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
20878 case OPC_PRECEU_QH_OBLA
:
20880 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
20882 case OPC_PRECEU_QH_OBRA
:
20884 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
20886 case OPC_ABSQ_S_OB
:
20888 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
20890 case OPC_ABSQ_S_PW
:
20892 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
20894 case OPC_ABSQ_S_QH
:
20896 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
20900 case OPC_ADDU_OB_DSP
:
20902 case OPC_RADDU_L_OB
:
20904 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
20908 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20910 case OPC_SUBQ_S_PW
:
20912 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20916 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20918 case OPC_SUBQ_S_QH
:
20920 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20924 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20926 case OPC_SUBU_S_OB
:
20928 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20932 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20934 case OPC_SUBU_S_QH
:
20936 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20940 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
20942 case OPC_SUBUH_R_OB
:
20944 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
20948 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20950 case OPC_ADDQ_S_PW
:
20952 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20956 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20958 case OPC_ADDQ_S_QH
:
20960 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20964 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20966 case OPC_ADDU_S_OB
:
20968 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20972 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20974 case OPC_ADDU_S_QH
:
20976 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
20980 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
20982 case OPC_ADDUH_R_OB
:
20984 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
20988 case OPC_CMPU_EQ_OB_DSP
:
20990 case OPC_PRECR_OB_QH
:
20992 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
20994 case OPC_PRECR_SRA_QH_PW
:
20997 TCGv_i32 ret_t
= tcg_const_i32(ret
);
20998 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
20999 tcg_temp_free_i32(ret_t
);
21002 case OPC_PRECR_SRA_R_QH_PW
:
21005 TCGv_i32 sa_v
= tcg_const_i32(ret
);
21006 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
21007 tcg_temp_free_i32(sa_v
);
21010 case OPC_PRECRQ_OB_QH
:
21012 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
21014 case OPC_PRECRQ_PW_L
:
21016 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
21018 case OPC_PRECRQ_QH_PW
:
21020 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
21022 case OPC_PRECRQ_RS_QH_PW
:
21024 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21026 case OPC_PRECRQU_S_OB_QH
:
21028 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21035 tcg_temp_free(v1_t
);
21036 tcg_temp_free(v2_t
);
21039 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
21040 int ret
, int v1
, int v2
)
21048 /* Treat as NOP. */
21052 t0
= tcg_temp_new();
21053 v1_t
= tcg_temp_new();
21054 v2_t
= tcg_temp_new();
21056 tcg_gen_movi_tl(t0
, v1
);
21057 gen_load_gpr(v1_t
, v1
);
21058 gen_load_gpr(v2_t
, v2
);
21061 case OPC_SHLL_QB_DSP
:
21063 op2
= MASK_SHLL_QB(ctx
->opcode
);
21067 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21071 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21075 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21079 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21081 case OPC_SHLL_S_PH
:
21083 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21085 case OPC_SHLLV_S_PH
:
21087 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21091 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21093 case OPC_SHLLV_S_W
:
21095 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21099 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
21103 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21107 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
21111 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21115 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
21117 case OPC_SHRA_R_QB
:
21119 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
21123 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21125 case OPC_SHRAV_R_QB
:
21127 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21131 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
21133 case OPC_SHRA_R_PH
:
21135 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
21139 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21141 case OPC_SHRAV_R_PH
:
21143 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21147 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
21149 case OPC_SHRAV_R_W
:
21151 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21153 default: /* Invalid */
21154 MIPS_INVAL("MASK SHLL.QB");
21155 generate_exception_end(ctx
, EXCP_RI
);
21160 #ifdef TARGET_MIPS64
21161 case OPC_SHLL_OB_DSP
:
21162 op2
= MASK_SHLL_OB(ctx
->opcode
);
21166 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
21170 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
21172 case OPC_SHLL_S_PW
:
21174 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
21176 case OPC_SHLLV_S_PW
:
21178 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
21182 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
21186 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
21190 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
21194 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
21196 case OPC_SHLL_S_QH
:
21198 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
21200 case OPC_SHLLV_S_QH
:
21202 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
21206 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
21210 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
21212 case OPC_SHRA_R_OB
:
21214 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
21216 case OPC_SHRAV_R_OB
:
21218 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
21222 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
21226 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
21228 case OPC_SHRA_R_PW
:
21230 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
21232 case OPC_SHRAV_R_PW
:
21234 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
21238 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
21242 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
21244 case OPC_SHRA_R_QH
:
21246 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
21248 case OPC_SHRAV_R_QH
:
21250 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
21254 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
21258 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
21262 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
21266 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
21268 default: /* Invalid */
21269 MIPS_INVAL("MASK SHLL.OB");
21270 generate_exception_end(ctx
, EXCP_RI
);
21278 tcg_temp_free(v1_t
);
21279 tcg_temp_free(v2_t
);
21282 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21283 int ret
, int v1
, int v2
, int check_ret
)
21289 if ((ret
== 0) && (check_ret
== 1)) {
21290 /* Treat as NOP. */
21294 t0
= tcg_temp_new_i32();
21295 v1_t
= tcg_temp_new();
21296 v2_t
= tcg_temp_new();
21298 tcg_gen_movi_i32(t0
, ret
);
21299 gen_load_gpr(v1_t
, v1
);
21300 gen_load_gpr(v2_t
, v2
);
21303 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
21304 * the same mask and op1. */
21305 case OPC_MULT_G_2E
:
21309 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21312 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21315 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21317 case OPC_MULQ_RS_W
:
21318 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21322 case OPC_DPA_W_PH_DSP
:
21324 case OPC_DPAU_H_QBL
:
21326 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
21328 case OPC_DPAU_H_QBR
:
21330 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
21332 case OPC_DPSU_H_QBL
:
21334 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
21336 case OPC_DPSU_H_QBR
:
21338 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
21342 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21344 case OPC_DPAX_W_PH
:
21346 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21348 case OPC_DPAQ_S_W_PH
:
21350 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21352 case OPC_DPAQX_S_W_PH
:
21354 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21356 case OPC_DPAQX_SA_W_PH
:
21358 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21362 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21364 case OPC_DPSX_W_PH
:
21366 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21368 case OPC_DPSQ_S_W_PH
:
21370 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21372 case OPC_DPSQX_S_W_PH
:
21374 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21376 case OPC_DPSQX_SA_W_PH
:
21378 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21380 case OPC_MULSAQ_S_W_PH
:
21382 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21384 case OPC_DPAQ_SA_L_W
:
21386 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
21388 case OPC_DPSQ_SA_L_W
:
21390 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
21392 case OPC_MAQ_S_W_PHL
:
21394 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
21396 case OPC_MAQ_S_W_PHR
:
21398 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
21400 case OPC_MAQ_SA_W_PHL
:
21402 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
21404 case OPC_MAQ_SA_W_PHR
:
21406 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
21408 case OPC_MULSA_W_PH
:
21410 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
21414 #ifdef TARGET_MIPS64
21415 case OPC_DPAQ_W_QH_DSP
:
21417 int ac
= ret
& 0x03;
21418 tcg_gen_movi_i32(t0
, ac
);
21423 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
21427 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
21431 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
21435 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
21439 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
21441 case OPC_DPAQ_S_W_QH
:
21443 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
21445 case OPC_DPAQ_SA_L_PW
:
21447 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
21449 case OPC_DPAU_H_OBL
:
21451 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
21453 case OPC_DPAU_H_OBR
:
21455 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
21459 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
21461 case OPC_DPSQ_S_W_QH
:
21463 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
21465 case OPC_DPSQ_SA_L_PW
:
21467 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
21469 case OPC_DPSU_H_OBL
:
21471 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
21473 case OPC_DPSU_H_OBR
:
21475 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
21477 case OPC_MAQ_S_L_PWL
:
21479 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
21481 case OPC_MAQ_S_L_PWR
:
21483 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
21485 case OPC_MAQ_S_W_QHLL
:
21487 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
21489 case OPC_MAQ_SA_W_QHLL
:
21491 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
21493 case OPC_MAQ_S_W_QHLR
:
21495 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
21497 case OPC_MAQ_SA_W_QHLR
:
21499 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
21501 case OPC_MAQ_S_W_QHRL
:
21503 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
21505 case OPC_MAQ_SA_W_QHRL
:
21507 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
21509 case OPC_MAQ_S_W_QHRR
:
21511 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
21513 case OPC_MAQ_SA_W_QHRR
:
21515 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
21517 case OPC_MULSAQ_S_L_PW
:
21519 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
21521 case OPC_MULSAQ_S_W_QH
:
21523 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
21529 case OPC_ADDU_QB_DSP
:
21531 case OPC_MULEU_S_PH_QBL
:
21533 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21535 case OPC_MULEU_S_PH_QBR
:
21537 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21539 case OPC_MULQ_RS_PH
:
21541 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21543 case OPC_MULEQ_S_W_PHL
:
21545 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21547 case OPC_MULEQ_S_W_PHR
:
21549 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21551 case OPC_MULQ_S_PH
:
21553 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21557 #ifdef TARGET_MIPS64
21558 case OPC_ADDU_OB_DSP
:
21560 case OPC_MULEQ_S_PW_QHL
:
21562 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21564 case OPC_MULEQ_S_PW_QHR
:
21566 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21568 case OPC_MULEU_S_QH_OBL
:
21570 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21572 case OPC_MULEU_S_QH_OBR
:
21574 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21576 case OPC_MULQ_RS_QH
:
21578 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21585 tcg_temp_free_i32(t0
);
21586 tcg_temp_free(v1_t
);
21587 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
    /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
    case OPC_ABSQ_S_PH_DSP:
        gen_helper_bitrev(cpu_gpr[ret], val_t);
        target_long result;
        imm = (ctx->opcode >> 16) & 0xFF;
        result = (uint32_t)imm << 24 |
                 (uint32_t)imm << 16 |
                 (uint32_t)imm << 8 |
        result = (int32_t)result;
        tcg_gen_movi_tl(cpu_gpr[ret], result);
        tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
        imm = (ctx->opcode >> 16) & 0x03FF;
        imm = (int16_t)(imm << 6) >> 6;
        tcg_gen_movi_tl(cpu_gpr[ret],
                        (target_long)((int32_t)imm << 16 |
        tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
#ifdef TARGET_MIPS64
    case OPC_ABSQ_S_QH_DSP:
        imm = (ctx->opcode >> 16) & 0xFF;
        temp = ((uint64_t)imm << 8) | (uint64_t)imm;
        temp = (temp << 16) | temp;
        temp = (temp << 32) | temp;
        tcg_gen_movi_tl(cpu_gpr[ret], temp);
        imm = (ctx->opcode >> 16) & 0x03FF;
        imm = (int16_t)(imm << 6) >> 6;
        temp = ((target_long)imm << 32) |
               ((target_long)imm & 0xFFFFFFFF);
        tcg_gen_movi_tl(cpu_gpr[ret], temp);
        imm = (ctx->opcode >> 16) & 0x03FF;
        imm = (int16_t)(imm << 6) >> 6;
        temp = ((uint64_t)(uint16_t)imm << 48) |
               ((uint64_t)(uint16_t)imm << 32) |
               ((uint64_t)(uint16_t)imm << 16) |
               (uint64_t)(uint16_t)imm;
        tcg_gen_movi_tl(cpu_gpr[ret], temp);
        tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_ext32u_i64(cpu_gpr[ret], val_t);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
        tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
        tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
    tcg_temp_free(val_t);
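/*
 * Worked example for the byte-replication patterns above: an 8-bit immediate
 * such as 0xAB becomes 0xABABABAB (imm << 24 | imm << 16 | imm << 8 | imm),
 * and the register variant builds the same value from val_t with two
 * shift-and-OR steps (by 8, then by 16) before sign-extending the 32-bit
 * result. The halfword forms replicate a 16-bit value with a single shift
 * by 16.
 */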
21728 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
21729 uint32_t op1
, uint32_t op2
,
21730 int ret
, int v1
, int v2
, int check_ret
)
21736 if ((ret
== 0) && (check_ret
== 1)) {
21737 /* Treat as NOP. */
21741 t1
= tcg_temp_new();
21742 v1_t
= tcg_temp_new();
21743 v2_t
= tcg_temp_new();
21745 gen_load_gpr(v1_t
, v1
);
21746 gen_load_gpr(v2_t
, v2
);
21749 case OPC_CMPU_EQ_QB_DSP
:
21751 case OPC_CMPU_EQ_QB
:
21753 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
21755 case OPC_CMPU_LT_QB
:
21757 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
21759 case OPC_CMPU_LE_QB
:
21761 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
21763 case OPC_CMPGU_EQ_QB
:
21765 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21767 case OPC_CMPGU_LT_QB
:
21769 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21771 case OPC_CMPGU_LE_QB
:
21773 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21775 case OPC_CMPGDU_EQ_QB
:
21777 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
21778 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
21779 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
21780 tcg_gen_shli_tl(t1
, t1
, 24);
21781 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
21783 case OPC_CMPGDU_LT_QB
:
21785 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
21786 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
21787 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
21788 tcg_gen_shli_tl(t1
, t1
, 24);
21789 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
21791 case OPC_CMPGDU_LE_QB
:
21793 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
21794 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
21795 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
21796 tcg_gen_shli_tl(t1
, t1
, 24);
21797 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
21799 case OPC_CMP_EQ_PH
:
21801 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
21803 case OPC_CMP_LT_PH
:
21805 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
21807 case OPC_CMP_LE_PH
:
21809 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
21813 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21817 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21819 case OPC_PACKRL_PH
:
21821 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21825 #ifdef TARGET_MIPS64
21826 case OPC_CMPU_EQ_OB_DSP
:
21828 case OPC_CMP_EQ_PW
:
21830 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
21832 case OPC_CMP_LT_PW
:
21834 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
21836 case OPC_CMP_LE_PW
:
21838 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
21840 case OPC_CMP_EQ_QH
:
21842 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
21844 case OPC_CMP_LT_QH
:
21846 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
21848 case OPC_CMP_LE_QH
:
21850 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
21852 case OPC_CMPGDU_EQ_OB
:
21854 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21856 case OPC_CMPGDU_LT_OB
:
21858 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21860 case OPC_CMPGDU_LE_OB
:
21862 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21864 case OPC_CMPGU_EQ_OB
:
21866 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21868 case OPC_CMPGU_LT_OB
:
21870 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21872 case OPC_CMPGU_LE_OB
:
21874 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21876 case OPC_CMPU_EQ_OB
:
21878 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
21880 case OPC_CMPU_LT_OB
:
21882 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
21884 case OPC_CMPU_LE_OB
:
21886 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
21888 case OPC_PACKRL_PW
:
21890 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
21894 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21898 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21902 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21910 tcg_temp_free(v1_t
);
21911 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
    /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 64 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
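/*
 * Worked example for the 32-bit APPEND case above: the deposit keeps the low
 * sa bits of rs and places the old rt above them, i.e.
 * rt = (rt << sa) | (rs & ((1 << sa) - 1)), sign-extended afterwards.
 * With sa = 8, rt = 0x00123456 and rs = 0xAABBCCDD, the new rt is 0x123456DD.
 */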
22002 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22003 int ret
, int v1
, int v2
, int check_ret
)
22012 if ((ret
== 0) && (check_ret
== 1)) {
22013 /* Treat as NOP. */
22017 t0
= tcg_temp_new();
22018 t1
= tcg_temp_new();
22019 v1_t
= tcg_temp_new();
22020 v2_t
= tcg_temp_new();
22022 gen_load_gpr(v1_t
, v1
);
22023 gen_load_gpr(v2_t
, v2
);
22026 case OPC_EXTR_W_DSP
:
22030 tcg_gen_movi_tl(t0
, v2
);
22031 tcg_gen_movi_tl(t1
, v1
);
22032 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22035 tcg_gen_movi_tl(t0
, v2
);
22036 tcg_gen_movi_tl(t1
, v1
);
22037 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22039 case OPC_EXTR_RS_W
:
22040 tcg_gen_movi_tl(t0
, v2
);
22041 tcg_gen_movi_tl(t1
, v1
);
22042 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22045 tcg_gen_movi_tl(t0
, v2
);
22046 tcg_gen_movi_tl(t1
, v1
);
22047 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22049 case OPC_EXTRV_S_H
:
22050 tcg_gen_movi_tl(t0
, v2
);
22051 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22054 tcg_gen_movi_tl(t0
, v2
);
22055 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22057 case OPC_EXTRV_R_W
:
22058 tcg_gen_movi_tl(t0
, v2
);
22059 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22061 case OPC_EXTRV_RS_W
:
22062 tcg_gen_movi_tl(t0
, v2
);
22063 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22066 tcg_gen_movi_tl(t0
, v2
);
22067 tcg_gen_movi_tl(t1
, v1
);
22068 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22071 tcg_gen_movi_tl(t0
, v2
);
22072 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22075 tcg_gen_movi_tl(t0
, v2
);
22076 tcg_gen_movi_tl(t1
, v1
);
22077 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22080 tcg_gen_movi_tl(t0
, v2
);
22081 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22084 imm
= (ctx
->opcode
>> 20) & 0x3F;
22085 tcg_gen_movi_tl(t0
, ret
);
22086 tcg_gen_movi_tl(t1
, imm
);
22087 gen_helper_shilo(t0
, t1
, cpu_env
);
22090 tcg_gen_movi_tl(t0
, ret
);
22091 gen_helper_shilo(t0
, v1_t
, cpu_env
);
22094 tcg_gen_movi_tl(t0
, ret
);
22095 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
22098 imm
= (ctx
->opcode
>> 11) & 0x3FF;
22099 tcg_gen_movi_tl(t0
, imm
);
22100 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
22103 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22104 tcg_gen_movi_tl(t0
, imm
);
22105 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
22109 #ifdef TARGET_MIPS64
22110 case OPC_DEXTR_W_DSP
:
22114 tcg_gen_movi_tl(t0
, ret
);
22115 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
22119 int shift
= (ctx
->opcode
>> 19) & 0x7F;
22120 int ac
= (ctx
->opcode
>> 11) & 0x03;
22121 tcg_gen_movi_tl(t0
, shift
);
22122 tcg_gen_movi_tl(t1
, ac
);
22123 gen_helper_dshilo(t0
, t1
, cpu_env
);
22128 int ac
= (ctx
->opcode
>> 11) & 0x03;
22129 tcg_gen_movi_tl(t0
, ac
);
22130 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
22134 tcg_gen_movi_tl(t0
, v2
);
22135 tcg_gen_movi_tl(t1
, v1
);
22137 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22140 tcg_gen_movi_tl(t0
, v2
);
22141 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22144 tcg_gen_movi_tl(t0
, v2
);
22145 tcg_gen_movi_tl(t1
, v1
);
22146 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22149 tcg_gen_movi_tl(t0
, v2
);
22150 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22153 tcg_gen_movi_tl(t0
, v2
);
22154 tcg_gen_movi_tl(t1
, v1
);
22155 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22157 case OPC_DEXTR_R_L
:
22158 tcg_gen_movi_tl(t0
, v2
);
22159 tcg_gen_movi_tl(t1
, v1
);
22160 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22162 case OPC_DEXTR_RS_L
:
22163 tcg_gen_movi_tl(t0
, v2
);
22164 tcg_gen_movi_tl(t1
, v1
);
22165 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22168 tcg_gen_movi_tl(t0
, v2
);
22169 tcg_gen_movi_tl(t1
, v1
);
22170 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22172 case OPC_DEXTR_R_W
:
22173 tcg_gen_movi_tl(t0
, v2
);
22174 tcg_gen_movi_tl(t1
, v1
);
22175 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22177 case OPC_DEXTR_RS_W
:
22178 tcg_gen_movi_tl(t0
, v2
);
22179 tcg_gen_movi_tl(t1
, v1
);
22180 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22182 case OPC_DEXTR_S_H
:
22183 tcg_gen_movi_tl(t0
, v2
);
22184 tcg_gen_movi_tl(t1
, v1
);
22185 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22187 case OPC_DEXTRV_S_H
:
22188 tcg_gen_movi_tl(t0
, v2
);
22189 tcg_gen_movi_tl(t1
, v1
);
22190 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22193 tcg_gen_movi_tl(t0
, v2
);
22194 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22196 case OPC_DEXTRV_R_L
:
22197 tcg_gen_movi_tl(t0
, v2
);
22198 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22200 case OPC_DEXTRV_RS_L
:
22201 tcg_gen_movi_tl(t0
, v2
);
22202 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22205 tcg_gen_movi_tl(t0
, v2
);
22206 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22208 case OPC_DEXTRV_R_W
:
22209 tcg_gen_movi_tl(t0
, v2
);
22210 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22212 case OPC_DEXTRV_RS_W
:
22213 tcg_gen_movi_tl(t0
, v2
);
22214 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22223 tcg_temp_free(v1_t
);
22224 tcg_temp_free(v2_t
);
22227 /* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        op2 = MASK_R6_MULDIV(ctx->opcode);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
        gen_cond_move(ctx, op1, rd, rs, rt);
        if (rt == 0 && sa == 1) {
            /* The major opcode and function field are shared with the
               pre-R6 MFHI/MTHI; we additionally need to check the other
               fields. */
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
                generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        if (rt == 0 && sa == 1) {
            /* The major opcode and function field are shared with the
               pre-R6 MFHI/MTHI; we additionally need to check the other
               fields. */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_R6_MULDIV(ctx->opcode);
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_MOVN: /* Conditional move */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
    case OPC_MFHI: /* Move from HI/LO */
        gen_HILO(ctx, op1, rs & 3, rd);
    case OPC_MTLO: /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_muldiv(ctx, op1, 0, rs, rt);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL: /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_SLLV: /* Shifts */
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SLT: /* Set on less than */
        gen_slt(ctx, op1, rd, rs, rt);
    case OPC_AND: /* Logic */
        gen_logic(ctx, op1, rd, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
    case OPC_TGE: /* Traps */
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_0e0i(pmon, sa);
        generate_exception_end(ctx, EXCP_SYSCALL);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS2);
        gen_sync(extract32(ctx->opcode, 6, 5));
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
            decode_opc_special_legacy(env, ctx);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD: /* Multiply and add/sub */
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
        /* hint codes 24-31 are reserved and signal RI */
        generate_exception_end(ctx, EXCP_RI);
        /* Treat as NOP. */
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        /* Treat as NOP. */
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN_END:
            gen_align(ctx, 32, rd, rs, rt, sa & 3);
            gen_bitswap(ctx, op2, rd, rt);
#if defined(TARGET_MIPS64)
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        check_mips_64(ctx);
        /* Treat as NOP. */
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN_END:
            gen_align(ctx, 64, rd, rs, rt, sa & 7);
            gen_bitswap(ctx, op2, rd, rt);
    default: /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    case OPC_DIVU_G_2E:
    case OPC_MODU_G_2E:
    case OPC_MULT_G_2E:
    case OPC_MULTU_G_2E:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
        if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
            op2 = MASK_ADDUH_QB(ctx->opcode);
            case OPC_ADDUH_R_QB:
            case OPC_ADDQH_R_PH:
            case OPC_ADDQH_R_W:
            case OPC_SUBUH_R_QB:
            case OPC_SUBQH_R_PH:
            case OPC_SUBQH_R_W:
                gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            case OPC_MULQ_RS_W:
                gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
                MIPS_INVAL("MASK ADDUH.QB");
                generate_exception_end(ctx, EXCP_RI);
        } else if (ctx->insn_flags & INSN_LOONGSON2E) {
            gen_loongson_integer(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_LX(ctx->opcode);
#if defined(TARGET_MIPS64)
            gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK LX");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ABSQ_S_PH_DSP:
        op2 = MASK_ABSQ_S_PH(ctx->opcode);
        case OPC_ABSQ_S_QB:
        case OPC_ABSQ_S_PH:
        case OPC_PRECEQ_W_PHL:
        case OPC_PRECEQ_W_PHR:
        case OPC_PRECEQU_PH_QBL:
        case OPC_PRECEQU_PH_QBR:
        case OPC_PRECEQU_PH_QBLA:
        case OPC_PRECEQU_PH_QBRA:
        case OPC_PRECEU_PH_QBL:
        case OPC_PRECEU_PH_QBR:
        case OPC_PRECEU_PH_QBLA:
        case OPC_PRECEU_PH_QBRA:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            MIPS_INVAL("MASK ABSQ_S.PH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_QB_DSP:
        op2 = MASK_ADDU_QB(ctx->opcode);
        case OPC_ADDQ_S_PH:
        case OPC_ADDU_S_QB:
        case OPC_ADDU_S_PH:
        case OPC_SUBQ_S_PH:
        case OPC_SUBU_S_QB:
        case OPC_SUBU_S_PH:
        case OPC_RADDU_W_QB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEU_S_PH_QBL:
        case OPC_MULEU_S_PH_QBR:
        case OPC_MULQ_RS_PH:
        case OPC_MULEQ_S_W_PHL:
        case OPC_MULEQ_S_W_PHR:
        case OPC_MULQ_S_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_QB_DSP:
        op2 = MASK_CMPU_EQ_QB(ctx->opcode);
        case OPC_PRECR_SRA_PH_W:
        case OPC_PRECR_SRA_R_PH_W:
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        case OPC_EXTR_RS_W:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
    case OPC_DDIV_G_2E:
    case OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E:
    case OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E:
    case OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA
:
23091 case OPC_ABSQ_S_OB
:
23092 case OPC_ABSQ_S_PW
:
23093 case OPC_ABSQ_S_QH
:
23094 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23102 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
23104 default: /* Invalid */
23105 MIPS_INVAL("MASK ABSQ_S.QH");
23106 generate_exception_end(ctx
, EXCP_RI
);
23110 case OPC_ADDU_OB_DSP
:
23111 op2
= MASK_ADDU_OB(ctx
->opcode
);
23113 case OPC_RADDU_L_OB
:
23115 case OPC_SUBQ_S_PW
:
23117 case OPC_SUBQ_S_QH
:
23119 case OPC_SUBU_S_OB
:
23121 case OPC_SUBU_S_QH
:
23123 case OPC_SUBUH_R_OB
:
23125 case OPC_ADDQ_S_PW
:
23127 case OPC_ADDQ_S_QH
:
23129 case OPC_ADDU_S_OB
:
23131 case OPC_ADDU_S_QH
:
23133 case OPC_ADDUH_R_OB
:
23134 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23136 case OPC_MULEQ_S_PW_QHL
:
23137 case OPC_MULEQ_S_PW_QHR
:
23138 case OPC_MULEU_S_QH_OBL
:
23139 case OPC_MULEU_S_QH_OBR
:
23140 case OPC_MULQ_RS_QH
:
23141 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23143 default: /* Invalid */
23144 MIPS_INVAL("MASK ADDU.OB");
23145 generate_exception_end(ctx
, EXCP_RI
);
23149 case OPC_CMPU_EQ_OB_DSP
:
23150 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
23152 case OPC_PRECR_SRA_QH_PW
:
23153 case OPC_PRECR_SRA_R_QH_PW
:
23154 /* Return value is rt. */
23155 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
23157 case OPC_PRECR_OB_QH
:
23158 case OPC_PRECRQ_OB_QH
:
23159 case OPC_PRECRQ_PW_L
:
23160 case OPC_PRECRQ_QH_PW
:
23161 case OPC_PRECRQ_RS_QH_PW
:
23162 case OPC_PRECRQU_S_OB_QH
:
23163 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23165 case OPC_CMPU_EQ_OB
:
23166 case OPC_CMPU_LT_OB
:
23167 case OPC_CMPU_LE_OB
:
23168 case OPC_CMP_EQ_QH
:
23169 case OPC_CMP_LT_QH
:
23170 case OPC_CMP_LE_QH
:
23171 case OPC_CMP_EQ_PW
:
23172 case OPC_CMP_LT_PW
:
23173 case OPC_CMP_LE_PW
:
23174 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23176 case OPC_CMPGDU_EQ_OB
:
23177 case OPC_CMPGDU_LT_OB
:
23178 case OPC_CMPGDU_LE_OB
:
23179 case OPC_CMPGU_EQ_OB
:
23180 case OPC_CMPGU_LT_OB
:
23181 case OPC_CMPGU_LE_OB
:
23182 case OPC_PACKRL_PW
:
23186 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23188 default: /* Invalid */
23189 MIPS_INVAL("MASK CMPU_EQ.OB");
23190 generate_exception_end(ctx
, EXCP_RI
);
23194 case OPC_DAPPEND_DSP
:
23195 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
23197 case OPC_DEXTR_W_DSP
:
23198 op2
= MASK_DEXTR_W(ctx
->opcode
);
23205 case OPC_DEXTR_R_L
:
23206 case OPC_DEXTR_RS_L
:
23208 case OPC_DEXTR_R_W
:
23209 case OPC_DEXTR_RS_W
:
23210 case OPC_DEXTR_S_H
:
23212 case OPC_DEXTRV_R_L
:
23213 case OPC_DEXTRV_RS_L
:
23214 case OPC_DEXTRV_S_H
:
23216 case OPC_DEXTRV_R_W
:
23217 case OPC_DEXTRV_RS_W
:
23218 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
23223 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23225 default: /* Invalid */
23226 MIPS_INVAL("MASK EXTR.W");
23227 generate_exception_end(ctx
, EXCP_RI
);
23231 case OPC_DPAQ_W_QH_DSP
:
23232 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
23234 case OPC_DPAU_H_OBL
:
23235 case OPC_DPAU_H_OBR
:
23236 case OPC_DPSU_H_OBL
:
23237 case OPC_DPSU_H_OBR
:
23239 case OPC_DPAQ_S_W_QH
:
23241 case OPC_DPSQ_S_W_QH
:
23242 case OPC_MULSAQ_S_W_QH
:
23243 case OPC_DPAQ_SA_L_PW
:
23244 case OPC_DPSQ_SA_L_PW
:
23245 case OPC_MULSAQ_S_L_PW
:
23246 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23248 case OPC_MAQ_S_W_QHLL
:
23249 case OPC_MAQ_S_W_QHLR
:
23250 case OPC_MAQ_S_W_QHRL
:
23251 case OPC_MAQ_S_W_QHRR
:
23252 case OPC_MAQ_SA_W_QHLL
:
23253 case OPC_MAQ_SA_W_QHLR
:
23254 case OPC_MAQ_SA_W_QHRL
:
23255 case OPC_MAQ_SA_W_QHRR
:
23256 case OPC_MAQ_S_L_PWL
:
23257 case OPC_MAQ_S_L_PWR
:
23262 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23264 default: /* Invalid */
23265 MIPS_INVAL("MASK DPAQ.W.QH");
23266 generate_exception_end(ctx
, EXCP_RI
);
23270 case OPC_DINSV_DSP
:
23271 op2
= MASK_INSV(ctx
->opcode
);
23282 t0
= tcg_temp_new();
23283 t1
= tcg_temp_new();
23285 gen_load_gpr(t0
, rt
);
23286 gen_load_gpr(t1
, rs
);
23288 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
23294 default: /* Invalid */
23295 MIPS_INVAL("MASK DINSV");
23296 generate_exception_end(ctx
, EXCP_RI
);
23300 case OPC_SHLL_OB_DSP
:
23301 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
23304 default: /* Invalid */
23305 MIPS_INVAL("special3_legacy");
23306 generate_exception_end(ctx
, EXCP_RI
);
23311 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
23313 int rs
, rt
, rd
, sa
;
23317 rs
= (ctx
->opcode
>> 21) & 0x1f;
23318 rt
= (ctx
->opcode
>> 16) & 0x1f;
23319 rd
= (ctx
->opcode
>> 11) & 0x1f;
23320 sa
= (ctx
->opcode
>> 6) & 0x1f;
23321 imm
= sextract32(ctx
->opcode
, 7, 9);
23323 op1
= MASK_SPECIAL3(ctx
->opcode
);
23326 * EVA loads and stores overlap Loongson 2E instructions decoded by
23327 * decode_opc_special3_legacy(), so be careful to allow their decoding when
23334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23342 check_cp0_enabled(ctx
);
23343 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23347 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23352 check_cp0_enabled(ctx
);
23353 gen_st(ctx
, op1
, rt
, rs
, imm
);
23356 check_cp0_enabled(ctx
);
23357 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23360 check_cp0_enabled(ctx
);
23361 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23362 gen_cache_operation(ctx
, rt
, rs
, imm
);
23364 /* Treat as NOP. */
23367 check_cp0_enabled(ctx
);
23368 /* Treat as NOP. */
23376 check_insn(ctx
, ISA_MIPS32R2
);
23377 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
23380 op2
= MASK_BSHFL(ctx
->opcode
);
23383 case OPC_ALIGN_END
:
23385 check_insn(ctx
, ISA_MIPS32R6
);
23386 decode_opc_special3_r6(env
, ctx
);
23389 check_insn(ctx
, ISA_MIPS32R2
);
23390 gen_bshfl(ctx
, op2
, rt
, rd
);
23394 #if defined(TARGET_MIPS64)
23401 check_insn(ctx
, ISA_MIPS64R2
);
23402 check_mips_64(ctx
);
23403 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
23406 op2
= MASK_DBSHFL(ctx
->opcode
);
23409 case OPC_DALIGN_END
:
23411 check_insn(ctx
, ISA_MIPS32R6
);
23412 decode_opc_special3_r6(env
, ctx
);
23415 check_insn(ctx
, ISA_MIPS64R2
);
23416 check_mips_64(ctx
);
23417 op2
= MASK_DBSHFL(ctx
->opcode
);
23418 gen_bshfl(ctx
, op2
, rt
, rd
);
23424 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
23429 TCGv t0
= tcg_temp_new();
23430 TCGv t1
= tcg_temp_new();
23432 gen_load_gpr(t0
, rt
);
23433 gen_load_gpr(t1
, rs
);
23434 gen_helper_fork(t0
, t1
);
23442 TCGv t0
= tcg_temp_new();
23444 gen_load_gpr(t0
, rs
);
23445 gen_helper_yield(t0
, cpu_env
, t0
);
23446 gen_store_gpr(t0
, rd
);
23451 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23452 decode_opc_special3_r6(env
, ctx
);
23454 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
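/*
 * The test above is the usual "borrow into the sign bit" trick:
 * eval_zero_or_big puts a 1 in the lowest bit of every element lane and
 * eval_big masks each lane's sign bit, so
 * (x - eval_zero_or_big) & ~x & eval_big is non-zero exactly when some
 * lane of the 128-bit register (checked as two 64-bit halves) is zero;
 * the setcond/trunc pair then turns that into the 0/1 value the branch
 * code expects in tresult.
 */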
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt << 1], msa_wr_d[(wt << 1) + 1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->base.pc_next + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
23566 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
23568 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
23569 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
23570 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23571 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23573 TCGv_i32 twd
= tcg_const_i32(wd
);
23574 TCGv_i32 tws
= tcg_const_i32(ws
);
23575 TCGv_i32 ti8
= tcg_const_i32(i8
);
23577 switch (MASK_MSA_I8(ctx
->opcode
)) {
23579 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
23582 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
23585 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
23588 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
23591 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
23594 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
23597 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
23603 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
23604 if (df
== DF_DOUBLE
) {
23605 generate_exception_end(ctx
, EXCP_RI
);
23607 TCGv_i32 tdf
= tcg_const_i32(df
);
23608 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
23609 tcg_temp_free_i32(tdf
);
23614 MIPS_INVAL("MSA instruction");
23615 generate_exception_end(ctx
, EXCP_RI
);
23619 tcg_temp_free_i32(twd
);
23620 tcg_temp_free_i32(tws
);
23621 tcg_temp_free_i32(ti8
);
23624 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
23626 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23627 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
23628 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
23629 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
23630 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23631 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23633 TCGv_i32 tdf
= tcg_const_i32(df
);
23634 TCGv_i32 twd
= tcg_const_i32(wd
);
23635 TCGv_i32 tws
= tcg_const_i32(ws
);
23636 TCGv_i32 timm
= tcg_temp_new_i32();
23637 tcg_gen_movi_i32(timm
, u5
);
23639 switch (MASK_MSA_I5(ctx
->opcode
)) {
23641 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23644 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23646 case OPC_MAXI_S_df
:
23647 tcg_gen_movi_i32(timm
, s5
);
23648 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23650 case OPC_MAXI_U_df
:
23651 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23653 case OPC_MINI_S_df
:
23654 tcg_gen_movi_i32(timm
, s5
);
23655 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23657 case OPC_MINI_U_df
:
23658 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23661 tcg_gen_movi_i32(timm
, s5
);
23662 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23664 case OPC_CLTI_S_df
:
23665 tcg_gen_movi_i32(timm
, s5
);
23666 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23668 case OPC_CLTI_U_df
:
23669 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23671 case OPC_CLEI_S_df
:
23672 tcg_gen_movi_i32(timm
, s5
);
23673 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23675 case OPC_CLEI_U_df
:
23676 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23680 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
23681 tcg_gen_movi_i32(timm
, s10
);
23682 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
23686 MIPS_INVAL("MSA instruction");
23687 generate_exception_end(ctx
, EXCP_RI
);
23691 tcg_temp_free_i32(tdf
);
23692 tcg_temp_free_i32(twd
);
23693 tcg_temp_free_i32(tws
);
23694 tcg_temp_free_i32(timm
);
23697 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
23699 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23700 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
23701 uint32_t df
= 0, m
= 0;
23702 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23703 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23710 if ((dfm
& 0x40) == 0x00) {
23713 } else if ((dfm
& 0x60) == 0x40) {
23716 } else if ((dfm
& 0x70) == 0x60) {
23719 } else if ((dfm
& 0x78) == 0x70) {
23723 generate_exception_end(ctx
, EXCP_RI
);
23727 tdf
= tcg_const_i32(df
);
23728 tm
= tcg_const_i32(m
);
23729 twd
= tcg_const_i32(wd
);
23730 tws
= tcg_const_i32(ws
);
23732 switch (MASK_MSA_BIT(ctx
->opcode
)) {
23734 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
23737 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
23740 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
23743 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
23746 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
23749 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
23751 case OPC_BINSLI_df
:
23752 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
23754 case OPC_BINSRI_df
:
23755 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
23758 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
23761 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
23764 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
23767 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
23770 MIPS_INVAL("MSA instruction");
23771 generate_exception_end(ctx
, EXCP_RI
);
23775 tcg_temp_free_i32(tdf
);
23776 tcg_temp_free_i32(tm
);
23777 tcg_temp_free_i32(twd
);
23778 tcg_temp_free_i32(tws
);
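/*
 * In gen_msa_bit() above, the 7-bit dfm field packs the data format and
 * the bit index m together: 0mmmmmm selects doubleword elements (6-bit m),
 * 10mmmmm words, 110mmmm halfwords and 1110mmm bytes, which is what the
 * (dfm & 0x40 / 0x60 / 0x70 / 0x78) tests decode before tdf and tm are
 * built for the helpers.
 */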
23781 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
23783 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23784 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
23785 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
23786 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23787 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23789 TCGv_i32 tdf
= tcg_const_i32(df
);
23790 TCGv_i32 twd
= tcg_const_i32(wd
);
23791 TCGv_i32 tws
= tcg_const_i32(ws
);
23792 TCGv_i32 twt
= tcg_const_i32(wt
);
23794 switch (MASK_MSA_3R(ctx
->opcode
)) {
23796 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
23799 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23802 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23805 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23807 case OPC_SUBS_S_df
:
23808 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23811 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23814 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
23817 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
23820 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
23823 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23825 case OPC_ADDS_A_df
:
23826 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23828 case OPC_SUBS_U_df
:
23829 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23832 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23835 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
23838 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
23841 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23844 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23847 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23849 case OPC_ADDS_S_df
:
23850 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23852 case OPC_SUBSUS_U_df
:
23853 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23856 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23859 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
23862 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23865 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23868 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23871 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23873 case OPC_ADDS_U_df
:
23874 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23876 case OPC_SUBSUU_S_df
:
23877 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23880 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
23883 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
23886 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23889 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23892 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23894 case OPC_ASUB_S_df
:
23895 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23898 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23901 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23904 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
23907 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23910 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23913 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23915 case OPC_ASUB_U_df
:
23916 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23919 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23922 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23925 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23928 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23930 case OPC_AVER_S_df
:
23931 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23934 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23937 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
23940 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23943 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23945 case OPC_AVER_U_df
:
23946 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23949 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23952 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
23955 case OPC_DOTP_S_df
:
23956 case OPC_DOTP_U_df
:
23957 case OPC_DPADD_S_df
:
23958 case OPC_DPADD_U_df
:
23959 case OPC_DPSUB_S_df
:
23960 case OPC_HADD_S_df
:
23961 case OPC_DPSUB_U_df
:
23962 case OPC_HADD_U_df
:
23963 case OPC_HSUB_S_df
:
23964 case OPC_HSUB_U_df
:
23965 if (df
== DF_BYTE
) {
23966 generate_exception_end(ctx
, EXCP_RI
);
23969 switch (MASK_MSA_3R(ctx
->opcode
)) {
23970 case OPC_DOTP_S_df
:
23971 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23973 case OPC_DOTP_U_df
:
23974 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23976 case OPC_DPADD_S_df
:
23977 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23979 case OPC_DPADD_U_df
:
23980 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23982 case OPC_DPSUB_S_df
:
23983 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23985 case OPC_HADD_S_df
:
23986 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23988 case OPC_DPSUB_U_df
:
23989 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23991 case OPC_HADD_U_df
:
23992 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23994 case OPC_HSUB_S_df
:
23995 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23997 case OPC_HSUB_U_df
:
23998 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24003 MIPS_INVAL("MSA instruction");
24004 generate_exception_end(ctx
, EXCP_RI
);
24007 tcg_temp_free_i32(twd
);
24008 tcg_temp_free_i32(tws
);
24009 tcg_temp_free_i32(twt
);
24010 tcg_temp_free_i32(tdf
);
24013 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
24015 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
24016 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
24017 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
24018 TCGv telm
= tcg_temp_new();
24019 TCGv_i32 tsr
= tcg_const_i32(source
);
24020 TCGv_i32 tdt
= tcg_const_i32(dest
);
24022 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
24024 gen_load_gpr(telm
, source
);
24025 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
24028 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
24029 gen_store_gpr(telm
, dest
);
24032 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
24035 MIPS_INVAL("MSA instruction");
24036 generate_exception_end(ctx
, EXCP_RI
);
24040 tcg_temp_free(telm
);
24041 tcg_temp_free_i32(tdt
);
24042 tcg_temp_free_i32(tsr
);
24045 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
24048 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
24049 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24050 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24052 TCGv_i32 tws
= tcg_const_i32(ws
);
24053 TCGv_i32 twd
= tcg_const_i32(wd
);
24054 TCGv_i32 tn
= tcg_const_i32(n
);
24055 TCGv_i32 tdf
= tcg_const_i32(df
);
24057 switch (MASK_MSA_ELM(ctx
->opcode
)) {
24059 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
24061 case OPC_SPLATI_df
:
24062 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
24065 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
24067 case OPC_COPY_S_df
:
24068 case OPC_COPY_U_df
:
24069 case OPC_INSERT_df
:
24070 #if !defined(TARGET_MIPS64)
24071 /* Double format valid only for MIPS64 */
24072 if (df
== DF_DOUBLE
) {
24073 generate_exception_end(ctx
, EXCP_RI
);
24077 switch (MASK_MSA_ELM(ctx
->opcode
)) {
24078 case OPC_COPY_S_df
:
24079 if (likely(wd
!= 0)) {
24080 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
24083 case OPC_COPY_U_df
:
24084 if (likely(wd
!= 0)) {
24085 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
24088 case OPC_INSERT_df
:
24089 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
24094 MIPS_INVAL("MSA instruction");
24095 generate_exception_end(ctx
, EXCP_RI
);
24097 tcg_temp_free_i32(twd
);
24098 tcg_temp_free_i32(tws
);
24099 tcg_temp_free_i32(tn
);
24100 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
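/*
 * ELM instructions pack the data format and the element index into the
 * 6-bit dfn field decoded above: 00nnnn selects byte elements (4-bit
 * index n), 100nnn halfwords, 1100nn words, 11100n doublewords, and the
 * 0x3E pattern is reserved for the register-move forms (CTCMSA, CFCMSA,
 * MOVE.V).
 */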
24132 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
24134 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
24135 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
24136 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24137 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24138 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24140 TCGv_i32 twd
= tcg_const_i32(wd
);
24141 TCGv_i32 tws
= tcg_const_i32(ws
);
24142 TCGv_i32 twt
= tcg_const_i32(wt
);
24143 TCGv_i32 tdf
= tcg_temp_new_i32();
24145 /* adjust df value for floating-point instruction */
24146 tcg_gen_movi_i32(tdf
, df
+ 2);
24148 switch (MASK_MSA_3RF(ctx
->opcode
)) {
24150 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
24153 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
24156 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
24159 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
24162 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
24165 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24168 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
24171 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
24174 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24177 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24180 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
24183 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
24186 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
24189 tcg_gen_movi_i32(tdf
, df
+ 1);
24190 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24193 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
24196 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
24198 case OPC_MADD_Q_df
:
24199 tcg_gen_movi_i32(tdf
, df
+ 1);
24200 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24203 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
24205 case OPC_MSUB_Q_df
:
24206 tcg_gen_movi_i32(tdf
, df
+ 1);
24207 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24210 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
24213 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
24216 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
24219 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
24222 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
24225 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
24228 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24231 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24234 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
24237 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24240 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
24243 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
24246 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
24248 case OPC_MULR_Q_df
:
24249 tcg_gen_movi_i32(tdf
, df
+ 1);
24250 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24253 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
24255 case OPC_FMIN_A_df
:
24256 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24258 case OPC_MADDR_Q_df
:
24259 tcg_gen_movi_i32(tdf
, df
+ 1);
24260 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24263 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
24266 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
24268 case OPC_MSUBR_Q_df
:
24269 tcg_gen_movi_i32(tdf
, df
+ 1);
24270 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24273 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
24275 case OPC_FMAX_A_df
:
24276 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24279 MIPS_INVAL("MSA instruction");
24280 generate_exception_end(ctx
, EXCP_RI
);
24284 tcg_temp_free_i32(twd
);
24285 tcg_temp_free_i32(tws
);
24286 tcg_temp_free_i32(twt
);
24287 tcg_temp_free_i32(tdf
);
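/*
 * gen_msa_3rf() above receives df as a single W/D bit, while the MSA
 * helpers expect the DF_* element-size enum, hence the "df + 2"
 * adjustment (0 -> DF_WORD, 1 -> DF_DOUBLE); the fixed-point Q-format
 * multiplies (MUL_Q, MADD_Q, MSUB_Q, MULR_Q, MADDR_Q, MSUBR_Q) instead
 * use "df + 1" so the same bit selects DF_HALF or DF_WORD.
 */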
24290 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
24292 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
24293 (op & (0x7 << 18)))
24294 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24295 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24296 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24297 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
24298 TCGv_i32 twd
= tcg_const_i32(wd
);
24299 TCGv_i32 tws
= tcg_const_i32(ws
);
24300 TCGv_i32 twt
= tcg_const_i32(wt
);
24301 TCGv_i32 tdf
= tcg_const_i32(df
);
24303 switch (MASK_MSA_2R(ctx
->opcode
)) {
24305 #if !defined(TARGET_MIPS64)
24306 /* Double format valid only for MIPS64 */
24307 if (df
== DF_DOUBLE
) {
24308 generate_exception_end(ctx
, EXCP_RI
);
24312 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
24315 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
24318 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
24321 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
24324 MIPS_INVAL("MSA instruction");
24325 generate_exception_end(ctx
, EXCP_RI
);
24329 tcg_temp_free_i32(twd
);
24330 tcg_temp_free_i32(tws
);
24331 tcg_temp_free_i32(twt
);
24332 tcg_temp_free_i32(tdf
);
24335 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
24337 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
24338 (op & (0xf << 17)))
24339 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24340 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24341 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24342 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
24343 TCGv_i32 twd
= tcg_const_i32(wd
);
24344 TCGv_i32 tws
= tcg_const_i32(ws
);
24345 TCGv_i32 twt
= tcg_const_i32(wt
);
24346 /* adjust df value for floating-point instruction */
24347 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
24349 switch (MASK_MSA_2RF(ctx
->opcode
)) {
24350 case OPC_FCLASS_df
:
24351 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
24353 case OPC_FTRUNC_S_df
:
24354 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
24356 case OPC_FTRUNC_U_df
:
24357 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
24360 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
24362 case OPC_FRSQRT_df
:
24363 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
24366 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
24369 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
24372 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
24374 case OPC_FEXUPL_df
:
24375 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
24377 case OPC_FEXUPR_df
:
24378 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
24381 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
24384 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
24386 case OPC_FTINT_S_df
:
24387 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
24389 case OPC_FTINT_U_df
:
24390 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
24392 case OPC_FFINT_S_df
:
24393 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
24395 case OPC_FFINT_U_df
:
24396 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
24400 tcg_temp_free_i32(twd
);
24401 tcg_temp_free_i32(tws
);
24402 tcg_temp_free_i32(twt
);
24403 tcg_temp_free_i32(tdf
);
24406 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
24408 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
24409 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24410 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24411 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24412 TCGv_i32 twd
= tcg_const_i32(wd
);
24413 TCGv_i32 tws
= tcg_const_i32(ws
);
24414 TCGv_i32 twt
= tcg_const_i32(wt
);
24416 switch (MASK_MSA_VEC(ctx
->opcode
)) {
24418 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
24421 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
24424 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
24427 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
24430 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
24433 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
24436 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
24439 MIPS_INVAL("MSA instruction");
24440 generate_exception_end(ctx
, EXCP_RI
);
24444 tcg_temp_free_i32(twd
);
24445 tcg_temp_free_i32(tws
);
24446 tcg_temp_free_i32(twt
);
24449 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
24451 switch (MASK_MSA_VEC(ctx
->opcode
)) {
24459 gen_msa_vec_v(env
, ctx
);
24462 gen_msa_2r(env
, ctx
);
24465 gen_msa_2rf(env
, ctx
);
24468 MIPS_INVAL("MSA instruction");
24469 generate_exception_end(ctx
, EXCP_RI
);
24474 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
24476 uint32_t opcode
= ctx
->opcode
;
24477 check_insn(ctx
, ASE_MSA
);
24478 check_msa_access(ctx
);
24480 switch (MASK_MSA_MINOR(opcode
)) {
24481 case OPC_MSA_I8_00
:
24482 case OPC_MSA_I8_01
:
24483 case OPC_MSA_I8_02
:
24484 gen_msa_i8(env
, ctx
);
24486 case OPC_MSA_I5_06
:
24487 case OPC_MSA_I5_07
:
24488 gen_msa_i5(env
, ctx
);
24490 case OPC_MSA_BIT_09
:
24491 case OPC_MSA_BIT_0A
:
24492 gen_msa_bit(env
, ctx
);
24494 case OPC_MSA_3R_0D
:
24495 case OPC_MSA_3R_0E
:
24496 case OPC_MSA_3R_0F
:
24497 case OPC_MSA_3R_10
:
24498 case OPC_MSA_3R_11
:
24499 case OPC_MSA_3R_12
:
24500 case OPC_MSA_3R_13
:
24501 case OPC_MSA_3R_14
:
24502 case OPC_MSA_3R_15
:
24503 gen_msa_3r(env
, ctx
);
24506 gen_msa_elm(env
, ctx
);
24508 case OPC_MSA_3RF_1A
:
24509 case OPC_MSA_3RF_1B
:
24510 case OPC_MSA_3RF_1C
:
24511 gen_msa_3rf(env
, ctx
);
24514 gen_msa_vec(env
, ctx
);
24525 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
24526 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
24527 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24528 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
24530 TCGv_i32 twd
= tcg_const_i32(wd
);
24531 TCGv taddr
= tcg_temp_new();
24532 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
24534 switch (MASK_MSA_MINOR(opcode
)) {
24536 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
24539 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
24542 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
24545 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
24548 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
24551 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
24554 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
24557 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
24561 tcg_temp_free_i32(twd
);
24562 tcg_temp_free(taddr
);
24566 MIPS_INVAL("MSA instruction");
24567 generate_exception_end(ctx
, EXCP_RI
);
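/*
 * MSA LD.df/ST.df (decoded above) scale the signed 10-bit offset by the
 * element size: gen_base_offset_addr() is passed "s10 << df", so the
 * immediate counts elements rather than bytes, matching the LD.df/ST.df
 * definition where the offset is given in element units.
 */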
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;

    /* make sure instructions are on a word boundary */
    if (ctx->base.pc_next & 0x3) {
        env->CP0_BadVAddr = ctx->base.pc_next;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->base.pc_next + 4);
        gen_set_label(l1);
    }
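    /*
     * Branch-likely handling: if the previous instruction was a
     * branch-likely (MIPS_HFLAG_BL) and bcond is zero, the delay slot
     * must be annulled, so the code above clears the branch state and
     * exits to pc_next + 4, skipping the current (delay-slot)
     * instruction; only the taken path falls through past the label.
     */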
    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
24605 decode_opc_special(env
, ctx
);
24608 decode_opc_special2_legacy(env
, ctx
);
24611 decode_opc_special3(env
, ctx
);
24614 op1
= MASK_REGIMM(ctx
->opcode
);
24616 case OPC_BLTZL
: /* REGIMM branches */
24620 check_insn(ctx
, ISA_MIPS2
);
24621 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24625 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
24629 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24631 /* OPC_NAL, OPC_BAL */
24632 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
24634 generate_exception_end(ctx
, EXCP_RI
);
24637 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
24640 case OPC_TGEI
: /* REGIMM traps */
24647 check_insn(ctx
, ISA_MIPS2
);
24648 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24649 gen_trap(ctx
, op1
, rs
, -1, imm
);
24652 check_insn(ctx
, ISA_MIPS32R6
);
24653 generate_exception_end(ctx
, EXCP_RI
);
24656 check_insn(ctx
, ISA_MIPS32R2
);
24657 /* Break the TB to be able to sync copied instructions
24659 ctx
->base
.is_jmp
= DISAS_STOP
;
24661 case OPC_BPOSGE32
: /* MIPS DSP branch */
24662 #if defined(TARGET_MIPS64)
24666 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
24668 #if defined(TARGET_MIPS64)
24670 check_insn(ctx
, ISA_MIPS32R6
);
24671 check_mips_64(ctx
);
24673 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
24677 check_insn(ctx
, ISA_MIPS32R6
);
24678 check_mips_64(ctx
);
24680 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
24684 default: /* Invalid */
24685 MIPS_INVAL("regimm");
24686 generate_exception_end(ctx
, EXCP_RI
);
24691 check_cp0_enabled(ctx
);
24692 op1
= MASK_CP0(ctx
->opcode
);
24700 #if defined(TARGET_MIPS64)
24704 #ifndef CONFIG_USER_ONLY
24705 gen_cp0(env
, ctx
, op1
, rt
, rd
);
24706 #endif /* !CONFIG_USER_ONLY */
24724 #ifndef CONFIG_USER_ONLY
24725 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
24726 #endif /* !CONFIG_USER_ONLY */
24729 #ifndef CONFIG_USER_ONLY
24732 TCGv t0
= tcg_temp_new();
24734 op2
= MASK_MFMC0(ctx
->opcode
);
24738 gen_helper_dmt(t0
);
24739 gen_store_gpr(t0
, rt
);
24743 gen_helper_emt(t0
);
24744 gen_store_gpr(t0
, rt
);
24748 gen_helper_dvpe(t0
, cpu_env
);
24749 gen_store_gpr(t0
, rt
);
24753 gen_helper_evpe(t0
, cpu_env
);
24754 gen_store_gpr(t0
, rt
);
24757 check_insn(ctx
, ISA_MIPS32R6
);
24759 gen_helper_dvp(t0
, cpu_env
);
24760 gen_store_gpr(t0
, rt
);
24764 check_insn(ctx
, ISA_MIPS32R6
);
24766 gen_helper_evp(t0
, cpu_env
);
24767 gen_store_gpr(t0
, rt
);
24771 check_insn(ctx
, ISA_MIPS32R2
);
24772 save_cpu_state(ctx
, 1);
24773 gen_helper_di(t0
, cpu_env
);
24774 gen_store_gpr(t0
, rt
);
24775 /* Stop translation as we may have switched
24776 the execution mode. */
24777 ctx
->base
.is_jmp
= DISAS_STOP
;
24780 check_insn(ctx
, ISA_MIPS32R2
);
24781 save_cpu_state(ctx
, 1);
24782 gen_helper_ei(t0
, cpu_env
);
24783 gen_store_gpr(t0
, rt
);
24784 /* DISAS_STOP isn't sufficient, we need to ensure we break
24785 out of translated code to check for pending interrupts */
24786 gen_save_pc(ctx
->base
.pc_next
+ 4);
24787 ctx
->base
.is_jmp
= DISAS_EXIT
;
24789 default: /* Invalid */
24790 MIPS_INVAL("mfmc0");
24791 generate_exception_end(ctx
, EXCP_RI
);
24796 #endif /* !CONFIG_USER_ONLY */
24799 check_insn(ctx
, ISA_MIPS32R2
);
24800 gen_load_srsgpr(rt
, rd
);
24803 check_insn(ctx
, ISA_MIPS32R2
);
24804 gen_store_srsgpr(rt
, rd
);
24808 generate_exception_end(ctx
, EXCP_RI
);
24812 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
24813 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24814 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
24815 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24818 /* Arithmetic with immediate opcode */
24819 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24823 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24825 case OPC_SLTI
: /* Set on less than with immediate opcode */
24827 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
24829 case OPC_ANDI
: /* Arithmetic with immediate opcode */
24830 case OPC_LUI
: /* OPC_AUI */
24833 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
24835 case OPC_J
: /* Jump */
24837 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
24838 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
24841 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
24842 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24844 generate_exception_end(ctx
, EXCP_RI
);
24847 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
24848 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24851 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24854 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
24855 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24857 generate_exception_end(ctx
, EXCP_RI
);
24860 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
24861 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24864 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24867 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
24870 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24872 check_insn(ctx
, ISA_MIPS32R6
);
24873 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
24874 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24877 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
24880 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24882 check_insn(ctx
, ISA_MIPS32R6
);
24883 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
24884 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24889 check_insn(ctx
, ISA_MIPS2
);
24890 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24894 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24896 case OPC_LL
: /* Load and stores */
24897 check_insn(ctx
, ISA_MIPS2
);
24901 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24909 gen_ld(ctx
, op
, rt
, rs
, imm
);
24913 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24918 gen_st(ctx
, op
, rt
, rs
, imm
);
24921 check_insn(ctx
, ISA_MIPS2
);
24922 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24923 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
24926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24927 check_cp0_enabled(ctx
);
24928 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
24929 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24930 gen_cache_operation(ctx
, rt
, rs
, imm
);
24932 /* Treat as NOP. */
24935 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24936 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24937 /* Treat as NOP. */
24940 /* Floating point (COP1). */
24945 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
24949 op1
= MASK_CP1(ctx
->opcode
);
24954 check_cp1_enabled(ctx
);
24955 check_insn(ctx
, ISA_MIPS32R2
);
24961 check_cp1_enabled(ctx
);
24962 gen_cp1(ctx
, op1
, rt
, rd
);
24964 #if defined(TARGET_MIPS64)
24967 check_cp1_enabled(ctx
);
24968 check_insn(ctx
, ISA_MIPS3
);
24969 check_mips_64(ctx
);
24970 gen_cp1(ctx
, op1
, rt
, rd
);
24973 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
24974 check_cp1_enabled(ctx
);
24975 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24977 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
24982 check_insn(ctx
, ASE_MIPS3D
);
24983 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
24984 (rt
>> 2) & 0x7, imm
<< 2);
24988 check_cp1_enabled(ctx
);
24989 check_insn(ctx
, ISA_MIPS32R6
);
24990 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
24994 check_cp1_enabled(ctx
);
24995 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24997 check_insn(ctx
, ASE_MIPS3D
);
25000 check_cp1_enabled(ctx
);
25001 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25002 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
25003 (rt
>> 2) & 0x7, imm
<< 2);
25010 check_cp1_enabled(ctx
);
25011 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
25017 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
25018 check_cp1_enabled(ctx
);
25019 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25021 case R6_OPC_CMP_AF_S
:
25022 case R6_OPC_CMP_UN_S
:
25023 case R6_OPC_CMP_EQ_S
:
25024 case R6_OPC_CMP_UEQ_S
:
25025 case R6_OPC_CMP_LT_S
:
25026 case R6_OPC_CMP_ULT_S
:
25027 case R6_OPC_CMP_LE_S
:
25028 case R6_OPC_CMP_ULE_S
:
25029 case R6_OPC_CMP_SAF_S
:
25030 case R6_OPC_CMP_SUN_S
:
25031 case R6_OPC_CMP_SEQ_S
:
25032 case R6_OPC_CMP_SEUQ_S
:
25033 case R6_OPC_CMP_SLT_S
:
25034 case R6_OPC_CMP_SULT_S
:
25035 case R6_OPC_CMP_SLE_S
:
25036 case R6_OPC_CMP_SULE_S
:
25037 case R6_OPC_CMP_OR_S
:
25038 case R6_OPC_CMP_UNE_S
:
25039 case R6_OPC_CMP_NE_S
:
25040 case R6_OPC_CMP_SOR_S
:
25041 case R6_OPC_CMP_SUNE_S
:
25042 case R6_OPC_CMP_SNE_S
:
25043 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
25045 case R6_OPC_CMP_AF_D
:
25046 case R6_OPC_CMP_UN_D
:
25047 case R6_OPC_CMP_EQ_D
:
25048 case R6_OPC_CMP_UEQ_D
:
25049 case R6_OPC_CMP_LT_D
:
25050 case R6_OPC_CMP_ULT_D
:
25051 case R6_OPC_CMP_LE_D
:
25052 case R6_OPC_CMP_ULE_D
:
25053 case R6_OPC_CMP_SAF_D
:
25054 case R6_OPC_CMP_SUN_D
:
25055 case R6_OPC_CMP_SEQ_D
:
25056 case R6_OPC_CMP_SEUQ_D
:
25057 case R6_OPC_CMP_SLT_D
:
25058 case R6_OPC_CMP_SULT_D
:
25059 case R6_OPC_CMP_SLE_D
:
25060 case R6_OPC_CMP_SULE_D
:
25061 case R6_OPC_CMP_OR_D
:
25062 case R6_OPC_CMP_UNE_D
:
25063 case R6_OPC_CMP_NE_D
:
25064 case R6_OPC_CMP_SOR_D
:
25065 case R6_OPC_CMP_SUNE_D
:
25066 case R6_OPC_CMP_SNE_D
:
25067 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
25070 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
25071 rt
, rd
, sa
, (imm
>> 8) & 0x7);
25076 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
25091 check_insn(ctx
, ASE_MSA
);
25092 gen_msa_branch(env
, ctx
, op1
);
25096 generate_exception_end(ctx
, EXCP_RI
);
25101 /* Compact branches [R6] and COP2 [non-R6] */
25102 case OPC_BC
: /* OPC_LWC2 */
25103 case OPC_BALC
: /* OPC_SWC2 */
25104 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25105 /* OPC_BC, OPC_BALC */
25106 gen_compute_compact_branch(ctx
, op
, 0, 0,
25107 sextract32(ctx
->opcode
<< 2, 0, 28));
25109 /* OPC_LWC2, OPC_SWC2 */
25110 /* COP2: Not implemented. */
25111 generate_exception_err(ctx
, EXCP_CpU
, 2);
25114 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
25115 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
25116 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25118 /* OPC_BEQZC, OPC_BNEZC */
25119 gen_compute_compact_branch(ctx
, op
, rs
, 0,
25120 sextract32(ctx
->opcode
<< 2, 0, 23));
25122 /* OPC_JIC, OPC_JIALC */
25123 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
25126 /* OPC_LWC2, OPC_SWC2 */
25127 /* COP2: Not implemented. */
25128 generate_exception_err(ctx
, EXCP_CpU
, 2);
25132 check_insn(ctx
, INSN_LOONGSON2F
);
25133 /* Note that these instructions use different fields. */
25134 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
25138 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25139 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
25140 check_cp1_enabled(ctx
);
25141 op1
= MASK_CP3(ctx
->opcode
);
25145 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
25151 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
25152 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
25155 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
25156 /* Treat as NOP. */
25159 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
25173 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
25174 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
25178 generate_exception_end(ctx
, EXCP_RI
);
25182 generate_exception_err(ctx
, EXCP_CpU
, 1);
25186 #if defined(TARGET_MIPS64)
25187 /* MIPS64 opcodes */
25191 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25195 check_insn(ctx
, ISA_MIPS3
);
25196 check_mips_64(ctx
);
25197 gen_ld(ctx
, op
, rt
, rs
, imm
);
25201 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25204 check_insn(ctx
, ISA_MIPS3
);
25205 check_mips_64(ctx
);
25206 gen_st(ctx
, op
, rt
, rs
, imm
);
25209 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25210 check_insn(ctx
, ISA_MIPS3
);
25211 check_mips_64(ctx
);
25212 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
25214 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
25215 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25216 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
25217 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25220 check_insn(ctx
, ISA_MIPS3
);
25221 check_mips_64(ctx
);
25222 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
25226 check_insn(ctx
, ISA_MIPS3
);
25227 check_mips_64(ctx
);
25228 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
25231 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
25232 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25233 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25235 MIPS_INVAL("major opcode");
25236 generate_exception_end(ctx
, EXCP_RI
);
25240 case OPC_DAUI
: /* OPC_JALX */
25241 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25242 #if defined(TARGET_MIPS64)
25244 check_mips_64(ctx
);
25246 generate_exception(ctx
, EXCP_RI
);
25247 } else if (rt
!= 0) {
25248 TCGv t0
= tcg_temp_new();
25249 gen_load_gpr(t0
, rs
);
25250 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
25254 generate_exception_end(ctx
, EXCP_RI
);
25255 MIPS_INVAL("major opcode");
25259 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
25260 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
25261 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
25264 case OPC_MSA
: /* OPC_MDMX */
25265 /* MDMX: Not implemented. */
25269 check_insn(ctx
, ISA_MIPS32R6
);
25270 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
25272 default: /* Invalid */
25273 MIPS_INVAL("major opcode");
25274 generate_exception_end(ctx
, EXCP_RI
);
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}
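/*
 * Two details of the context set up above: hflags is seeded from
 * tb->flags so the ISA mode and any pending delay-slot state survive
 * across translation blocks, and default_tcg_memop_mask is MO_UNALN on
 * R6 (which requires unaligned load/store support) but MO_ALIGN on
 * earlier ISAs, where misaligned accesses must trap.
 */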
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}

static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}

static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /* force to generate branch as there is neither delay nor
               forbidden slot */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /* Force to generate branch as microMIPS R6 doesn't restrict
               branches in the forbidden slot. */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /* Execute a branch and its delay slot as a single instruction.
       This is what GDB expects and is consistent with what the
       hardware does (e.g. if a delay slot instruction faults, the
       reported PC is the PC of the branch). */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
*dcbase
, CPUState
*cs
)
25416 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
25418 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
25419 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
25420 gen_helper_raise_exception_debug(cpu_env
);
25422 switch (ctx
->base
.is_jmp
) {
25424 gen_save_pc(ctx
->base
.pc_next
);
25425 tcg_gen_lookup_and_goto_ptr();
25428 case DISAS_TOO_MANY
:
25429 save_cpu_state(ctx
, 0);
25430 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
25433 tcg_gen_exit_tb(NULL
, 0);
25435 case DISAS_NORETURN
:
25438 g_assert_not_reached();
static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}
static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};
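/*
 * These hooks are driven by the generic translator loop: roughly,
 * init_disas_context and tb_start once per TB, then insn_start,
 * breakpoint_check (when breakpoints exist) and translate_insn per
 * instruction until is_jmp leaves DISAS_NEXT, and finally tb_stop,
 * with disas_log used for "-d in_asm" output.
 */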
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb);
}
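/*
 * The DisasContext lives on the host stack for the duration of one TB;
 * translator_loop() fills in ctx.base (pc_first, tb, etc.) before invoking
 * the mips_tr_ops callbacks above.
 */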
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
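/*
 * With Status.FR == 0 (is_fpu64 == 0) a 64-bit value is assembled from an
 * even/odd pair of 32-bit FPRs, which is why the dump loop above steps by
 * two registers at a time; with Status.FR == 1 every FPR is a full 64-bit
 * register and the loop visits all 32.
 */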
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            cpu_fprintf(f, "GPR%02d:", i);
        }
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            cpu_fprintf(f, "\n");
        }
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, cpu_fprintf, flags);
    }
}
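/*
 * This is the target hook behind the monitor's "info registers" command
 * and "-d cpu" logging; CPU_DUMP_FPU in flags additionally selects the
 * FPU dump above when the FPU is enabled.
 */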
void mips_tcg_init(void)
{
    int i;

    cpu_gpr[0] = NULL;
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
}
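/*
 * Each TCG global created above is a named window onto a CPUMIPSState
 * field at a fixed offset, so generated code can access guest registers
 * directly, e.g. (illustrative): tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs],
 * cpu_gpr[rt]) reads and writes env->active_tc.gpr[] without a helper call.
 */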
#include "translate_init.inc.c"
void cpu_mips_realize_env(CPUMIPSState *env)
{
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}
bool cpu_supports_cps_smp(const char *cpu_type)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}
bool cpu_supports_isa(const char *cpu_type, unsigned int isa)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->insn_flags & isa) != 0;
}
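/*
 * Board code can use these predicates to gate machine features on the
 * selected CPU model, e.g. (hypothetical call site):
 * if (!cpu_supports_isa(machine->cpu_type, ISA_MIPS32R6)) { ... }
 */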
void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
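/*
 * cpu_mips_realize_env() above defaults exception_base to 0xBFC00000, the
 * architectural MIPS reset vector (the KSEG1 alias of physical 0x1FC00000);
 * boards with a relocatable boot vector can override it per virtual
 * processor with cpu_set_exception_base().
 */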
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    int i;

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= (int32_t)0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* Vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    }
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 = (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
    env->CP0_SegCtl1 = (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                       (2 << CP0SC_C);
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 = (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                       (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if ((env->CP0_Config3 & (1 << CP0C3_ISA)) &&
        (env->CP0_Config3 & (1 << (CP0C3_ISA + 1)))) {
        /* microMIPS on reset when Config3.ISA is 3 */
        env->hflags |= MIPS_HFLAG_M16;
    }

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}