/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"
#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: the opcode lives in instruction bits 31..26. */
#define MASK_OP_MAJOR(op)       (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
474 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
496 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
505 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
535 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
536 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
558 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
582 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
610 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
637 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
664 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
670 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
678 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
700 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
729 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
761 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
794 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
803 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
829 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
835 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
866 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
898 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
931 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
945 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1009 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1010 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1029 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1045 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1142 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1168 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1402 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1403 * ============================================
1405 * MXU (full name: MIPS eXtension/enhanced Unit) is an SIMD extension of MIPS32
1406 * instructions set. It is designed to fit the needs of signal, graphical and
1407 * video processing applications. MXU instruction set is used in Xburst family
1408 * of microprocessors by Ingenic.
1410 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1411 * the control register.
1413 * The notation used in MXU assembler mnemonics:
1415 * XRa, XRb, XRc, XRd - MXU registers
1416 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1417 * s12 - a subfield of an instruction code
1418 * strd2 - a subfield of an instruction code
1419 * eptn2 - a subfield of an instruction code
1420 * eptn3 - a subfield of an instruction code
1421 * optn2 - a subfield of an instruction code
1422 * optn3 - a subfield of an instruction code
1423 * sft4 - a subfield of an instruction code
1425 * Load/Store instructions Multiplication instructions
1426 * ----------------------- ---------------------------
1428 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1429 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1430 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1431 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1432 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1433 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1434 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1435 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1436 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1437 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1438 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1439 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1440 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1441 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1442 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1443 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1444 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1445 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1446 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1447 * S16SDI XRa, Rb, s10, eptn2
1448 * S8LDD XRa, Rb, s8, eptn3
1449 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1450 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1451 * S8SDI XRa, Rb, s8, eptn3
1452 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1453 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1454 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1455 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1456 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1457 * S32CPS XRa, XRb, XRc
1458 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1459 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1460 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1461 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1462 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1463 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1464 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1465 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1466 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1467 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1468 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1469 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1470 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1471 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1472 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1473 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1474 * Q8SLT XRa, XRb, XRc
1475 * Q8SLTU XRa, XRb, XRc
1476 * Q8MOVZ XRa, XRb, XRc Shift instructions
1477 * Q8MOVN XRa, XRb, XRc ------------------
1479 * D32SLL XRa, XRb, XRc, XRd, sft4
1480 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1481 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1482 * D32SARL XRa, XRb, XRc, sft4
1483 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1484 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1485 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1486 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1487 * Q16SLL XRa, XRb, XRc, XRd, sft4
1488 * Q16SLR XRa, XRb, XRc, XRd, sft4
1489 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1490 * ------------------------- Q16SLLV XRa, XRb, Rb
1491 * Q16SLRV XRa, XRb, Rb
1492 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1493 * S32ALN XRa, XRb, XRc, Rb
1494 * S32ALNI XRa, XRb, XRc, s3
1495 * S32LUI XRa, s8, optn3 Move instructions
1496 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1497 * S32EXTRV XRa, XRb, Rs, Rt
1498 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1499 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1505 * ┌─ 000000 ─ OPC_MXU_S32MADD
1506 * ├─ 000001 ─ OPC_MXU_S32MADDU
1507 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1510 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1511 * │ ├─ 001 ─ OPC_MXU_S32MIN
1512 * │ ├─ 010 ─ OPC_MXU_D16MAX
1513 * │ ├─ 011 ─ OPC_MXU_D16MIN
1514 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1515 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1516 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1517 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1518 * ├─ 000100 ─ OPC_MXU_S32MSUB
1519 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1520 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1521 * │ ├─ 001 ─ OPC_MXU_D16SLT
1522 * │ ├─ 010 ─ OPC_MXU_D16AVG
1523 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1524 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1525 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1526 * │ └─ 111 ─ OPC_MXU_Q8ADD
1529 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1530 * │ ├─ 010 ─ OPC_MXU_D16CPS
1531 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1532 * │ └─ 110 ─ OPC_MXU_Q16SAT
1533 * ├─ 001000 ─ OPC_MXU_D16MUL
1535 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1536 * │ └─ 01 ─ OPC_MXU_D16MULE
1537 * ├─ 001010 ─ OPC_MXU_D16MAC
1538 * ├─ 001011 ─ OPC_MXU_D16MACF
1539 * ├─ 001100 ─ OPC_MXU_D16MADL
1540 * ├─ 001101 ─ OPC_MXU_S16MAD
1541 * ├─ 001110 ─ OPC_MXU_Q16ADD
1542 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1543 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1544 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1547 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1548 * │ └─ 1 ─ OPC_MXU_S32STDR
1551 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1552 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1555 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1556 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1559 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1560 * │ └─ 1 ─ OPC_MXU_S32LDIR
1563 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1564 * │ └─ 1 ─ OPC_MXU_S32SDIR
1567 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1568 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1571 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1572 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1573 * ├─ 011000 ─ OPC_MXU_D32ADD
1575 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1576 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1577 * │ └─ 10 ─ OPC_MXU_D32ASUM
1578 * ├─ 011010 ─ <not assigned>
1580 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1581 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1582 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1585 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1586 * │ ├─ 01 ─ OPC_MXU_D8SUM
1587 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1588 * ├─ 011110 ─ <not assigned>
1589 * ├─ 011111 ─ <not assigned>
1590 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1591 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1592 * ├─ 100010 ─ OPC_MXU_S8LDD
1593 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1594 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1595 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 00 ─ OPC_MXU_S32MULU
1596 * │ ├─ 00 ─ OPC_MXU_S32EXTR
1597 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 00 ─ OPC_MXU_S32EXTRV
1600 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1601 * │ ├─ 001 ─ OPC_MXU_S32ALN
1602 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1603 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1604 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1605 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1606 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1607 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1608 * ├─ 101110 ─ OPC_MXU_S32M2I
1609 * ├─ 101111 ─ OPC_MXU_S32I2M
1610 * ├─ 110000 ─ OPC_MXU_D32SLL
1611 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1612 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1613 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1614 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1615 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1616 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1617 * ├─ 110110 ─ OPC_MXU__POOL17 ─┴─ 101 ─ OPC_MXU_Q16SARV
1619 * ├─ 110111 ─ OPC_MXU_Q16SAR
1621 * ├─ 111000 ─ OPC_MXU__POOL18 ─┬─ 00 ─ OPC_MXU_Q8MUL
1622 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1625 * ├─ 111001 ─ OPC_MXU__POOL19 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1626 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1627 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1628 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1629 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
 1630  *   │                              └─ 101 ─ OPC_MXU_S32MOVN
1633 * ├─ 111010 ─ OPC_MXU__POOL20 ─┬─ 00 ─ OPC_MXU_Q8MAC
1634 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1635 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1636 * ├─ 111100 ─ OPC_MXU_Q8MADL
1637 * ├─ 111101 ─ OPC_MXU_S32SFL
1638 * ├─ 111110 ─ OPC_MXU_Q8SAD
1639 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1644 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1645 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
/*
 * MXU major minor-opcode values (bits 5..0 of an MXU instruction under the
 * SPECIAL2 major opcode).  Entries named OPC_MXU__POOLxx need a further
 * decode step using the pool sub-enums below; see the decode tree above.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    /* not assigned 0x02 */
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU_LXB      = 0x28, /* 0x28 = 101000: LXB per the decode tree above */
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL17  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL18  = 0x38,
    OPC_MXU__POOL19  = 0x39,
    OPC_MXU__POOL20  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
};
/* MXU pool 00 minor opcodes (selected when the major minor-op is POOL00). */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU pool 01 minor opcodes (0x06 is not assigned). */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU pool 02 minor opcodes (odd values are not assigned). */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU pool 03 minor opcodes. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU pool 04 minor opcodes. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU pool 05 minor opcodes. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU pool 06 minor opcodes. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU pool 07 minor opcodes. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU pool 08 minor opcodes. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU pool 09 minor opcodes. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU pool 10 minor opcodes. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU pool 11 minor opcodes. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU pool 12 minor opcodes. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU pool 13 minor opcodes. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU pool 14 minor opcodes. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU pool 15 minor opcodes. */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU pool 16 minor opcodes. */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32NOR   = 0x03,
    OPC_MXU_S32AND   = 0x04,
    OPC_MXU_S32OR    = 0x05,
    OPC_MXU_S32XOR   = 0x06,
    OPC_MXU_S32LUI   = 0x07,
};

/* MXU pool 17 minor opcodes (0x02 and 0x06 are not assigned). */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU pool 18 minor opcodes. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU pool 19 minor opcodes. */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/* MXU pool 20 minor opcodes. */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
1917 * Overview of the TX79-specific instruction set
1918 * =============================================
1920 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1921 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1922 * instructions and certain multimedia instructions (MMIs). These MMIs
1923 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1924 * or sixteen 8-bit paths.
1928 * The Toshiba TX System RISC TX79 Core Architecture manual,
1929 * https://wiki.qemu.org/File:C790.pdf
1931 * Three-Operand Multiply and Multiply-Add (4 instructions)
1932 * --------------------------------------------------------
1933 * MADD [rd,] rs, rt Multiply/Add
1934 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1935 * MULT [rd,] rs, rt Multiply (3-operand)
1936 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1938 * Multiply Instructions for Pipeline 1 (10 instructions)
1939 * ------------------------------------------------------
1940 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1941 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1942 * DIV1 rs, rt Divide Pipeline 1
1943 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1944 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1945 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1946 * MFHI1 rd Move From HI1 Register
1947 * MFLO1 rd Move From LO1 Register
1948 * MTHI1 rs Move To HI1 Register
1949 * MTLO1 rs Move To LO1 Register
1951 * Arithmetic (19 instructions)
1952 * ----------------------------
1953 * PADDB rd, rs, rt Parallel Add Byte
1954 * PSUBB rd, rs, rt Parallel Subtract Byte
1955 * PADDH rd, rs, rt Parallel Add Halfword
1956 * PSUBH rd, rs, rt Parallel Subtract Halfword
1957 * PADDW rd, rs, rt Parallel Add Word
1958 * PSUBW rd, rs, rt Parallel Subtract Word
1959 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1960 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1961 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1962 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1963 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1964 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1965 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1966 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1967 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1968 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1969 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1970 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1971 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1973 * Min/Max (4 instructions)
1974 * ------------------------
1975 * PMAXH rd, rs, rt Parallel Maximum Halfword
1976 * PMINH rd, rs, rt Parallel Minimum Halfword
1977 * PMAXW rd, rs, rt Parallel Maximum Word
1978 * PMINW rd, rs, rt Parallel Minimum Word
1980 * Absolute (2 instructions)
1981 * -------------------------
1982 * PABSH rd, rt Parallel Absolute Halfword
1983 * PABSW rd, rt Parallel Absolute Word
1985 * Logical (4 instructions)
1986 * ------------------------
1987 * PAND rd, rs, rt Parallel AND
1988 * POR rd, rs, rt Parallel OR
1989 * PXOR rd, rs, rt Parallel XOR
1990 * PNOR rd, rs, rt Parallel NOR
1992 * Shift (9 instructions)
1993 * ----------------------
1994 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
1995 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
1996 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
1997 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
1998 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
1999 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2000 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2001 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2002 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2004 * Compare (6 instructions)
2005 * ------------------------
2006 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2007 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2008 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2009 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2010 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2011 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2013 * LZC (1 instruction)
2014 * -------------------
2015 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2017 * Quadword Load and Store (2 instructions)
2018 * ----------------------------------------
2019 * LQ rt, offset(base) Load Quadword
2020 * SQ rt, offset(base) Store Quadword
2022 * Multiply and Divide (19 instructions)
2023 * -------------------------------------
2024 * PMULTW rd, rs, rt Parallel Multiply Word
2025 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2026 * PDIVW rs, rt Parallel Divide Word
2027 * PDIVUW rs, rt Parallel Divide Unsigned Word
2028 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2029 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2030 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2031 * PMULTH rd, rs, rt Parallel Multiply Halfword
2032 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2033 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2034 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2035 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2036 * PDIVBW rs, rt Parallel Divide Broadcast Word
2037 * PMFHI rd Parallel Move From HI Register
2038 * PMFLO rd Parallel Move From LO Register
2039 * PMTHI rs Parallel Move To HI Register
2040 * PMTLO rs Parallel Move To LO Register
2041 * PMFHL rd Parallel Move From HI/LO Register
2042 * PMTHL rs Parallel Move To HI/LO Register
2044 * Pack/Extend (11 instructions)
2045 * -----------------------------
2046 * PPAC5 rd, rt Parallel Pack to 5 bits
2047 * PPACB rd, rs, rt Parallel Pack to Byte
2048 * PPACH rd, rs, rt Parallel Pack to Halfword
2049 * PPACW rd, rs, rt Parallel Pack to Word
2050 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2051 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2052 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2053 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2054 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2055 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2056 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2058 * Others (16 instructions)
2059 * ------------------------
2060 * PCPYH rd, rt Parallel Copy Halfword
2061 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2062 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2063 * PREVH rd, rt Parallel Reverse Halfword
2064 * PINTH rd, rs, rt Parallel Interleave Halfword
2065 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2066 * PEXEH rd, rt Parallel Exchange Even Halfword
2067 * PEXCH rd, rt Parallel Exchange Center Halfword
2068 * PEXEW rd, rt Parallel Exchange Even Word
2069 * PEXCW rd, rt Parallel Exchange Center Word
2070 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2071 * MFSA rd Move from Shift Amount Register
2072 * MTSA rs Move to Shift Amount Register
2073 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2074 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2075 * PROT3W rd, rt Parallel Rotate 3 Words
2077 * The TX79-specific Multimedia Instruction encodings
2078 * ==================================================
2080 * TX79 Multimedia Instruction encoding table keys:
2082 * * This code is reserved for future use. An attempt to execute it
2083 * causes a Reserved Instruction exception.
2084 * % This code indicates an instruction class. The instruction word
2085 * must be further decoded by examining additional tables that show
2086 * the values for other instruction fields.
2087 * # This code is reserved for the unsupported instructions DMULT,
2088 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2089 * to execute it causes a Reserved Instruction exception.
2091 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2094 * +--------+----------------------------------------+
2096 * +--------+----------------------------------------+
2098 * opcode bits 28..26
2099 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2100 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2101 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2102 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2103 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2104 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2105 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2106 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2107 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2108 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2109 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/*
 * TX79-specific major opcodes (bits 31..26).  These reuse encodings that
 * other MIPS configurations assign differently, as noted per entry.
 */
enum {
    TX79_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    TX79_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    TX79_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2119 * TX79 Multimedia Instructions with opcode field = MMI:
2122 * +--------+-------------------------------+--------+
2123 * | MMI | |function|
2124 * +--------+-------------------------------+--------+
2126 * function bits 2..0
2127 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2128 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2129 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2130 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2131 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2132 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2133 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2134 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2135 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2136 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2137 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2140 #define MASK_TX79_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2142 TX79_MMI_MADD
= 0x00 | TX79_CLASS_MMI
, /* Same as OPC_MADD */
2143 TX79_MMI_MADDU
= 0x01 | TX79_CLASS_MMI
, /* Same as OPC_MADDU */
2144 TX79_MMI_PLZCW
= 0x04 | TX79_CLASS_MMI
,
2145 TX79_MMI_CLASS_MMI0
= 0x08 | TX79_CLASS_MMI
,
2146 TX79_MMI_CLASS_MMI2
= 0x09 | TX79_CLASS_MMI
,
2147 TX79_MMI_MFHI1
= 0x10 | TX79_CLASS_MMI
, /* Same minor as OPC_MFHI */
2148 TX79_MMI_MTHI1
= 0x11 | TX79_CLASS_MMI
, /* Same minor as OPC_MTHI */
2149 TX79_MMI_MFLO1
= 0x12 | TX79_CLASS_MMI
, /* Same minor as OPC_MFLO */
2150 TX79_MMI_MTLO1
= 0x13 | TX79_CLASS_MMI
, /* Same minor as OPC_MTLO */
2151 TX79_MMI_MULT1
= 0x18 | TX79_CLASS_MMI
, /* Same minor as OPC_MULT */
2152 TX79_MMI_MULTU1
= 0x19 | TX79_CLASS_MMI
, /* Same minor as OPC_MULTU */
2153 TX79_MMI_DIV1
= 0x1A | TX79_CLASS_MMI
, /* Same minor as OPC_DIV */
2154 TX79_MMI_DIVU1
= 0x1B | TX79_CLASS_MMI
, /* Same minor as OPC_DIVU */
2155 TX79_MMI_MADD1
= 0x20 | TX79_CLASS_MMI
,
2156 TX79_MMI_MADDU1
= 0x21 | TX79_CLASS_MMI
,
2157 TX79_MMI_CLASS_MMI1
= 0x28 | TX79_CLASS_MMI
,
2158 TX79_MMI_CLASS_MMI3
= 0x29 | TX79_CLASS_MMI
,
2159 TX79_MMI_PMFHL
= 0x30 | TX79_CLASS_MMI
,
2160 TX79_MMI_PMTHL
= 0x31 | TX79_CLASS_MMI
,
2161 TX79_MMI_PSLLH
= 0x34 | TX79_CLASS_MMI
,
2162 TX79_MMI_PSRLH
= 0x36 | TX79_CLASS_MMI
,
2163 TX79_MMI_PSRAH
= 0x37 | TX79_CLASS_MMI
,
2164 TX79_MMI_PSLLW
= 0x3C | TX79_CLASS_MMI
,
2165 TX79_MMI_PSRLW
= 0x3E | TX79_CLASS_MMI
,
2166 TX79_MMI_PSRAW
= 0x3F | TX79_CLASS_MMI
,
2170 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI0:
2173 * +--------+----------------------+--------+--------+
2174 * | MMI | |function| MMI0 |
2175 * +--------+----------------------+--------+--------+
2177 * function bits 7..6
2178 * bits | 0 | 1 | 2 | 3
2179 * 10..8 | 00 | 01 | 10 | 11
2180 * -------+-------+-------+-------+-------
2181 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2182 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2183 * 2 010 | PADDB | PSUBB | PCGTB | *
2184 * 3 011 | * | * | * | *
2185 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2186 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2187 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2188 * 7 111 | * | * | PEXT5 | PPAC5
2191 #define MASK_TX79_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2193 TX79_MMI0_PADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI0
,
2194 TX79_MMI0_PSUBW
= (0x01 << 6) | TX79_MMI_CLASS_MMI0
,
2195 TX79_MMI0_PCGTW
= (0x02 << 6) | TX79_MMI_CLASS_MMI0
,
2196 TX79_MMI0_PMAXW
= (0x03 << 6) | TX79_MMI_CLASS_MMI0
,
2197 TX79_MMI0_PADDH
= (0x04 << 6) | TX79_MMI_CLASS_MMI0
,
2198 TX79_MMI0_PSUBH
= (0x05 << 6) | TX79_MMI_CLASS_MMI0
,
2199 TX79_MMI0_PCGTH
= (0x06 << 6) | TX79_MMI_CLASS_MMI0
,
2200 TX79_MMI0_PMAXH
= (0x07 << 6) | TX79_MMI_CLASS_MMI0
,
2201 TX79_MMI0_PADDB
= (0x08 << 6) | TX79_MMI_CLASS_MMI0
,
2202 TX79_MMI0_PSUBB
= (0x09 << 6) | TX79_MMI_CLASS_MMI0
,
2203 TX79_MMI0_PCGTB
= (0x0A << 6) | TX79_MMI_CLASS_MMI0
,
2204 TX79_MMI0_PADDSW
= (0x10 << 6) | TX79_MMI_CLASS_MMI0
,
2205 TX79_MMI0_PSUBSW
= (0x11 << 6) | TX79_MMI_CLASS_MMI0
,
2206 TX79_MMI0_PEXTLW
= (0x12 << 6) | TX79_MMI_CLASS_MMI0
,
2207 TX79_MMI0_PPACW
= (0x13 << 6) | TX79_MMI_CLASS_MMI0
,
2208 TX79_MMI0_PADDSH
= (0x14 << 6) | TX79_MMI_CLASS_MMI0
,
2209 TX79_MMI0_PSUBSH
= (0x15 << 6) | TX79_MMI_CLASS_MMI0
,
2210 TX79_MMI0_PEXTLH
= (0x16 << 6) | TX79_MMI_CLASS_MMI0
,
2211 TX79_MMI0_PPACH
= (0x17 << 6) | TX79_MMI_CLASS_MMI0
,
2212 TX79_MMI0_PADDSB
= (0x18 << 6) | TX79_MMI_CLASS_MMI0
,
2213 TX79_MMI0_PSUBSB
= (0x19 << 6) | TX79_MMI_CLASS_MMI0
,
2214 TX79_MMI0_PEXTLB
= (0x1A << 6) | TX79_MMI_CLASS_MMI0
,
2215 TX79_MMI0_PPACB
= (0x1B << 6) | TX79_MMI_CLASS_MMI0
,
2216 TX79_MMI0_PEXT5
= (0x1E << 6) | TX79_MMI_CLASS_MMI0
,
2217 TX79_MMI0_PPAC5
= (0x1F << 6) | TX79_MMI_CLASS_MMI0
,
2221 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI1:
2224 * +--------+----------------------+--------+--------+
2225 * | MMI | |function| MMI1 |
2226 * +--------+----------------------+--------+--------+
2228 * function bits 7..6
2229 * bits | 0 | 1 | 2 | 3
2230 * 10..8 | 00 | 01 | 10 | 11
2231 * -------+-------+-------+-------+-------
2232 * 0 000 | * | PABSW | PCEQW | PMINW
2233 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2234 * 2 010 | * | * | PCEQB | *
2235 * 3 011 | * | * | * | *
2236 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2237 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2238 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2239 * 7 111 | * | * | * | *
2242 #define MASK_TX79_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2244 TX79_MMI1_PABSW
= (0x01 << 6) | TX79_MMI_CLASS_MMI1
,
2245 TX79_MMI1_PCEQW
= (0x02 << 6) | TX79_MMI_CLASS_MMI1
,
2246 TX79_MMI1_PMINW
= (0x03 << 6) | TX79_MMI_CLASS_MMI1
,
2247 TX79_MMI1_PADSBH
= (0x04 << 6) | TX79_MMI_CLASS_MMI1
,
2248 TX79_MMI1_PABSH
= (0x05 << 6) | TX79_MMI_CLASS_MMI1
,
2249 TX79_MMI1_PCEQH
= (0x06 << 6) | TX79_MMI_CLASS_MMI1
,
2250 TX79_MMI1_PMINH
= (0x07 << 6) | TX79_MMI_CLASS_MMI1
,
2251 TX79_MMI1_PCEQB
= (0x0A << 6) | TX79_MMI_CLASS_MMI1
,
2252 TX79_MMI1_PADDUW
= (0x10 << 6) | TX79_MMI_CLASS_MMI1
,
2253 TX79_MMI1_PSUBUW
= (0x11 << 6) | TX79_MMI_CLASS_MMI1
,
2254 TX79_MMI1_PEXTUW
= (0x12 << 6) | TX79_MMI_CLASS_MMI1
,
2255 TX79_MMI1_PADDUH
= (0x14 << 6) | TX79_MMI_CLASS_MMI1
,
2256 TX79_MMI1_PSUBUH
= (0x15 << 6) | TX79_MMI_CLASS_MMI1
,
2257 TX79_MMI1_PEXTUH
= (0x16 << 6) | TX79_MMI_CLASS_MMI1
,
2258 TX79_MMI1_PADDUB
= (0x18 << 6) | TX79_MMI_CLASS_MMI1
,
2259 TX79_MMI1_PSUBUB
= (0x19 << 6) | TX79_MMI_CLASS_MMI1
,
2260 TX79_MMI1_PEXTUB
= (0x1A << 6) | TX79_MMI_CLASS_MMI1
,
2261 TX79_MMI1_QFSRV
= (0x1B << 6) | TX79_MMI_CLASS_MMI1
,
2265 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI2:
2268 * +--------+----------------------+--------+--------+
2269 * | MMI | |function| MMI2 |
2270 * +--------+----------------------+--------+--------+
2272 * function bits 7..6
2273 * bits | 0 | 1 | 2 | 3
2274 * 10..8 | 00 | 01 | 10 | 11
2275 * -------+-------+-------+-------+-------
2276 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2277 * 1 001 | PMSUBW| * | * | *
2278 * 2 010 | PMFHI | PMFLO | PINTH | *
2279 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2280 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2281 * 5 101 | PMSUBH| PHMSBH| * | *
2282 * 6 110 | * | * | PEXEH | PREVH
2283 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2286 #define MASK_TX79_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2288 TX79_MMI2_PMADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI2
,
2289 TX79_MMI2_PSLLVW
= (0x02 << 6) | TX79_MMI_CLASS_MMI2
,
2290 TX79_MMI2_PSRLVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI2
,
2291 TX79_MMI2_PMSUBW
= (0x04 << 6) | TX79_MMI_CLASS_MMI2
,
2292 TX79_MMI2_PMFHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI2
,
2293 TX79_MMI2_PMFLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI2
,
2294 TX79_MMI2_PINTH
= (0x0A << 6) | TX79_MMI_CLASS_MMI2
,
2295 TX79_MMI2_PMULTW
= (0x0C << 6) | TX79_MMI_CLASS_MMI2
,
2296 TX79_MMI2_PDIVW
= (0x0D << 6) | TX79_MMI_CLASS_MMI2
,
2297 TX79_MMI2_PCPYLD
= (0x0E << 6) | TX79_MMI_CLASS_MMI2
,
2298 TX79_MMI2_PMADDH
= (0x10 << 6) | TX79_MMI_CLASS_MMI2
,
2299 TX79_MMI2_PHMADH
= (0x11 << 6) | TX79_MMI_CLASS_MMI2
,
2300 TX79_MMI2_PAND
= (0x12 << 6) | TX79_MMI_CLASS_MMI2
,
2301 TX79_MMI2_PXOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI2
,
2302 TX79_MMI2_PMSUBH
= (0x14 << 6) | TX79_MMI_CLASS_MMI2
,
2303 TX79_MMI2_PHMSBH
= (0x15 << 6) | TX79_MMI_CLASS_MMI2
,
2304 TX79_MMI2_PEXEH
= (0x1A << 6) | TX79_MMI_CLASS_MMI2
,
2305 TX79_MMI2_PREVH
= (0x1B << 6) | TX79_MMI_CLASS_MMI2
,
2306 TX79_MMI2_PMULTH
= (0x1C << 6) | TX79_MMI_CLASS_MMI2
,
2307 TX79_MMI2_PDIVBW
= (0x1D << 6) | TX79_MMI_CLASS_MMI2
,
2308 TX79_MMI2_PEXEW
= (0x1E << 6) | TX79_MMI_CLASS_MMI2
,
2309 TX79_MMI2_PROT3W
= (0x1F << 6) | TX79_MMI_CLASS_MMI2
,
2313 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI3:
2316 * +--------+----------------------+--------+--------+
2317 * | MMI | |function| MMI3 |
2318 * +--------+----------------------+--------+--------+
2320 * function bits 7..6
2321 * bits | 0 | 1 | 2 | 3
2322 * 10..8 | 00 | 01 | 10 | 11
2323 * -------+-------+-------+-------+-------
2324 * 0 000 |PMADDUW| * | * | PSRAVW
2325 * 1 001 | * | * | * | *
2326 * 2 010 | PMTHI | PMTLO | PINTEH| *
2327 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2328 * 4 100 | * | * | POR | PNOR
2329 * 5 101 | * | * | * | *
2330 * 6 110 | * | * | PEXCH | PCPYH
2331 * 7 111 | * | * | PEXCW | *
2334 #define MASK_TX79_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2336 TX79_MMI3_PMADDUW
= (0x00 << 6) | TX79_MMI_CLASS_MMI3
,
2337 TX79_MMI3_PSRAVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI3
,
2338 TX79_MMI3_PMTHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI3
,
2339 TX79_MMI3_PMTLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI3
,
2340 TX79_MMI3_PINTEH
= (0x0A << 6) | TX79_MMI_CLASS_MMI3
,
2341 TX79_MMI3_PMULTUW
= (0x0C << 6) | TX79_MMI_CLASS_MMI3
,
2342 TX79_MMI3_PDIVUW
= (0x0D << 6) | TX79_MMI_CLASS_MMI3
,
2343 TX79_MMI3_PCPYUD
= (0x0E << 6) | TX79_MMI_CLASS_MMI3
,
2344 TX79_MMI3_POR
= (0x12 << 6) | TX79_MMI_CLASS_MMI3
,
2345 TX79_MMI3_PNOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI3
,
2346 TX79_MMI3_PEXCH
= (0x1A << 6) | TX79_MMI_CLASS_MMI3
,
2347 TX79_MMI3_PCPYH
= (0x1B << 6) | TX79_MMI_CLASS_MMI3
,
2348 TX79_MMI3_PEXCW
= (0x1E << 6) | TX79_MMI_CLASS_MMI3
,
2351 /* global register indices */
2352 static TCGv cpu_gpr
[32], cpu_PC
;
2353 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2354 static TCGv cpu_dspctrl
, btarget
, bcond
;
2355 static TCGv_i32 hflags
;
2356 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2357 static TCGv_i64 fpu_f64
[32];
2358 static TCGv_i64 msa_wr_d
[64];
2361 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2364 #include "exec/gen-icount.h"
/*
 * Wrappers that box an immediate into a temporary TCGv_i32 around a
 * helper call: <n>e<m>i = n TCGv results, m TCGv args, one immediate.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2408 typedef struct DisasContext
{
2409 DisasContextBase base
;
2410 target_ulong saved_pc
;
2411 target_ulong page_start
;
2413 uint64_t insn_flags
;
2414 int32_t CP0_Config1
;
2415 int32_t CP0_Config2
;
2416 int32_t CP0_Config3
;
2417 int32_t CP0_Config5
;
2418 /* Routine used to access memory */
2420 TCGMemOp default_tcg_memop_mask
;
2421 uint32_t hflags
, saved_hflags
;
2422 target_ulong btarget
;
2433 int CP0_LLAddr_shift
;
2442 #define DISAS_STOP DISAS_TARGET_0
2443 #define DISAS_EXIT DISAS_TARGET_1
/* ABI names of the 32 general purpose registers, indexed by register number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

/* Names of the DSP accumulator HI halves. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

/* Names of the DSP accumulator LO halves. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

/* Names of the 32 floating point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Names of the MSA vector registers, one entry per 64-bit half. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};

/* Names of the MXU registers XR1..XR15 plus the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
/* Trace one disassembled instruction when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic (major/minor fields) for debugging. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2509 /* General purpose registers moves. */
2510 static inline void gen_load_gpr (TCGv t
, int reg
)
2513 tcg_gen_movi_tl(t
, 0);
2515 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2518 static inline void gen_store_gpr (TCGv t
, int reg
)
2521 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2524 /* Moves to/from shadow registers. */
2525 static inline void gen_load_srsgpr (int from
, int to
)
2527 TCGv t0
= tcg_temp_new();
2530 tcg_gen_movi_tl(t0
, 0);
2532 TCGv_i32 t2
= tcg_temp_new_i32();
2533 TCGv_ptr addr
= tcg_temp_new_ptr();
2535 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2536 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2537 tcg_gen_andi_i32(t2
, t2
, 0xf);
2538 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2539 tcg_gen_ext_i32_ptr(addr
, t2
);
2540 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2542 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2543 tcg_temp_free_ptr(addr
);
2544 tcg_temp_free_i32(t2
);
2546 gen_store_gpr(t0
, to
);
2550 static inline void gen_store_srsgpr (int from
, int to
)
2553 TCGv t0
= tcg_temp_new();
2554 TCGv_i32 t2
= tcg_temp_new_i32();
2555 TCGv_ptr addr
= tcg_temp_new_ptr();
2557 gen_load_gpr(t0
, from
);
2558 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2559 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2560 tcg_gen_andi_i32(t2
, t2
, 0xf);
2561 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2562 tcg_gen_ext_i32_ptr(addr
, t2
);
2563 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2565 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2566 tcg_temp_free_ptr(addr
);
2567 tcg_temp_free_i32(t2
);
2573 static inline void gen_save_pc(target_ulong pc
)
2575 tcg_gen_movi_tl(cpu_PC
, pc
);
2578 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2580 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2581 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2582 gen_save_pc(ctx
->base
.pc_next
);
2583 ctx
->saved_pc
= ctx
->base
.pc_next
;
2585 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2586 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2587 ctx
->saved_hflags
= ctx
->hflags
;
2588 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2594 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2600 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2602 ctx
->saved_hflags
= ctx
->hflags
;
2603 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2609 ctx
->btarget
= env
->btarget
;
2614 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2616 TCGv_i32 texcp
= tcg_const_i32(excp
);
2617 TCGv_i32 terr
= tcg_const_i32(err
);
2618 save_cpu_state(ctx
, 1);
2619 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2620 tcg_temp_free_i32(terr
);
2621 tcg_temp_free_i32(texcp
);
2622 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2625 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2627 gen_helper_0e0i(raise_exception
, excp
);
2630 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2632 generate_exception_err(ctx
, excp
, 0);
2635 /* Floating point register moves. */
2636 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2638 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2639 generate_exception(ctx
, EXCP_RI
);
2641 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2644 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2647 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2648 generate_exception(ctx
, EXCP_RI
);
2650 t64
= tcg_temp_new_i64();
2651 tcg_gen_extu_i32_i64(t64
, t
);
2652 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2653 tcg_temp_free_i64(t64
);
2656 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2658 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2659 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2661 gen_load_fpr32(ctx
, t
, reg
| 1);
2665 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2667 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2668 TCGv_i64 t64
= tcg_temp_new_i64();
2669 tcg_gen_extu_i32_i64(t64
, t
);
2670 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2671 tcg_temp_free_i64(t64
);
2673 gen_store_fpr32(ctx
, t
, reg
| 1);
2677 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2679 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2680 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2682 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2686 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2688 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2689 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2692 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2693 t0
= tcg_temp_new_i64();
2694 tcg_gen_shri_i64(t0
, t
, 32);
2695 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2696 tcg_temp_free_i64(t0
);
2700 static inline int get_fp_bit (int cc
)
2708 /* Addresses computation */
2709 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2711 tcg_gen_add_tl(ret
, arg0
, arg1
);
2713 #if defined(TARGET_MIPS64)
2714 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2715 tcg_gen_ext32s_i64(ret
, ret
);
2720 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2723 tcg_gen_addi_tl(ret
, base
, ofs
);
2725 #if defined(TARGET_MIPS64)
2726 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2727 tcg_gen_ext32s_i64(ret
, ret
);
2732 /* Addresses computation (translation time) */
2733 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2736 target_long sum
= base
+ offset
;
2738 #if defined(TARGET_MIPS64)
2739 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2746 /* Sign-extract the low 32-bits to a target_long. */
2747 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2749 #if defined(TARGET_MIPS64)
2750 tcg_gen_ext32s_i64(ret
, arg
);
2752 tcg_gen_extrl_i64_i32(ret
, arg
);
2756 /* Sign-extract the high 32-bits to a target_long. */
2757 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2759 #if defined(TARGET_MIPS64)
2760 tcg_gen_sari_i64(ret
, arg
, 32);
2762 tcg_gen_extrh_i64_i32(ret
, arg
);
2766 static inline void check_cp0_enabled(DisasContext
*ctx
)
2768 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2769 generate_exception_err(ctx
, EXCP_CpU
, 0);
2772 static inline void check_cp1_enabled(DisasContext
*ctx
)
2774 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2775 generate_exception_err(ctx
, EXCP_CpU
, 1);
2778 /* Verify that the processor is running with COP1X instructions enabled.
2779 This is associated with the nabla symbol in the MIPS32 and MIPS64
2782 static inline void check_cop1x(DisasContext
*ctx
)
2784 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2785 generate_exception_end(ctx
, EXCP_RI
);
2788 /* Verify that the processor is running with 64-bit floating-point
2789 operations enabled. */
2791 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2793 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2794 generate_exception_end(ctx
, EXCP_RI
);
2798 * Verify if floating point register is valid; an operation is not defined
2799 * if bit 0 of any register specification is set and the FR bit in the
2800 * Status register equals zero, since the register numbers specify an
2801 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2802 * in the Status register equals one, both even and odd register numbers
2803 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2805 * Multiple 64 bit wide registers can be checked by calling
2806 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2808 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2810 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2811 generate_exception_end(ctx
, EXCP_RI
);
2814 /* Verify that the processor is running with DSP instructions enabled.
2815 This is enabled by CP0 Status register MX(24) bit.
2818 static inline void check_dsp(DisasContext
*ctx
)
2820 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2821 if (ctx
->insn_flags
& ASE_DSP
) {
2822 generate_exception_end(ctx
, EXCP_DSPDIS
);
2824 generate_exception_end(ctx
, EXCP_RI
);
2829 static inline void check_dsp_r2(DisasContext
*ctx
)
2831 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2832 if (ctx
->insn_flags
& ASE_DSP
) {
2833 generate_exception_end(ctx
, EXCP_DSPDIS
);
2835 generate_exception_end(ctx
, EXCP_RI
);
2840 static inline void check_dsp_r3(DisasContext
*ctx
)
2842 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2843 if (ctx
->insn_flags
& ASE_DSP
) {
2844 generate_exception_end(ctx
, EXCP_DSPDIS
);
2846 generate_exception_end(ctx
, EXCP_RI
);
2851 /* This code generates a "reserved instruction" exception if the
2852 CPU does not support the instruction set corresponding to flags. */
2853 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2855 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2856 generate_exception_end(ctx
, EXCP_RI
);
2860 /* This code generates a "reserved instruction" exception if the
2861 CPU has corresponding flag set which indicates that the instruction
2862 has been removed. */
2863 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2865 if (unlikely(ctx
->insn_flags
& flags
)) {
2866 generate_exception_end(ctx
, EXCP_RI
);
2871 * The Linux kernel traps certain reserved instruction exceptions to
2872 * emulate the corresponding instructions. QEMU is the kernel in user
2873 * mode, so those traps are emulated by accepting the instructions.
2875 * A reserved instruction exception is generated for flagged CPUs if
2876 * QEMU runs in system mode.
2878 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
2880 #ifndef CONFIG_USER_ONLY
2881 check_insn_opc_removed(ctx
, flags
);
2885 /* This code generates a "reserved instruction" exception if the
2886 CPU does not support 64-bit paired-single (PS) floating point data type */
2887 static inline void check_ps(DisasContext
*ctx
)
2889 if (unlikely(!ctx
->ps
)) {
2890 generate_exception(ctx
, EXCP_RI
);
2892 check_cp1_64bitmode(ctx
);
2895 #ifdef TARGET_MIPS64
2896 /* This code generates a "reserved instruction" exception if 64-bit
2897 instructions are not enabled. */
2898 static inline void check_mips_64(DisasContext
*ctx
)
2900 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
2901 generate_exception_end(ctx
, EXCP_RI
);
2905 #ifndef CONFIG_USER_ONLY
2906 static inline void check_mvh(DisasContext
*ctx
)
2908 if (unlikely(!ctx
->mvh
)) {
2909 generate_exception(ctx
, EXCP_RI
);
2915 * This code generates a "reserved instruction" exception if the
2916 * Config5 XNP bit is set.
2918 static inline void check_xnp(DisasContext
*ctx
)
2920 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2921 generate_exception_end(ctx
, EXCP_RI
);
2925 #ifndef CONFIG_USER_ONLY
2927 * This code generates a "reserved instruction" exception if the
2928 * Config3 PW bit is NOT set.
2930 static inline void check_pw(DisasContext
*ctx
)
2932 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2933 generate_exception_end(ctx
, EXCP_RI
);
2939 * This code generates a "reserved instruction" exception if the
2940 * Config3 MT bit is NOT set.
2942 static inline void check_mt(DisasContext
*ctx
)
2944 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2945 generate_exception_end(ctx
, EXCP_RI
);
2949 #ifndef CONFIG_USER_ONLY
2951 * This code generates a "coprocessor unusable" exception if CP0 is not
2952 * available, and, if that is not the case, generates a "reserved instruction"
2953 * exception if the Config5 MT bit is NOT set. This is needed for availability
2954 * control of some of MT ASE instructions.
2956 static inline void check_cp0_mt(DisasContext
*ctx
)
2958 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2959 generate_exception_err(ctx
, EXCP_CpU
, 0);
2961 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2962 generate_exception_err(ctx
, EXCP_RI
, 0);
2969 * This code generates a "reserved instruction" exception if the
2970 * Config5 NMS bit is set.
2972 static inline void check_nms(DisasContext
*ctx
)
2974 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2975 generate_exception_end(ctx
, EXCP_RI
);
2980 * This code generates a "reserved instruction" exception if the
2981 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
2982 * Config2 TL, and Config5 L2C are unset.
2984 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
2986 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
2987 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
2988 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
2989 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
2990 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
2991 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
2993 generate_exception_end(ctx
, EXCP_RI
);
2998 * This code generates a "reserved instruction" exception if the
2999 * Config5 EVA bit is NOT set.
3001 static inline void check_eva(DisasContext
*ctx
)
3003 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3004 generate_exception_end(ctx
, EXCP_RI
);
3009 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3010 calling interface for 32 and 64-bit FPRs. No sense in changing
3011 all callers for gen_load_fpr32 when we need the CTX parameter for
3013 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3014 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3015 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3016 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3017 int ft, int fs, int cc) \
3019 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3020 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3029 check_cp1_registers(ctx, fs | ft); \
3037 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3038 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3040 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3041 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3042 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3043 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3044 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3045 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3046 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3047 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3048 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3049 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3050 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3051 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3052 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3053 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3054 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3055 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3058 tcg_temp_free_i##bits (fp0); \
3059 tcg_temp_free_i##bits (fp1); \
3062 FOP_CONDS(, 0, d
, FMT_D
, 64)
3063 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3064 FOP_CONDS(, 0, s
, FMT_S
, 32)
3065 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3066 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3067 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3070 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3071 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3072 int ft, int fs, int fd) \
3074 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3075 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3076 if (ifmt == FMT_D) { \
3077 check_cp1_registers(ctx, fs | ft | fd); \
3079 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3080 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3083 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3086 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3089 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3092 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3095 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3098 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3101 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3104 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3107 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3110 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3113 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3116 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3119 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3122 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3125 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3128 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3131 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3134 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3137 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3140 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3143 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3146 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3152 tcg_temp_free_i ## bits (fp0); \
3153 tcg_temp_free_i ## bits (fp1); \
3156 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3157 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3159 #undef gen_ldcmp_fpr32
3160 #undef gen_ldcmp_fpr64
3162 /* load/store instructions. */
3163 #ifdef CONFIG_USER_ONLY
3164 #define OP_LD_ATOMIC(insn,fname) \
3165 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3166 DisasContext *ctx) \
3168 TCGv t0 = tcg_temp_new(); \
3169 tcg_gen_mov_tl(t0, arg1); \
3170 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3171 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3172 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3173 tcg_temp_free(t0); \
3176 #define OP_LD_ATOMIC(insn,fname) \
3177 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3178 DisasContext *ctx) \
3180 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3183 OP_LD_ATOMIC(ll
,ld32s
);
3184 #if defined(TARGET_MIPS64)
3185 OP_LD_ATOMIC(lld
,ld64
);
3189 #ifdef CONFIG_USER_ONLY
3190 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3191 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3192 DisasContext *ctx) \
3194 TCGv t0 = tcg_temp_new(); \
3195 TCGLabel *l1 = gen_new_label(); \
3196 TCGLabel *l2 = gen_new_label(); \
3198 tcg_gen_andi_tl(t0, arg2, almask); \
3199 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3200 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3201 generate_exception(ctx, EXCP_AdES); \
3202 gen_set_label(l1); \
3203 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3204 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3205 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3206 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3207 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3208 generate_exception_end(ctx, EXCP_SC); \
3209 gen_set_label(l2); \
3210 tcg_gen_movi_tl(t0, 0); \
3211 gen_store_gpr(t0, rt); \
3212 tcg_temp_free(t0); \
3215 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3216 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3217 DisasContext *ctx) \
3219 TCGv t0 = tcg_temp_new(); \
3220 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3221 gen_store_gpr(t0, rt); \
3222 tcg_temp_free(t0); \
3225 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3226 #if defined(TARGET_MIPS64)
3227 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3231 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3232 int base
, int offset
)
3235 tcg_gen_movi_tl(addr
, offset
);
3236 } else if (offset
== 0) {
3237 gen_load_gpr(addr
, base
);
3239 tcg_gen_movi_tl(addr
, offset
);
3240 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3244 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3246 target_ulong pc
= ctx
->base
.pc_next
;
3248 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3249 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3254 pc
&= ~(target_ulong
)3;
3259 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3260 int rt
, int base
, int offset
)
3263 int mem_idx
= ctx
->mem_idx
;
3265 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3266 /* Loongson CPU uses a load to zero register for prefetch.
3267 We emulate it as a NOP. On other CPU we must perform the
3268 actual memory access. */
3272 t0
= tcg_temp_new();
3273 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3276 #if defined(TARGET_MIPS64)
3278 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3279 ctx
->default_tcg_memop_mask
);
3280 gen_store_gpr(t0
, rt
);
3283 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3284 ctx
->default_tcg_memop_mask
);
3285 gen_store_gpr(t0
, rt
);
3289 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3290 gen_store_gpr(t0
, rt
);
3293 t1
= tcg_temp_new();
3294 /* Do a byte access to possibly trigger a page
3295 fault with the unaligned address. */
3296 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3297 tcg_gen_andi_tl(t1
, t0
, 7);
3298 #ifndef TARGET_WORDS_BIGENDIAN
3299 tcg_gen_xori_tl(t1
, t1
, 7);
3301 tcg_gen_shli_tl(t1
, t1
, 3);
3302 tcg_gen_andi_tl(t0
, t0
, ~7);
3303 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3304 tcg_gen_shl_tl(t0
, t0
, t1
);
3305 t2
= tcg_const_tl(-1);
3306 tcg_gen_shl_tl(t2
, t2
, t1
);
3307 gen_load_gpr(t1
, rt
);
3308 tcg_gen_andc_tl(t1
, t1
, t2
);
3310 tcg_gen_or_tl(t0
, t0
, t1
);
3312 gen_store_gpr(t0
, rt
);
3315 t1
= tcg_temp_new();
3316 /* Do a byte access to possibly trigger a page
3317 fault with the unaligned address. */
3318 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3319 tcg_gen_andi_tl(t1
, t0
, 7);
3320 #ifdef TARGET_WORDS_BIGENDIAN
3321 tcg_gen_xori_tl(t1
, t1
, 7);
3323 tcg_gen_shli_tl(t1
, t1
, 3);
3324 tcg_gen_andi_tl(t0
, t0
, ~7);
3325 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3326 tcg_gen_shr_tl(t0
, t0
, t1
);
3327 tcg_gen_xori_tl(t1
, t1
, 63);
3328 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3329 tcg_gen_shl_tl(t2
, t2
, t1
);
3330 gen_load_gpr(t1
, rt
);
3331 tcg_gen_and_tl(t1
, t1
, t2
);
3333 tcg_gen_or_tl(t0
, t0
, t1
);
3335 gen_store_gpr(t0
, rt
);
3338 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3339 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3341 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3342 gen_store_gpr(t0
, rt
);
3346 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3347 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3349 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3350 gen_store_gpr(t0
, rt
);
3353 mem_idx
= MIPS_HFLAG_UM
;
3356 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3357 ctx
->default_tcg_memop_mask
);
3358 gen_store_gpr(t0
, rt
);
3361 mem_idx
= MIPS_HFLAG_UM
;
3364 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3365 ctx
->default_tcg_memop_mask
);
3366 gen_store_gpr(t0
, rt
);
3369 mem_idx
= MIPS_HFLAG_UM
;
3372 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3373 ctx
->default_tcg_memop_mask
);
3374 gen_store_gpr(t0
, rt
);
3377 mem_idx
= MIPS_HFLAG_UM
;
3380 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3381 gen_store_gpr(t0
, rt
);
3384 mem_idx
= MIPS_HFLAG_UM
;
3387 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3388 gen_store_gpr(t0
, rt
);
3391 mem_idx
= MIPS_HFLAG_UM
;
3394 t1
= tcg_temp_new();
3395 /* Do a byte access to possibly trigger a page
3396 fault with the unaligned address. */
3397 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3398 tcg_gen_andi_tl(t1
, t0
, 3);
3399 #ifndef TARGET_WORDS_BIGENDIAN
3400 tcg_gen_xori_tl(t1
, t1
, 3);
3402 tcg_gen_shli_tl(t1
, t1
, 3);
3403 tcg_gen_andi_tl(t0
, t0
, ~3);
3404 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3405 tcg_gen_shl_tl(t0
, t0
, t1
);
3406 t2
= tcg_const_tl(-1);
3407 tcg_gen_shl_tl(t2
, t2
, t1
);
3408 gen_load_gpr(t1
, rt
);
3409 tcg_gen_andc_tl(t1
, t1
, t2
);
3411 tcg_gen_or_tl(t0
, t0
, t1
);
3413 tcg_gen_ext32s_tl(t0
, t0
);
3414 gen_store_gpr(t0
, rt
);
3417 mem_idx
= MIPS_HFLAG_UM
;
3420 t1
= tcg_temp_new();
3421 /* Do a byte access to possibly trigger a page
3422 fault with the unaligned address. */
3423 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3424 tcg_gen_andi_tl(t1
, t0
, 3);
3425 #ifdef TARGET_WORDS_BIGENDIAN
3426 tcg_gen_xori_tl(t1
, t1
, 3);
3428 tcg_gen_shli_tl(t1
, t1
, 3);
3429 tcg_gen_andi_tl(t0
, t0
, ~3);
3430 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3431 tcg_gen_shr_tl(t0
, t0
, t1
);
3432 tcg_gen_xori_tl(t1
, t1
, 31);
3433 t2
= tcg_const_tl(0xfffffffeull
);
3434 tcg_gen_shl_tl(t2
, t2
, t1
);
3435 gen_load_gpr(t1
, rt
);
3436 tcg_gen_and_tl(t1
, t1
, t2
);
3438 tcg_gen_or_tl(t0
, t0
, t1
);
3440 tcg_gen_ext32s_tl(t0
, t0
);
3441 gen_store_gpr(t0
, rt
);
3444 mem_idx
= MIPS_HFLAG_UM
;
3448 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3449 gen_store_gpr(t0
, rt
);
3455 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3456 uint32_t reg1
, uint32_t reg2
)
3458 TCGv taddr
= tcg_temp_new();
3459 TCGv_i64 tval
= tcg_temp_new_i64();
3460 TCGv tmp1
= tcg_temp_new();
3461 TCGv tmp2
= tcg_temp_new();
3463 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3464 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3465 #ifdef TARGET_WORDS_BIGENDIAN
3466 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3468 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3470 gen_store_gpr(tmp1
, reg1
);
3471 tcg_temp_free(tmp1
);
3472 gen_store_gpr(tmp2
, reg2
);
3473 tcg_temp_free(tmp2
);
3474 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3475 tcg_temp_free_i64(tval
);
3476 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3477 tcg_temp_free(taddr
);
3481 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3482 int base
, int offset
)
3484 TCGv t0
= tcg_temp_new();
3485 TCGv t1
= tcg_temp_new();
3486 int mem_idx
= ctx
->mem_idx
;
3488 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3489 gen_load_gpr(t1
, rt
);
3491 #if defined(TARGET_MIPS64)
3493 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3494 ctx
->default_tcg_memop_mask
);
3497 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3500 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3504 mem_idx
= MIPS_HFLAG_UM
;
3507 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3508 ctx
->default_tcg_memop_mask
);
3511 mem_idx
= MIPS_HFLAG_UM
;
3514 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3515 ctx
->default_tcg_memop_mask
);
3518 mem_idx
= MIPS_HFLAG_UM
;
3521 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3524 mem_idx
= MIPS_HFLAG_UM
;
3527 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3530 mem_idx
= MIPS_HFLAG_UM
;
3533 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3541 /* Store conditional */
3542 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3543 int base
, int16_t offset
)
3546 int mem_idx
= ctx
->mem_idx
;
3548 #ifdef CONFIG_USER_ONLY
3549 t0
= tcg_temp_local_new();
3550 t1
= tcg_temp_local_new();
3552 t0
= tcg_temp_new();
3553 t1
= tcg_temp_new();
3555 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3556 gen_load_gpr(t1
, rt
);
3558 #if defined(TARGET_MIPS64)
3561 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3565 mem_idx
= MIPS_HFLAG_UM
;
3569 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3576 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3577 uint32_t reg1
, uint32_t reg2
)
3579 TCGv taddr
= tcg_temp_local_new();
3580 TCGv lladdr
= tcg_temp_local_new();
3581 TCGv_i64 tval
= tcg_temp_new_i64();
3582 TCGv_i64 llval
= tcg_temp_new_i64();
3583 TCGv_i64 val
= tcg_temp_new_i64();
3584 TCGv tmp1
= tcg_temp_new();
3585 TCGv tmp2
= tcg_temp_new();
3586 TCGLabel
*lab_fail
= gen_new_label();
3587 TCGLabel
*lab_done
= gen_new_label();
3589 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3591 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3592 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3594 gen_load_gpr(tmp1
, reg1
);
3595 gen_load_gpr(tmp2
, reg2
);
3597 #ifdef TARGET_WORDS_BIGENDIAN
3598 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3600 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3603 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3604 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3605 ctx
->mem_idx
, MO_64
);
3607 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3609 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3611 gen_set_label(lab_fail
);
3614 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3616 gen_set_label(lab_done
);
3617 tcg_gen_movi_tl(lladdr
, -1);
3618 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3621 /* Load and store */
3622 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3625 /* Don't do NOP if destination is zero: we must perform the actual
3630 TCGv_i32 fp0
= tcg_temp_new_i32();
3631 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3632 ctx
->default_tcg_memop_mask
);
3633 gen_store_fpr32(ctx
, fp0
, ft
);
3634 tcg_temp_free_i32(fp0
);
3639 TCGv_i32 fp0
= tcg_temp_new_i32();
3640 gen_load_fpr32(ctx
, fp0
, ft
);
3641 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3642 ctx
->default_tcg_memop_mask
);
3643 tcg_temp_free_i32(fp0
);
3648 TCGv_i64 fp0
= tcg_temp_new_i64();
3649 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3650 ctx
->default_tcg_memop_mask
);
3651 gen_store_fpr64(ctx
, fp0
, ft
);
3652 tcg_temp_free_i64(fp0
);
3657 TCGv_i64 fp0
= tcg_temp_new_i64();
3658 gen_load_fpr64(ctx
, fp0
, ft
);
3659 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3660 ctx
->default_tcg_memop_mask
);
3661 tcg_temp_free_i64(fp0
);
3665 MIPS_INVAL("flt_ldst");
3666 generate_exception_end(ctx
, EXCP_RI
);
3671 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3672 int rs
, int16_t imm
)
3674 TCGv t0
= tcg_temp_new();
3676 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3677 check_cp1_enabled(ctx
);
3681 check_insn(ctx
, ISA_MIPS2
);
3684 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3685 gen_flt_ldst(ctx
, op
, rt
, t0
);
3688 generate_exception_err(ctx
, EXCP_CpU
, 1);
3693 /* Arithmetic with immediate operand */
3694 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3695 int rt
, int rs
, int imm
)
3697 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3699 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3700 /* If no destination, treat it as a NOP.
3701 For addi, we must generate the overflow exception when needed. */
3707 TCGv t0
= tcg_temp_local_new();
3708 TCGv t1
= tcg_temp_new();
3709 TCGv t2
= tcg_temp_new();
3710 TCGLabel
*l1
= gen_new_label();
3712 gen_load_gpr(t1
, rs
);
3713 tcg_gen_addi_tl(t0
, t1
, uimm
);
3714 tcg_gen_ext32s_tl(t0
, t0
);
3716 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3717 tcg_gen_xori_tl(t2
, t0
, uimm
);
3718 tcg_gen_and_tl(t1
, t1
, t2
);
3720 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3722 /* operands of same sign, result different sign */
3723 generate_exception(ctx
, EXCP_OVERFLOW
);
3725 tcg_gen_ext32s_tl(t0
, t0
);
3726 gen_store_gpr(t0
, rt
);
3732 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3733 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3735 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3738 #if defined(TARGET_MIPS64)
3741 TCGv t0
= tcg_temp_local_new();
3742 TCGv t1
= tcg_temp_new();
3743 TCGv t2
= tcg_temp_new();
3744 TCGLabel
*l1
= gen_new_label();
3746 gen_load_gpr(t1
, rs
);
3747 tcg_gen_addi_tl(t0
, t1
, uimm
);
3749 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3750 tcg_gen_xori_tl(t2
, t0
, uimm
);
3751 tcg_gen_and_tl(t1
, t1
, t2
);
3753 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3755 /* operands of same sign, result different sign */
3756 generate_exception(ctx
, EXCP_OVERFLOW
);
3758 gen_store_gpr(t0
, rt
);
3764 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3766 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3773 /* Logic with immediate operand */
3774 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3775 int rt
, int rs
, int16_t imm
)
3780 /* If no destination, treat it as a NOP. */
3783 uimm
= (uint16_t)imm
;
3786 if (likely(rs
!= 0))
3787 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3789 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3793 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3795 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3798 if (likely(rs
!= 0))
3799 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3801 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3804 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3806 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3807 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3809 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3818 /* Set on less than with immediate operand */
3819 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3820 int rt
, int rs
, int16_t imm
)
3822 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3826 /* If no destination, treat it as a NOP. */
3829 t0
= tcg_temp_new();
3830 gen_load_gpr(t0
, rs
);
3833 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3836 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3842 /* Shifts with immediate operand */
3843 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3844 int rt
, int rs
, int16_t imm
)
3846 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3850 /* If no destination, treat it as a NOP. */
3854 t0
= tcg_temp_new();
3855 gen_load_gpr(t0
, rs
);
3858 tcg_gen_shli_tl(t0
, t0
, uimm
);
3859 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3862 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3866 tcg_gen_ext32u_tl(t0
, t0
);
3867 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3869 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3874 TCGv_i32 t1
= tcg_temp_new_i32();
3876 tcg_gen_trunc_tl_i32(t1
, t0
);
3877 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3878 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3879 tcg_temp_free_i32(t1
);
3881 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3884 #if defined(TARGET_MIPS64)
3886 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3889 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3892 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3896 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3898 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3902 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3905 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3908 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3911 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3919 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3920 int rd
, int rs
, int rt
)
3922 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3923 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3924 /* If no destination, treat it as a NOP.
3925 For add & sub, we must generate the overflow exception when needed. */
3932 TCGv t0
= tcg_temp_local_new();
3933 TCGv t1
= tcg_temp_new();
3934 TCGv t2
= tcg_temp_new();
3935 TCGLabel
*l1
= gen_new_label();
3937 gen_load_gpr(t1
, rs
);
3938 gen_load_gpr(t2
, rt
);
3939 tcg_gen_add_tl(t0
, t1
, t2
);
3940 tcg_gen_ext32s_tl(t0
, t0
);
3941 tcg_gen_xor_tl(t1
, t1
, t2
);
3942 tcg_gen_xor_tl(t2
, t0
, t2
);
3943 tcg_gen_andc_tl(t1
, t2
, t1
);
3945 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3947 /* operands of same sign, result different sign */
3948 generate_exception(ctx
, EXCP_OVERFLOW
);
3950 gen_store_gpr(t0
, rd
);
3955 if (rs
!= 0 && rt
!= 0) {
3956 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3957 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3958 } else if (rs
== 0 && rt
!= 0) {
3959 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3960 } else if (rs
!= 0 && rt
== 0) {
3961 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3963 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3968 TCGv t0
= tcg_temp_local_new();
3969 TCGv t1
= tcg_temp_new();
3970 TCGv t2
= tcg_temp_new();
3971 TCGLabel
*l1
= gen_new_label();
3973 gen_load_gpr(t1
, rs
);
3974 gen_load_gpr(t2
, rt
);
3975 tcg_gen_sub_tl(t0
, t1
, t2
);
3976 tcg_gen_ext32s_tl(t0
, t0
);
3977 tcg_gen_xor_tl(t2
, t1
, t2
);
3978 tcg_gen_xor_tl(t1
, t0
, t1
);
3979 tcg_gen_and_tl(t1
, t1
, t2
);
3981 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3983 /* operands of different sign, first operand and result different sign */
3984 generate_exception(ctx
, EXCP_OVERFLOW
);
3986 gen_store_gpr(t0
, rd
);
3991 if (rs
!= 0 && rt
!= 0) {
3992 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3993 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3994 } else if (rs
== 0 && rt
!= 0) {
3995 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3996 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3997 } else if (rs
!= 0 && rt
== 0) {
3998 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4000 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4003 #if defined(TARGET_MIPS64)
4006 TCGv t0
= tcg_temp_local_new();
4007 TCGv t1
= tcg_temp_new();
4008 TCGv t2
= tcg_temp_new();
4009 TCGLabel
*l1
= gen_new_label();
4011 gen_load_gpr(t1
, rs
);
4012 gen_load_gpr(t2
, rt
);
4013 tcg_gen_add_tl(t0
, t1
, t2
);
4014 tcg_gen_xor_tl(t1
, t1
, t2
);
4015 tcg_gen_xor_tl(t2
, t0
, t2
);
4016 tcg_gen_andc_tl(t1
, t2
, t1
);
4018 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4020 /* operands of same sign, result different sign */
4021 generate_exception(ctx
, EXCP_OVERFLOW
);
4023 gen_store_gpr(t0
, rd
);
4028 if (rs
!= 0 && rt
!= 0) {
4029 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4030 } else if (rs
== 0 && rt
!= 0) {
4031 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4032 } else if (rs
!= 0 && rt
== 0) {
4033 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4035 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4040 TCGv t0
= tcg_temp_local_new();
4041 TCGv t1
= tcg_temp_new();
4042 TCGv t2
= tcg_temp_new();
4043 TCGLabel
*l1
= gen_new_label();
4045 gen_load_gpr(t1
, rs
);
4046 gen_load_gpr(t2
, rt
);
4047 tcg_gen_sub_tl(t0
, t1
, t2
);
4048 tcg_gen_xor_tl(t2
, t1
, t2
);
4049 tcg_gen_xor_tl(t1
, t0
, t1
);
4050 tcg_gen_and_tl(t1
, t1
, t2
);
4052 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4054 /* operands of different sign, first operand and result different sign */
4055 generate_exception(ctx
, EXCP_OVERFLOW
);
4057 gen_store_gpr(t0
, rd
);
4062 if (rs
!= 0 && rt
!= 0) {
4063 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4064 } else if (rs
== 0 && rt
!= 0) {
4065 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4066 } else if (rs
!= 0 && rt
== 0) {
4067 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4069 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4074 if (likely(rs
!= 0 && rt
!= 0)) {
4075 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4076 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4078 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4084 /* Conditional move */
4085 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4086 int rd
, int rs
, int rt
)
4091 /* If no destination, treat it as a NOP. */
4095 t0
= tcg_temp_new();
4096 gen_load_gpr(t0
, rt
);
4097 t1
= tcg_const_tl(0);
4098 t2
= tcg_temp_new();
4099 gen_load_gpr(t2
, rs
);
4102 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4105 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4108 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4111 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4120 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4121 int rd
, int rs
, int rt
)
4124 /* If no destination, treat it as a NOP. */
4130 if (likely(rs
!= 0 && rt
!= 0)) {
4131 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4133 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4137 if (rs
!= 0 && rt
!= 0) {
4138 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4139 } else if (rs
== 0 && rt
!= 0) {
4140 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4141 } else if (rs
!= 0 && rt
== 0) {
4142 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4144 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4148 if (likely(rs
!= 0 && rt
!= 0)) {
4149 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4150 } else if (rs
== 0 && rt
!= 0) {
4151 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4152 } else if (rs
!= 0 && rt
== 0) {
4153 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4155 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4159 if (likely(rs
!= 0 && rt
!= 0)) {
4160 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4161 } else if (rs
== 0 && rt
!= 0) {
4162 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4163 } else if (rs
!= 0 && rt
== 0) {
4164 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4166 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4172 /* Set on lower than */
4173 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4174 int rd
, int rs
, int rt
)
4179 /* If no destination, treat it as a NOP. */
4183 t0
= tcg_temp_new();
4184 t1
= tcg_temp_new();
4185 gen_load_gpr(t0
, rs
);
4186 gen_load_gpr(t1
, rt
);
4189 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4192 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4200 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4201 int rd
, int rs
, int rt
)
4206 /* If no destination, treat it as a NOP.
4207 For add & sub, we must generate the overflow exception when needed. */
4211 t0
= tcg_temp_new();
4212 t1
= tcg_temp_new();
4213 gen_load_gpr(t0
, rs
);
4214 gen_load_gpr(t1
, rt
);
4217 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4218 tcg_gen_shl_tl(t0
, t1
, t0
);
4219 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4222 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4223 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4226 tcg_gen_ext32u_tl(t1
, t1
);
4227 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4228 tcg_gen_shr_tl(t0
, t1
, t0
);
4229 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4233 TCGv_i32 t2
= tcg_temp_new_i32();
4234 TCGv_i32 t3
= tcg_temp_new_i32();
4236 tcg_gen_trunc_tl_i32(t2
, t0
);
4237 tcg_gen_trunc_tl_i32(t3
, t1
);
4238 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4239 tcg_gen_rotr_i32(t2
, t3
, t2
);
4240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4241 tcg_temp_free_i32(t2
);
4242 tcg_temp_free_i32(t3
);
4245 #if defined(TARGET_MIPS64)
4247 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4248 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4251 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4252 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4255 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4256 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4259 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4260 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4268 /* Arithmetic on HI/LO registers */
4269 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4271 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== TX79_MMI_MFHI1
||
4272 opc
== OPC_MFLO
|| opc
== TX79_MMI_MFLO1
)) {
4278 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4285 case TX79_MMI_MFHI1
:
4286 #if defined(TARGET_MIPS64)
4288 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4292 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4296 case TX79_MMI_MFLO1
:
4297 #if defined(TARGET_MIPS64)
4299 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4303 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4307 case TX79_MMI_MTHI1
:
4309 #if defined(TARGET_MIPS64)
4311 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4315 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4318 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4322 case TX79_MMI_MTLO1
:
4324 #if defined(TARGET_MIPS64)
4326 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4330 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4333 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4339 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4342 TCGv t0
= tcg_const_tl(addr
);
4343 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4344 gen_store_gpr(t0
, reg
);
4348 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4354 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4357 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4358 addr
= addr_add(ctx
, pc
, offset
);
4359 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4363 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4364 addr
= addr_add(ctx
, pc
, offset
);
4365 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4367 #if defined(TARGET_MIPS64)
4370 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4371 addr
= addr_add(ctx
, pc
, offset
);
4372 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4376 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4379 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4380 addr
= addr_add(ctx
, pc
, offset
);
4381 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4386 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4387 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4388 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4391 #if defined(TARGET_MIPS64)
4392 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4393 case R6_OPC_LDPC
+ (1 << 16):
4394 case R6_OPC_LDPC
+ (2 << 16):
4395 case R6_OPC_LDPC
+ (3 << 16):
4397 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4398 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4399 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4403 MIPS_INVAL("OPC_PCREL");
4404 generate_exception_end(ctx
, EXCP_RI
);
4411 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4420 t0
= tcg_temp_new();
4421 t1
= tcg_temp_new();
4423 gen_load_gpr(t0
, rs
);
4424 gen_load_gpr(t1
, rt
);
4429 TCGv t2
= tcg_temp_new();
4430 TCGv t3
= tcg_temp_new();
4431 tcg_gen_ext32s_tl(t0
, t0
);
4432 tcg_gen_ext32s_tl(t1
, t1
);
4433 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4434 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4435 tcg_gen_and_tl(t2
, t2
, t3
);
4436 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4437 tcg_gen_or_tl(t2
, t2
, t3
);
4438 tcg_gen_movi_tl(t3
, 0);
4439 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4440 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4441 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4448 TCGv t2
= tcg_temp_new();
4449 TCGv t3
= tcg_temp_new();
4450 tcg_gen_ext32s_tl(t0
, t0
);
4451 tcg_gen_ext32s_tl(t1
, t1
);
4452 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4453 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4454 tcg_gen_and_tl(t2
, t2
, t3
);
4455 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4456 tcg_gen_or_tl(t2
, t2
, t3
);
4457 tcg_gen_movi_tl(t3
, 0);
4458 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4459 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4460 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4467 TCGv t2
= tcg_const_tl(0);
4468 TCGv t3
= tcg_const_tl(1);
4469 tcg_gen_ext32u_tl(t0
, t0
);
4470 tcg_gen_ext32u_tl(t1
, t1
);
4471 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4472 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4473 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4480 TCGv t2
= tcg_const_tl(0);
4481 TCGv t3
= tcg_const_tl(1);
4482 tcg_gen_ext32u_tl(t0
, t0
);
4483 tcg_gen_ext32u_tl(t1
, t1
);
4484 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4485 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4486 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4493 TCGv_i32 t2
= tcg_temp_new_i32();
4494 TCGv_i32 t3
= tcg_temp_new_i32();
4495 tcg_gen_trunc_tl_i32(t2
, t0
);
4496 tcg_gen_trunc_tl_i32(t3
, t1
);
4497 tcg_gen_mul_i32(t2
, t2
, t3
);
4498 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4499 tcg_temp_free_i32(t2
);
4500 tcg_temp_free_i32(t3
);
4505 TCGv_i32 t2
= tcg_temp_new_i32();
4506 TCGv_i32 t3
= tcg_temp_new_i32();
4507 tcg_gen_trunc_tl_i32(t2
, t0
);
4508 tcg_gen_trunc_tl_i32(t3
, t1
);
4509 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4510 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4511 tcg_temp_free_i32(t2
);
4512 tcg_temp_free_i32(t3
);
4517 TCGv_i32 t2
= tcg_temp_new_i32();
4518 TCGv_i32 t3
= tcg_temp_new_i32();
4519 tcg_gen_trunc_tl_i32(t2
, t0
);
4520 tcg_gen_trunc_tl_i32(t3
, t1
);
4521 tcg_gen_mul_i32(t2
, t2
, t3
);
4522 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4523 tcg_temp_free_i32(t2
);
4524 tcg_temp_free_i32(t3
);
4529 TCGv_i32 t2
= tcg_temp_new_i32();
4530 TCGv_i32 t3
= tcg_temp_new_i32();
4531 tcg_gen_trunc_tl_i32(t2
, t0
);
4532 tcg_gen_trunc_tl_i32(t3
, t1
);
4533 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4534 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4535 tcg_temp_free_i32(t2
);
4536 tcg_temp_free_i32(t3
);
4539 #if defined(TARGET_MIPS64)
4542 TCGv t2
= tcg_temp_new();
4543 TCGv t3
= tcg_temp_new();
4544 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4545 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4546 tcg_gen_and_tl(t2
, t2
, t3
);
4547 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4548 tcg_gen_or_tl(t2
, t2
, t3
);
4549 tcg_gen_movi_tl(t3
, 0);
4550 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4551 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4558 TCGv t2
= tcg_temp_new();
4559 TCGv t3
= tcg_temp_new();
4560 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4561 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4562 tcg_gen_and_tl(t2
, t2
, t3
);
4563 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4564 tcg_gen_or_tl(t2
, t2
, t3
);
4565 tcg_gen_movi_tl(t3
, 0);
4566 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4567 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4574 TCGv t2
= tcg_const_tl(0);
4575 TCGv t3
= tcg_const_tl(1);
4576 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4577 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4584 TCGv t2
= tcg_const_tl(0);
4585 TCGv t3
= tcg_const_tl(1);
4586 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4587 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4593 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4597 TCGv t2
= tcg_temp_new();
4598 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4603 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4607 TCGv t2
= tcg_temp_new();
4608 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4614 MIPS_INVAL("r6 mul/div");
4615 generate_exception_end(ctx
, EXCP_RI
);
4623 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4624 int acc
, int rs
, int rt
)
4628 t0
= tcg_temp_new();
4629 t1
= tcg_temp_new();
4631 gen_load_gpr(t0
, rs
);
4632 gen_load_gpr(t1
, rt
);
4635 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4644 TCGv t2
= tcg_temp_new();
4645 TCGv t3
= tcg_temp_new();
4646 tcg_gen_ext32s_tl(t0
, t0
);
4647 tcg_gen_ext32s_tl(t1
, t1
);
4648 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4649 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4650 tcg_gen_and_tl(t2
, t2
, t3
);
4651 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4652 tcg_gen_or_tl(t2
, t2
, t3
);
4653 tcg_gen_movi_tl(t3
, 0);
4654 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4655 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4656 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4657 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4658 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4664 case TX79_MMI_DIVU1
:
4666 TCGv t2
= tcg_const_tl(0);
4667 TCGv t3
= tcg_const_tl(1);
4668 tcg_gen_ext32u_tl(t0
, t0
);
4669 tcg_gen_ext32u_tl(t1
, t1
);
4670 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4671 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4672 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4673 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4674 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4681 TCGv_i32 t2
= tcg_temp_new_i32();
4682 TCGv_i32 t3
= tcg_temp_new_i32();
4683 tcg_gen_trunc_tl_i32(t2
, t0
);
4684 tcg_gen_trunc_tl_i32(t3
, t1
);
4685 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4686 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4687 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4688 tcg_temp_free_i32(t2
);
4689 tcg_temp_free_i32(t3
);
4694 TCGv_i32 t2
= tcg_temp_new_i32();
4695 TCGv_i32 t3
= tcg_temp_new_i32();
4696 tcg_gen_trunc_tl_i32(t2
, t0
);
4697 tcg_gen_trunc_tl_i32(t3
, t1
);
4698 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4699 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4700 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4701 tcg_temp_free_i32(t2
);
4702 tcg_temp_free_i32(t3
);
4705 #if defined(TARGET_MIPS64)
4708 TCGv t2
= tcg_temp_new();
4709 TCGv t3
= tcg_temp_new();
4710 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4711 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4712 tcg_gen_and_tl(t2
, t2
, t3
);
4713 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4714 tcg_gen_or_tl(t2
, t2
, t3
);
4715 tcg_gen_movi_tl(t3
, 0);
4716 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4717 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4718 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4725 TCGv t2
= tcg_const_tl(0);
4726 TCGv t3
= tcg_const_tl(1);
4727 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4728 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4729 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4735 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4738 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4743 TCGv_i64 t2
= tcg_temp_new_i64();
4744 TCGv_i64 t3
= tcg_temp_new_i64();
4746 tcg_gen_ext_tl_i64(t2
, t0
);
4747 tcg_gen_ext_tl_i64(t3
, t1
);
4748 tcg_gen_mul_i64(t2
, t2
, t3
);
4749 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4750 tcg_gen_add_i64(t2
, t2
, t3
);
4751 tcg_temp_free_i64(t3
);
4752 gen_move_low32(cpu_LO
[acc
], t2
);
4753 gen_move_high32(cpu_HI
[acc
], t2
);
4754 tcg_temp_free_i64(t2
);
4759 TCGv_i64 t2
= tcg_temp_new_i64();
4760 TCGv_i64 t3
= tcg_temp_new_i64();
4762 tcg_gen_ext32u_tl(t0
, t0
);
4763 tcg_gen_ext32u_tl(t1
, t1
);
4764 tcg_gen_extu_tl_i64(t2
, t0
);
4765 tcg_gen_extu_tl_i64(t3
, t1
);
4766 tcg_gen_mul_i64(t2
, t2
, t3
);
4767 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4768 tcg_gen_add_i64(t2
, t2
, t3
);
4769 tcg_temp_free_i64(t3
);
4770 gen_move_low32(cpu_LO
[acc
], t2
);
4771 gen_move_high32(cpu_HI
[acc
], t2
);
4772 tcg_temp_free_i64(t2
);
4777 TCGv_i64 t2
= tcg_temp_new_i64();
4778 TCGv_i64 t3
= tcg_temp_new_i64();
4780 tcg_gen_ext_tl_i64(t2
, t0
);
4781 tcg_gen_ext_tl_i64(t3
, t1
);
4782 tcg_gen_mul_i64(t2
, t2
, t3
);
4783 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4784 tcg_gen_sub_i64(t2
, t3
, t2
);
4785 tcg_temp_free_i64(t3
);
4786 gen_move_low32(cpu_LO
[acc
], t2
);
4787 gen_move_high32(cpu_HI
[acc
], t2
);
4788 tcg_temp_free_i64(t2
);
4793 TCGv_i64 t2
= tcg_temp_new_i64();
4794 TCGv_i64 t3
= tcg_temp_new_i64();
4796 tcg_gen_ext32u_tl(t0
, t0
);
4797 tcg_gen_ext32u_tl(t1
, t1
);
4798 tcg_gen_extu_tl_i64(t2
, t0
);
4799 tcg_gen_extu_tl_i64(t3
, t1
);
4800 tcg_gen_mul_i64(t2
, t2
, t3
);
4801 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4802 tcg_gen_sub_i64(t2
, t3
, t2
);
4803 tcg_temp_free_i64(t3
);
4804 gen_move_low32(cpu_LO
[acc
], t2
);
4805 gen_move_high32(cpu_HI
[acc
], t2
);
4806 tcg_temp_free_i64(t2
);
4810 MIPS_INVAL("mul/div");
4811 generate_exception_end(ctx
, EXCP_RI
);
4820 * These MULT and MULTU instructions implemented in for example the
4821 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4822 * architectures are special three-operand variants with the syntax
4824 * MULT[U][1] rd, rs, rt
4828 * (rd, LO, HI) <- rs * rt
4830 * where the low-order 32-bits of the result is placed into both the
4831 * GPR rd and the special register LO. The high-order 32-bits of the
4832 * result is placed into the special register HI.
4834 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4835 * which is the zero register that always reads as 0.
4837 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4838 int rd
, int rs
, int rt
)
4840 TCGv t0
= tcg_temp_new();
4841 TCGv t1
= tcg_temp_new();
4844 gen_load_gpr(t0
, rs
);
4845 gen_load_gpr(t1
, rt
);
4848 case TX79_MMI_MULT1
:
4853 TCGv_i32 t2
= tcg_temp_new_i32();
4854 TCGv_i32 t3
= tcg_temp_new_i32();
4855 tcg_gen_trunc_tl_i32(t2
, t0
);
4856 tcg_gen_trunc_tl_i32(t3
, t1
);
4857 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4859 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4861 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4862 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4863 tcg_temp_free_i32(t2
);
4864 tcg_temp_free_i32(t3
);
4867 case TX79_MMI_MULTU1
:
4872 TCGv_i32 t2
= tcg_temp_new_i32();
4873 TCGv_i32 t3
= tcg_temp_new_i32();
4874 tcg_gen_trunc_tl_i32(t2
, t0
);
4875 tcg_gen_trunc_tl_i32(t3
, t1
);
4876 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4878 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4880 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4881 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4882 tcg_temp_free_i32(t2
);
4883 tcg_temp_free_i32(t3
);
4887 MIPS_INVAL("mul TXx9");
4888 generate_exception_end(ctx
, EXCP_RI
);
4897 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4898 int rd
, int rs
, int rt
)
4900 TCGv t0
= tcg_temp_new();
4901 TCGv t1
= tcg_temp_new();
4903 gen_load_gpr(t0
, rs
);
4904 gen_load_gpr(t1
, rt
);
4907 case OPC_VR54XX_MULS
:
4908 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4910 case OPC_VR54XX_MULSU
:
4911 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4913 case OPC_VR54XX_MACC
:
4914 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4916 case OPC_VR54XX_MACCU
:
4917 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4919 case OPC_VR54XX_MSAC
:
4920 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4922 case OPC_VR54XX_MSACU
:
4923 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4925 case OPC_VR54XX_MULHI
:
4926 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4928 case OPC_VR54XX_MULHIU
:
4929 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4931 case OPC_VR54XX_MULSHI
:
4932 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4934 case OPC_VR54XX_MULSHIU
:
4935 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4937 case OPC_VR54XX_MACCHI
:
4938 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4940 case OPC_VR54XX_MACCHIU
:
4941 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4943 case OPC_VR54XX_MSACHI
:
4944 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4946 case OPC_VR54XX_MSACHIU
:
4947 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4950 MIPS_INVAL("mul vr54xx");
4951 generate_exception_end(ctx
, EXCP_RI
);
4954 gen_store_gpr(t0
, rd
);
4961 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4971 gen_load_gpr(t0
, rs
);
4976 #if defined(TARGET_MIPS64)
4980 tcg_gen_not_tl(t0
, t0
);
4989 tcg_gen_ext32u_tl(t0
, t0
);
4990 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4991 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4993 #if defined(TARGET_MIPS64)
4998 tcg_gen_clzi_i64(t0
, t0
, 64);
5004 /* Godson integer instructions */
5005 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5006 int rd
, int rs
, int rt
)
5018 case OPC_MULTU_G_2E
:
5019 case OPC_MULTU_G_2F
:
5020 #if defined(TARGET_MIPS64)
5021 case OPC_DMULT_G_2E
:
5022 case OPC_DMULT_G_2F
:
5023 case OPC_DMULTU_G_2E
:
5024 case OPC_DMULTU_G_2F
:
5026 t0
= tcg_temp_new();
5027 t1
= tcg_temp_new();
5030 t0
= tcg_temp_local_new();
5031 t1
= tcg_temp_local_new();
5035 gen_load_gpr(t0
, rs
);
5036 gen_load_gpr(t1
, rt
);
5041 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5042 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5044 case OPC_MULTU_G_2E
:
5045 case OPC_MULTU_G_2F
:
5046 tcg_gen_ext32u_tl(t0
, t0
);
5047 tcg_gen_ext32u_tl(t1
, t1
);
5048 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5049 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5054 TCGLabel
*l1
= gen_new_label();
5055 TCGLabel
*l2
= gen_new_label();
5056 TCGLabel
*l3
= gen_new_label();
5057 tcg_gen_ext32s_tl(t0
, t0
);
5058 tcg_gen_ext32s_tl(t1
, t1
);
5059 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5060 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5063 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5064 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5065 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5068 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5069 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5076 TCGLabel
*l1
= gen_new_label();
5077 TCGLabel
*l2
= gen_new_label();
5078 tcg_gen_ext32u_tl(t0
, t0
);
5079 tcg_gen_ext32u_tl(t1
, t1
);
5080 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5081 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5084 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5085 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5092 TCGLabel
*l1
= gen_new_label();
5093 TCGLabel
*l2
= gen_new_label();
5094 TCGLabel
*l3
= gen_new_label();
5095 tcg_gen_ext32u_tl(t0
, t0
);
5096 tcg_gen_ext32u_tl(t1
, t1
);
5097 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5098 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5099 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5101 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5104 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5105 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5112 TCGLabel
*l1
= gen_new_label();
5113 TCGLabel
*l2
= gen_new_label();
5114 tcg_gen_ext32u_tl(t0
, t0
);
5115 tcg_gen_ext32u_tl(t1
, t1
);
5116 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5117 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5120 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5121 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5125 #if defined(TARGET_MIPS64)
5126 case OPC_DMULT_G_2E
:
5127 case OPC_DMULT_G_2F
:
5128 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5130 case OPC_DMULTU_G_2E
:
5131 case OPC_DMULTU_G_2F
:
5132 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5137 TCGLabel
*l1
= gen_new_label();
5138 TCGLabel
*l2
= gen_new_label();
5139 TCGLabel
*l3
= gen_new_label();
5140 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5141 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5144 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5145 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5146 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5149 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5153 case OPC_DDIVU_G_2E
:
5154 case OPC_DDIVU_G_2F
:
5156 TCGLabel
*l1
= gen_new_label();
5157 TCGLabel
*l2
= gen_new_label();
5158 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5159 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5162 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5169 TCGLabel
*l1
= gen_new_label();
5170 TCGLabel
*l2
= gen_new_label();
5171 TCGLabel
*l3
= gen_new_label();
5172 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5173 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5174 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5176 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5179 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5183 case OPC_DMODU_G_2E
:
5184 case OPC_DMODU_G_2F
:
5186 TCGLabel
*l1
= gen_new_label();
5187 TCGLabel
*l2
= gen_new_label();
5188 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5189 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5192 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5203 /* Loongson multimedia instructions */
5204 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5206 uint32_t opc
, shift_max
;
5209 opc
= MASK_LMI(ctx
->opcode
);
5215 t0
= tcg_temp_local_new_i64();
5216 t1
= tcg_temp_local_new_i64();
5219 t0
= tcg_temp_new_i64();
5220 t1
= tcg_temp_new_i64();
5224 check_cp1_enabled(ctx
);
5225 gen_load_fpr64(ctx
, t0
, rs
);
5226 gen_load_fpr64(ctx
, t1
, rt
);
5228 #define LMI_HELPER(UP, LO) \
5229 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5230 #define LMI_HELPER_1(UP, LO) \
5231 case OPC_##UP: gen_helper_##LO(t0, t0); break
5232 #define LMI_DIRECT(UP, LO, OP) \
5233 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5236 LMI_HELPER(PADDSH
, paddsh
);
5237 LMI_HELPER(PADDUSH
, paddush
);
5238 LMI_HELPER(PADDH
, paddh
);
5239 LMI_HELPER(PADDW
, paddw
);
5240 LMI_HELPER(PADDSB
, paddsb
);
5241 LMI_HELPER(PADDUSB
, paddusb
);
5242 LMI_HELPER(PADDB
, paddb
);
5244 LMI_HELPER(PSUBSH
, psubsh
);
5245 LMI_HELPER(PSUBUSH
, psubush
);
5246 LMI_HELPER(PSUBH
, psubh
);
5247 LMI_HELPER(PSUBW
, psubw
);
5248 LMI_HELPER(PSUBSB
, psubsb
);
5249 LMI_HELPER(PSUBUSB
, psubusb
);
5250 LMI_HELPER(PSUBB
, psubb
);
5252 LMI_HELPER(PSHUFH
, pshufh
);
5253 LMI_HELPER(PACKSSWH
, packsswh
);
5254 LMI_HELPER(PACKSSHB
, packsshb
);
5255 LMI_HELPER(PACKUSHB
, packushb
);
5257 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5258 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5259 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5260 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5261 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5262 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5264 LMI_HELPER(PAVGH
, pavgh
);
5265 LMI_HELPER(PAVGB
, pavgb
);
5266 LMI_HELPER(PMAXSH
, pmaxsh
);
5267 LMI_HELPER(PMINSH
, pminsh
);
5268 LMI_HELPER(PMAXUB
, pmaxub
);
5269 LMI_HELPER(PMINUB
, pminub
);
5271 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5272 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5273 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5274 LMI_HELPER(PCMPGTH
, pcmpgth
);
5275 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5276 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5278 LMI_HELPER(PSLLW
, psllw
);
5279 LMI_HELPER(PSLLH
, psllh
);
5280 LMI_HELPER(PSRLW
, psrlw
);
5281 LMI_HELPER(PSRLH
, psrlh
);
5282 LMI_HELPER(PSRAW
, psraw
);
5283 LMI_HELPER(PSRAH
, psrah
);
5285 LMI_HELPER(PMULLH
, pmullh
);
5286 LMI_HELPER(PMULHH
, pmulhh
);
5287 LMI_HELPER(PMULHUH
, pmulhuh
);
5288 LMI_HELPER(PMADDHW
, pmaddhw
);
5290 LMI_HELPER(PASUBUB
, pasubub
);
5291 LMI_HELPER_1(BIADD
, biadd
);
5292 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5294 LMI_DIRECT(PADDD
, paddd
, add
);
5295 LMI_DIRECT(PSUBD
, psubd
, sub
);
5296 LMI_DIRECT(XOR_CP2
, xor, xor);
5297 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5298 LMI_DIRECT(AND_CP2
, and, and);
5299 LMI_DIRECT(OR_CP2
, or, or);
5302 tcg_gen_andc_i64(t0
, t1
, t0
);
5306 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5309 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5312 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5315 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5319 tcg_gen_andi_i64(t1
, t1
, 3);
5320 tcg_gen_shli_i64(t1
, t1
, 4);
5321 tcg_gen_shr_i64(t0
, t0
, t1
);
5322 tcg_gen_ext16u_i64(t0
, t0
);
5326 tcg_gen_add_i64(t0
, t0
, t1
);
5327 tcg_gen_ext32s_i64(t0
, t0
);
5330 tcg_gen_sub_i64(t0
, t0
, t1
);
5331 tcg_gen_ext32s_i64(t0
, t0
);
5353 /* Make sure shift count isn't TCG undefined behaviour. */
5354 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5359 tcg_gen_shl_i64(t0
, t0
, t1
);
5363 /* Since SRA is UndefinedResult without sign-extended inputs,
5364 we can treat SRA and DSRA the same. */
5365 tcg_gen_sar_i64(t0
, t0
, t1
);
5368 /* We want to shift in zeros for SRL; zero-extend first. */
5369 tcg_gen_ext32u_i64(t0
, t0
);
5372 tcg_gen_shr_i64(t0
, t0
, t1
);
5376 if (shift_max
== 32) {
5377 tcg_gen_ext32s_i64(t0
, t0
);
5380 /* Shifts larger than MAX produce zero. */
5381 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5382 tcg_gen_neg_i64(t1
, t1
);
5383 tcg_gen_and_i64(t0
, t0
, t1
);
5389 TCGv_i64 t2
= tcg_temp_new_i64();
5390 TCGLabel
*lab
= gen_new_label();
5392 tcg_gen_mov_i64(t2
, t0
);
5393 tcg_gen_add_i64(t0
, t1
, t2
);
5394 if (opc
== OPC_ADD_CP2
) {
5395 tcg_gen_ext32s_i64(t0
, t0
);
5397 tcg_gen_xor_i64(t1
, t1
, t2
);
5398 tcg_gen_xor_i64(t2
, t2
, t0
);
5399 tcg_gen_andc_i64(t1
, t2
, t1
);
5400 tcg_temp_free_i64(t2
);
5401 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5402 generate_exception(ctx
, EXCP_OVERFLOW
);
5410 TCGv_i64 t2
= tcg_temp_new_i64();
5411 TCGLabel
*lab
= gen_new_label();
5413 tcg_gen_mov_i64(t2
, t0
);
5414 tcg_gen_sub_i64(t0
, t1
, t2
);
5415 if (opc
== OPC_SUB_CP2
) {
5416 tcg_gen_ext32s_i64(t0
, t0
);
5418 tcg_gen_xor_i64(t1
, t1
, t2
);
5419 tcg_gen_xor_i64(t2
, t2
, t0
);
5420 tcg_gen_and_i64(t1
, t1
, t2
);
5421 tcg_temp_free_i64(t2
);
5422 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5423 generate_exception(ctx
, EXCP_OVERFLOW
);
5429 tcg_gen_ext32u_i64(t0
, t0
);
5430 tcg_gen_ext32u_i64(t1
, t1
);
5431 tcg_gen_mul_i64(t0
, t0
, t1
);
5440 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5441 FD field is the CC field? */
5443 MIPS_INVAL("loongson_cp2");
5444 generate_exception_end(ctx
, EXCP_RI
);
5451 gen_store_fpr64(ctx
, t0
, rd
);
5453 tcg_temp_free_i64(t0
);
5454 tcg_temp_free_i64(t1
);
/*
 * Trap instructions: TEQ/TNE/TGE/TGEU/TLT/TLTU (register forms) and
 * TEQI/TNEI/TGEI/TGEIU/TLTI/TLTIU (immediate forms).  Compares rs with
 * rt, or rs with the sign-extended 16-bit immediate, and raises
 * EXCP_TRAP when the condition holds.  When both operands are
 * statically identical (rs == rt, or r0 vs 0) the outcome is decided
 * at translation time and no runtime compare is emitted.
 *
 * NOTE(review): interior lines of this function were lost in
 * extraction; switch headers and case labels were restored from
 * upstream QEMU -- verify against repository history.
 */
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    int cond;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    cond = 0;
    /* Load needed operands */
    switch (opc) {
    case OPC_TEQ:
    case OPC_TGE:
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TNE:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            cond = 1;
        }
        break;
    case OPC_TEQI:
    case OPC_TGEI:
    case OPC_TGEIU:
    case OPC_TLTI:
    case OPC_TLTIU:
    case OPC_TNEI:
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
            cond = 1;
        }
        break;
    }
    if (cond == 0) {
        /* Operands statically equal: the trap condition is known now. */
        switch (opc) {
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0 */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0 */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0 unsigned */
            /* Always trap */
            generate_exception_end(ctx, EXCP_TRAP);
            break;
        case OPC_TLT:   /* rs < rs */
        case OPC_TLTI:  /* r0 < 0 */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0 unsigned */
        case OPC_TNE:   /* rs != rs */
        case OPC_TNEI:  /* r0 != 0 */
            /* Never trap: treat as NOP. */
            break;
        }
    } else {
        /* Branch over the trap when the INVERSE condition holds. */
        TCGLabel *l1 = gen_new_label();

        switch (opc) {
        case OPC_TEQ:
        case OPC_TEQI:
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            break;
        case OPC_TGE:
        case OPC_TGEI:
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            break;
        case OPC_TGEU:
        case OPC_TGEIU:
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            break;
        case OPC_TLT:
        case OPC_TLTI:
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            break;
        case OPC_TLTU:
        case OPC_TLTIU:
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            break;
        case OPC_TNE:
        case OPC_TNEI:
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
            break;
        }
        generate_exception(ctx, EXCP_TRAP);
        gen_set_label(l1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 * Decide whether a direct chained jump (goto_tb) to DEST is permitted.
 * Disallowed while single-stepping; in system mode the destination must
 * lie on the same guest page as the TB start so that TB chaining cannot
 * outlive a page mapping change.  In user mode any target is allowed.
 */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->base.singlestep_enabled)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
/*
 * Emit a jump to DEST, using direct TB chaining (slot n) when
 * use_goto_tb() allows it, otherwise falling back to an indirect
 * lookup.  Under single-step, raise the debug exception instead so the
 * debugger regains control after each instruction.
 *
 * NOTE(review): the gen_save_pc()/tcg_gen_goto_tb() lines were lost in
 * extraction and restored from upstream QEMU.
 */
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        gen_save_pc(dest);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
    }
}
5580 /* Branches (before delay slot) */
5581 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5583 int rs
, int rt
, int32_t offset
,
5586 target_ulong btgt
= -1;
5588 int bcond_compute
= 0;
5589 TCGv t0
= tcg_temp_new();
5590 TCGv t1
= tcg_temp_new();
5592 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5593 #ifdef MIPS_DEBUG_DISAS
5594 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5595 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5597 generate_exception_end(ctx
, EXCP_RI
);
5601 /* Load needed operands */
5607 /* Compare two registers */
5609 gen_load_gpr(t0
, rs
);
5610 gen_load_gpr(t1
, rt
);
5613 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5627 /* Compare to zero */
5629 gen_load_gpr(t0
, rs
);
5632 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5635 #if defined(TARGET_MIPS64)
5637 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5639 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5642 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5647 /* Jump to immediate */
5648 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5653 /* Jump to register */
5654 if (offset
!= 0 && offset
!= 16) {
5655 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5656 others are reserved. */
5657 MIPS_INVAL("jump hint");
5658 generate_exception_end(ctx
, EXCP_RI
);
5661 gen_load_gpr(btarget
, rs
);
5664 MIPS_INVAL("branch/jump");
5665 generate_exception_end(ctx
, EXCP_RI
);
5668 if (bcond_compute
== 0) {
5669 /* No condition to be computed */
5671 case OPC_BEQ
: /* rx == rx */
5672 case OPC_BEQL
: /* rx == rx likely */
5673 case OPC_BGEZ
: /* 0 >= 0 */
5674 case OPC_BGEZL
: /* 0 >= 0 likely */
5675 case OPC_BLEZ
: /* 0 <= 0 */
5676 case OPC_BLEZL
: /* 0 <= 0 likely */
5678 ctx
->hflags
|= MIPS_HFLAG_B
;
5680 case OPC_BGEZAL
: /* 0 >= 0 */
5681 case OPC_BGEZALL
: /* 0 >= 0 likely */
5682 /* Always take and link */
5684 ctx
->hflags
|= MIPS_HFLAG_B
;
5686 case OPC_BNE
: /* rx != rx */
5687 case OPC_BGTZ
: /* 0 > 0 */
5688 case OPC_BLTZ
: /* 0 < 0 */
5691 case OPC_BLTZAL
: /* 0 < 0 */
5692 /* Handle as an unconditional branch to get correct delay
5695 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5696 ctx
->hflags
|= MIPS_HFLAG_B
;
5698 case OPC_BLTZALL
: /* 0 < 0 likely */
5699 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5700 /* Skip the instruction in the delay slot */
5701 ctx
->base
.pc_next
+= 4;
5703 case OPC_BNEL
: /* rx != rx likely */
5704 case OPC_BGTZL
: /* 0 > 0 likely */
5705 case OPC_BLTZL
: /* 0 < 0 likely */
5706 /* Skip the instruction in the delay slot */
5707 ctx
->base
.pc_next
+= 4;
5710 ctx
->hflags
|= MIPS_HFLAG_B
;
5713 ctx
->hflags
|= MIPS_HFLAG_BX
;
5717 ctx
->hflags
|= MIPS_HFLAG_B
;
5720 ctx
->hflags
|= MIPS_HFLAG_BR
;
5724 ctx
->hflags
|= MIPS_HFLAG_BR
;
5727 MIPS_INVAL("branch/jump");
5728 generate_exception_end(ctx
, EXCP_RI
);
5734 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5737 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5740 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5743 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5746 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5749 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5752 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5756 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5760 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5763 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5766 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5769 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5772 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5775 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5778 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5780 #if defined(TARGET_MIPS64)
5782 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5786 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5789 ctx
->hflags
|= MIPS_HFLAG_BC
;
5792 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5795 ctx
->hflags
|= MIPS_HFLAG_BL
;
5798 MIPS_INVAL("conditional branch/jump");
5799 generate_exception_end(ctx
, EXCP_RI
);
5804 ctx
->btarget
= btgt
;
5806 switch (delayslot_size
) {
5808 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5811 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5816 int post_delay
= insn_bytes
+ delayslot_size
;
5817 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5819 tcg_gen_movi_tl(cpu_gpr
[blink
],
5820 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5824 if (insn_bytes
== 2)
5825 ctx
->hflags
|= MIPS_HFLAG_B16
;
5831 /* nanoMIPS Branches */
5832 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5834 int rs
, int rt
, int32_t offset
)
5836 target_ulong btgt
= -1;
5837 int bcond_compute
= 0;
5838 TCGv t0
= tcg_temp_new();
5839 TCGv t1
= tcg_temp_new();
5841 /* Load needed operands */
5845 /* Compare two registers */
5847 gen_load_gpr(t0
, rs
);
5848 gen_load_gpr(t1
, rt
);
5851 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5854 /* Compare to zero */
5856 gen_load_gpr(t0
, rs
);
5859 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5862 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5864 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5868 /* Jump to register */
5869 if (offset
!= 0 && offset
!= 16) {
5870 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5871 others are reserved. */
5872 MIPS_INVAL("jump hint");
5873 generate_exception_end(ctx
, EXCP_RI
);
5876 gen_load_gpr(btarget
, rs
);
5879 MIPS_INVAL("branch/jump");
5880 generate_exception_end(ctx
, EXCP_RI
);
5883 if (bcond_compute
== 0) {
5884 /* No condition to be computed */
5886 case OPC_BEQ
: /* rx == rx */
5888 ctx
->hflags
|= MIPS_HFLAG_B
;
5890 case OPC_BGEZAL
: /* 0 >= 0 */
5891 /* Always take and link */
5892 tcg_gen_movi_tl(cpu_gpr
[31],
5893 ctx
->base
.pc_next
+ insn_bytes
);
5894 ctx
->hflags
|= MIPS_HFLAG_B
;
5896 case OPC_BNE
: /* rx != rx */
5897 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5898 /* Skip the instruction in the delay slot */
5899 ctx
->base
.pc_next
+= 4;
5902 ctx
->hflags
|= MIPS_HFLAG_BR
;
5906 tcg_gen_movi_tl(cpu_gpr
[rt
],
5907 ctx
->base
.pc_next
+ insn_bytes
);
5909 ctx
->hflags
|= MIPS_HFLAG_BR
;
5912 MIPS_INVAL("branch/jump");
5913 generate_exception_end(ctx
, EXCP_RI
);
5919 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5922 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5925 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5926 tcg_gen_movi_tl(cpu_gpr
[31],
5927 ctx
->base
.pc_next
+ insn_bytes
);
5930 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5932 ctx
->hflags
|= MIPS_HFLAG_BC
;
5935 MIPS_INVAL("conditional branch/jump");
5936 generate_exception_end(ctx
, EXCP_RI
);
5941 ctx
->btarget
= btgt
;
5944 if (insn_bytes
== 2) {
5945 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* special3 bitfield operations */
/*
 * EXT/INS and their 64-bit variants: extract a bitfield of rs into rt,
 * or insert the low bits of rs into a field of rt.  lsb/msb come from
 * the instruction encoding; out-of-range combinations raise a Reserved
 * Instruction exception.
 *
 * NOTE(review): case labels and the D{EXT,INS}{,M,U} adjustment lines
 * were lost in extraction and restored from upstream QEMU -- verify
 * against repository history.
 */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
    switch (opc) {
    case OPC_EXT:
        if (lsb + msb > 31) {
            goto fail;
        }
        if (msb != 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        } else {
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTU:
        lsb += 32;
        goto do_dext;
    case OPC_DEXTM:
        msb += 32;
        goto do_dext;
    case OPC_DEXT:
    do_dext:
        if (lsb + msb > 63) {
            goto fail;
        }
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        break;
#endif
    case OPC_INS:
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DINSU:
        lsb += 32;
        /* FALLTHRU */
    case OPC_DINSM:
        msb += 32;
        /* FALLTHRU */
    case OPC_DINS:
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        break;
#endif
    default:
fail:
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
        return;
    }
    gen_store_gpr(t0, rt);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 * BSHFL/DBSHFL byte-shuffle group: WSBH (swap bytes within halfwords,
 * result sign-extended to 32 bits), SEB/SEH (sign-extend byte/half),
 * and on MIPS64 DSBH (swap bytes within each halfword) and DSHD (swap
 * the four halfwords of the doubleword).
 *
 * NOTE(review): case labels and temp-free lines were lost in extraction
 * and restored from upstream QEMU -- verify against repository history.
 */
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    /* If no destination, treat it as a NOP. */
    if (rd == 0) {
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            /* Swap adjacent bytes: (x >> 8) & m | (x & m) << 8 */
            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            /* First swap halfwords within each word... */
            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            /* ...then swap the two words. */
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
        tcg_temp_free(t0);
        return;
    }
    tcg_temp_free(t0);
}
/*
 * LSA/DLSA: rd = (rs << (imm2 + 1)) + rt.  The encoded shift amount
 * imm2 is 0..3, giving an effective shift of 1..4.  LSA additionally
 * sign-extends the 32-bit result; DLSA keeps the full 64 bits.
 */
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    TCGv t0;
    TCGv t1;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    tcg_temp_free(t1);
    tcg_temp_free(t0);

    return;
}
/*
 * Common worker for ALIGN/DALIGN (and the EXT-style wrappers below):
 * concatenate rs:rt and extract a word/doubleword shifted left by
 * `bits` bit positions.  bits == 0 degenerates to a move of rt,
 * bits == wordsz to a move of rs; for 32-bit operands the shift is
 * done in a 64-bit temporary built with concat_tl_i64.
 *
 * NOTE(review): the switch (wordsz) scaffolding was lost in extraction
 * and restored from upstream QEMU -- verify against repository history.
 */
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        /* Degenerate cases: plain register move. */
        if (bits == 0) {
            gen_load_gpr(t0, rt);
        } else {
            gen_load_gpr(t0, rs);
        }
        switch (wordsz) {
        case 32:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        switch (wordsz) {
        case 32:
            {
                /* Shift the rs:rt pair as one 64-bit value. */
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }

    tcg_temp_free(t0);
}
/* ALIGN/DALIGN: byte-granular variant -- bp is a byte count, so the
   bit shift passed to the worker is bp * 8. */
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}
/* EXT-style right shift expressed through the align worker: shifting
   right by `shift` equals aligning left by (wordsz - shift) bits. */
static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
}
/* BITSWAP/DBITSWAP: reverse the bits within each byte of rt into rd,
   delegated to the bitswap helpers.  rd == 0 is a NOP. */
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
6211 #ifndef CONFIG_USER_ONLY
6212 /* CP0 (MMU and control) */
/*
 * MTHC0 to an EntryLo register: merge the guest's 32-bit value into the
 * upper part of the 64-bit EntryLo field at env offset `off`.  On
 * MIPS64 the extended PA bits live at bit 30 (deposit 32 bits there);
 * on 32-bit targets the value simply becomes the high word.
 */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * Generic MTHC0 store: write the guest value into the high 32 bits of
 * the 64-bit CP0 field at env offset `off`, preserving the low half
 * (concat32 keeps t1's low word and places t0 above it).
 */
static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * MFHC0 from an EntryLo register: return the upper portion of the
 * 64-bit EntryLo field -- shifted by 30 on MIPS64 (extended-PA layout,
 * mirroring gen_mthc0_entrylo) or by 32 on 32-bit targets.
 */
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/*
 * Generic MFHC0 load: read the high half of a 64-bit CP0 field, with an
 * extra per-register shift (e.g. the LLAddr shift) applied on top of
 * the 32-bit split.
 */
static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/* Load a 32-bit CP0 field from env offset `off`, sign-extending it to
   the target register width. */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}
/* Load a target_ulong CP0 field and sign-extend its low 32 bits, as
   MFC0 returns only the 32-bit view of 64-bit registers. */
static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}
/* Store the low 32 bits of the guest value into a 32-bit CP0 field at
   env offset `off`. */
static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
/* Guard for optional CP0 features: when condition `c` is false, bail
   out to the function-local cp0_unimplemented label of the caller.
   NOTE(review): the do/while scaffolding was lost in extraction and
   restored from upstream QEMU. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
6298 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6300 const char *rn
= "invalid";
6306 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6307 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6311 goto cp0_unimplemented
;
6317 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6318 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6322 goto cp0_unimplemented
;
6328 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6329 ctx
->CP0_LLAddr_shift
);
6333 CP0_CHECK(ctx
->mrp
);
6334 gen_helper_mfhc0_maar(arg
, cpu_env
);
6338 goto cp0_unimplemented
;
6347 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6351 goto cp0_unimplemented
;
6355 goto cp0_unimplemented
;
6357 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6361 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6362 tcg_gen_movi_tl(arg
, 0);
6365 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6367 const char *rn
= "invalid";
6368 uint64_t mask
= ctx
->PAMask
>> 36;
6374 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6375 tcg_gen_andi_tl(arg
, arg
, mask
);
6376 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6380 goto cp0_unimplemented
;
6386 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6387 tcg_gen_andi_tl(arg
, arg
, mask
);
6388 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6392 goto cp0_unimplemented
;
6398 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6399 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6400 relevant for modern MIPS cores supporting MTHC0, therefore
6401 treating MTHC0 to LLAddr as NOP. */
6405 CP0_CHECK(ctx
->mrp
);
6406 gen_helper_mthc0_maar(cpu_env
, arg
);
6410 goto cp0_unimplemented
;
6419 tcg_gen_andi_tl(arg
, arg
, mask
);
6420 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6424 goto cp0_unimplemented
;
6428 goto cp0_unimplemented
;
6430 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6433 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
/* Result of reading an unimplemented CP0 register: Release 6 mandates
   zero; earlier ISAs traditionally returned all-ones here. */
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
6445 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6447 const char *rn
= "invalid";
6450 check_insn(ctx
, ISA_MIPS32
);
6456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6460 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6461 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6465 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6466 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6470 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6471 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6480 goto cp0_unimplemented
;
6486 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6487 gen_helper_mfc0_random(arg
, cpu_env
);
6491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6496 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6497 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6501 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6506 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6507 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6512 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6516 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6517 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6518 rn
= "VPEScheFBack";
6521 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6526 goto cp0_unimplemented
;
6533 TCGv_i64 tmp
= tcg_temp_new_i64();
6534 tcg_gen_ld_i64(tmp
, cpu_env
,
6535 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6536 #if defined(TARGET_MIPS64)
6538 /* Move RI/XI fields to bits 31:30 */
6539 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6540 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6543 gen_move_low32(arg
, tmp
);
6544 tcg_temp_free_i64(tmp
);
6549 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6550 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6554 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6555 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6559 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6560 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6564 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6565 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6569 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6570 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6574 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6575 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6579 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6580 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6584 goto cp0_unimplemented
;
6591 TCGv_i64 tmp
= tcg_temp_new_i64();
6592 tcg_gen_ld_i64(tmp
, cpu_env
,
6593 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6594 #if defined(TARGET_MIPS64)
6596 /* Move RI/XI fields to bits 31:30 */
6597 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6598 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6601 gen_move_low32(arg
, tmp
);
6602 tcg_temp_free_i64(tmp
);
6608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6609 rn
= "GlobalNumber";
6612 goto cp0_unimplemented
;
6618 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6619 tcg_gen_ext32s_tl(arg
, arg
);
6623 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6624 rn
= "ContextConfig";
6625 goto cp0_unimplemented
;
6627 CP0_CHECK(ctx
->ulri
);
6628 tcg_gen_ld_tl(arg
, cpu_env
,
6629 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6630 tcg_gen_ext32s_tl(arg
, arg
);
6634 goto cp0_unimplemented
;
6640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6644 check_insn(ctx
, ISA_MIPS32R2
);
6645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6650 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6651 tcg_gen_ext32s_tl(arg
, arg
);
6656 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6657 tcg_gen_ext32s_tl(arg
, arg
);
6662 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6663 tcg_gen_ext32s_tl(arg
, arg
);
6668 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6678 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6682 goto cp0_unimplemented
;
6688 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6692 check_insn(ctx
, ISA_MIPS32R2
);
6693 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6697 check_insn(ctx
, ISA_MIPS32R2
);
6698 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6702 check_insn(ctx
, ISA_MIPS32R2
);
6703 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6707 check_insn(ctx
, ISA_MIPS32R2
);
6708 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6712 check_insn(ctx
, ISA_MIPS32R2
);
6713 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6718 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6722 goto cp0_unimplemented
;
6728 check_insn(ctx
, ISA_MIPS32R2
);
6729 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6733 goto cp0_unimplemented
;
6739 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6740 tcg_gen_ext32s_tl(arg
, arg
);
6745 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6750 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6755 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6756 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6760 goto cp0_unimplemented
;
6766 /* Mark as an IO operation because we read the time. */
6767 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6770 gen_helper_mfc0_count(arg
, cpu_env
);
6771 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6774 /* Break the TB to be able to take timer interrupts immediately
6775 after reading count. DISAS_STOP isn't sufficient, we need to
6776 ensure we break completely out of translated code. */
6777 gen_save_pc(ctx
->base
.pc_next
+ 4);
6778 ctx
->base
.is_jmp
= DISAS_EXIT
;
6781 /* 6,7 are implementation dependent */
6783 goto cp0_unimplemented
;
6789 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6790 tcg_gen_ext32s_tl(arg
, arg
);
6794 goto cp0_unimplemented
;
6800 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6803 /* 6,7 are implementation dependent */
6805 goto cp0_unimplemented
;
6811 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6815 check_insn(ctx
, ISA_MIPS32R2
);
6816 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6820 check_insn(ctx
, ISA_MIPS32R2
);
6821 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6825 check_insn(ctx
, ISA_MIPS32R2
);
6826 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6830 goto cp0_unimplemented
;
6836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6840 goto cp0_unimplemented
;
6846 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6847 tcg_gen_ext32s_tl(arg
, arg
);
6851 goto cp0_unimplemented
;
6857 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6861 check_insn(ctx
, ISA_MIPS32R2
);
6862 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6863 tcg_gen_ext32s_tl(arg
, arg
);
6867 check_insn(ctx
, ISA_MIPS32R2
);
6868 CP0_CHECK(ctx
->cmgcr
);
6869 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6870 tcg_gen_ext32s_tl(arg
, arg
);
6874 goto cp0_unimplemented
;
6880 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6884 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6892 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6900 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6903 /* 6,7 are implementation dependent */
6905 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6909 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6913 goto cp0_unimplemented
;
6919 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6923 CP0_CHECK(ctx
->mrp
);
6924 gen_helper_mfc0_maar(arg
, cpu_env
);
6928 CP0_CHECK(ctx
->mrp
);
6929 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6933 goto cp0_unimplemented
;
6946 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6947 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6951 goto cp0_unimplemented
;
6964 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6965 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6969 goto cp0_unimplemented
;
6975 #if defined(TARGET_MIPS64)
6976 check_insn(ctx
, ISA_MIPS3
);
6977 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6978 tcg_gen_ext32s_tl(arg
, arg
);
6983 goto cp0_unimplemented
;
6987 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6988 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6991 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6995 goto cp0_unimplemented
;
6999 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7000 rn
= "'Diagnostic"; /* implementation dependent */
7005 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7009 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7010 rn
= "TraceControl";
7011 goto cp0_unimplemented
;
7013 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7014 rn
= "TraceControl2";
7015 goto cp0_unimplemented
;
7017 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7018 rn
= "UserTraceData";
7019 goto cp0_unimplemented
;
7021 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7023 goto cp0_unimplemented
;
7025 goto cp0_unimplemented
;
7032 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7033 tcg_gen_ext32s_tl(arg
, arg
);
7037 goto cp0_unimplemented
;
7043 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7044 rn
= "Performance0";
7047 // gen_helper_mfc0_performance1(arg);
7048 rn
= "Performance1";
7049 goto cp0_unimplemented
;
7051 // gen_helper_mfc0_performance2(arg);
7052 rn
= "Performance2";
7053 goto cp0_unimplemented
;
7055 // gen_helper_mfc0_performance3(arg);
7056 rn
= "Performance3";
7057 goto cp0_unimplemented
;
7059 // gen_helper_mfc0_performance4(arg);
7060 rn
= "Performance4";
7061 goto cp0_unimplemented
;
7063 // gen_helper_mfc0_performance5(arg);
7064 rn
= "Performance5";
7065 goto cp0_unimplemented
;
7067 // gen_helper_mfc0_performance6(arg);
7068 rn
= "Performance6";
7069 goto cp0_unimplemented
;
7071 // gen_helper_mfc0_performance7(arg);
7072 rn
= "Performance7";
7073 goto cp0_unimplemented
;
7075 goto cp0_unimplemented
;
7081 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7085 goto cp0_unimplemented
;
7094 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7098 goto cp0_unimplemented
;
7108 TCGv_i64 tmp
= tcg_temp_new_i64();
7109 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7110 gen_move_low32(arg
, tmp
);
7111 tcg_temp_free_i64(tmp
);
7119 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7123 goto cp0_unimplemented
;
7132 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7139 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7143 goto cp0_unimplemented
;
7149 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7150 tcg_gen_ext32s_tl(arg
, arg
);
7154 goto cp0_unimplemented
;
7161 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7170 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7171 tcg_gen_ld_tl(arg
, cpu_env
,
7172 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7173 tcg_gen_ext32s_tl(arg
, arg
);
7177 goto cp0_unimplemented
;
7181 goto cp0_unimplemented
;
7183 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7187 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7188 gen_mfc0_unimplemented(ctx
, arg
);
7191 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7193 const char *rn
= "invalid";
7196 check_insn(ctx
, ISA_MIPS32
);
7198 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7206 gen_helper_mtc0_index(cpu_env
, arg
);
7210 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7211 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7215 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7220 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7230 goto cp0_unimplemented
;
7240 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7241 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7245 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7246 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7250 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7251 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7255 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7256 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7260 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7261 tcg_gen_st_tl(arg
, cpu_env
,
7262 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7266 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7267 tcg_gen_st_tl(arg
, cpu_env
,
7268 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7269 rn
= "VPEScheFBack";
7272 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7273 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7277 goto cp0_unimplemented
;
7283 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7287 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7288 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7293 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7298 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7303 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7308 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7313 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7317 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7318 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7322 goto cp0_unimplemented
;
7328 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7334 rn
= "GlobalNumber";
7337 goto cp0_unimplemented
;
7343 gen_helper_mtc0_context(cpu_env
, arg
);
7347 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7348 rn
= "ContextConfig";
7349 goto cp0_unimplemented
;
7351 CP0_CHECK(ctx
->ulri
);
7352 tcg_gen_st_tl(arg
, cpu_env
,
7353 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7357 goto cp0_unimplemented
;
7363 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7367 check_insn(ctx
, ISA_MIPS32R2
);
7368 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7370 ctx
->base
.is_jmp
= DISAS_STOP
;
7374 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7379 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7384 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7389 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7394 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7399 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7403 goto cp0_unimplemented
;
7409 gen_helper_mtc0_wired(cpu_env
, arg
);
7413 check_insn(ctx
, ISA_MIPS32R2
);
7414 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7418 check_insn(ctx
, ISA_MIPS32R2
);
7419 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7423 check_insn(ctx
, ISA_MIPS32R2
);
7424 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7428 check_insn(ctx
, ISA_MIPS32R2
);
7429 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7433 check_insn(ctx
, ISA_MIPS32R2
);
7434 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7439 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7443 goto cp0_unimplemented
;
7449 check_insn(ctx
, ISA_MIPS32R2
);
7450 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7451 ctx
->base
.is_jmp
= DISAS_STOP
;
7455 goto cp0_unimplemented
;
7477 goto cp0_unimplemented
;
7483 gen_helper_mtc0_count(cpu_env
, arg
);
7486 /* 6,7 are implementation dependent */
7488 goto cp0_unimplemented
;
7494 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7498 goto cp0_unimplemented
;
7504 gen_helper_mtc0_compare(cpu_env
, arg
);
7507 /* 6,7 are implementation dependent */
7509 goto cp0_unimplemented
;
7515 save_cpu_state(ctx
, 1);
7516 gen_helper_mtc0_status(cpu_env
, arg
);
7517 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7518 gen_save_pc(ctx
->base
.pc_next
+ 4);
7519 ctx
->base
.is_jmp
= DISAS_EXIT
;
7523 check_insn(ctx
, ISA_MIPS32R2
);
7524 gen_helper_mtc0_intctl(cpu_env
, arg
);
7525 /* Stop translation as we may have switched the execution mode */
7526 ctx
->base
.is_jmp
= DISAS_STOP
;
7530 check_insn(ctx
, ISA_MIPS32R2
);
7531 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7532 /* Stop translation as we may have switched the execution mode */
7533 ctx
->base
.is_jmp
= DISAS_STOP
;
7537 check_insn(ctx
, ISA_MIPS32R2
);
7538 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7539 /* Stop translation as we may have switched the execution mode */
7540 ctx
->base
.is_jmp
= DISAS_STOP
;
7544 goto cp0_unimplemented
;
7550 save_cpu_state(ctx
, 1);
7551 gen_helper_mtc0_cause(cpu_env
, arg
);
7552 /* Stop translation as we may have triggered an interrupt.
7553 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7554 * translated code to check for pending interrupts. */
7555 gen_save_pc(ctx
->base
.pc_next
+ 4);
7556 ctx
->base
.is_jmp
= DISAS_EXIT
;
7560 goto cp0_unimplemented
;
7566 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7570 goto cp0_unimplemented
;
7580 check_insn(ctx
, ISA_MIPS32R2
);
7581 gen_helper_mtc0_ebase(cpu_env
, arg
);
7585 goto cp0_unimplemented
;
7591 gen_helper_mtc0_config0(cpu_env
, arg
);
7593 /* Stop translation as we may have switched the execution mode */
7594 ctx
->base
.is_jmp
= DISAS_STOP
;
7597 /* ignored, read only */
7601 gen_helper_mtc0_config2(cpu_env
, arg
);
7603 /* Stop translation as we may have switched the execution mode */
7604 ctx
->base
.is_jmp
= DISAS_STOP
;
7607 gen_helper_mtc0_config3(cpu_env
, arg
);
7609 /* Stop translation as we may have switched the execution mode */
7610 ctx
->base
.is_jmp
= DISAS_STOP
;
7613 gen_helper_mtc0_config4(cpu_env
, arg
);
7615 ctx
->base
.is_jmp
= DISAS_STOP
;
7618 gen_helper_mtc0_config5(cpu_env
, arg
);
7620 /* Stop translation as we may have switched the execution mode */
7621 ctx
->base
.is_jmp
= DISAS_STOP
;
7623 /* 6,7 are implementation dependent */
7633 rn
= "Invalid config selector";
7634 goto cp0_unimplemented
;
7640 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7644 CP0_CHECK(ctx
->mrp
);
7645 gen_helper_mtc0_maar(cpu_env
, arg
);
7649 CP0_CHECK(ctx
->mrp
);
7650 gen_helper_mtc0_maari(cpu_env
, arg
);
7654 goto cp0_unimplemented
;
7667 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7668 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7672 goto cp0_unimplemented
;
7685 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7686 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7690 goto cp0_unimplemented
;
7696 #if defined(TARGET_MIPS64)
7697 check_insn(ctx
, ISA_MIPS3
);
7698 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7703 goto cp0_unimplemented
;
7707 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7708 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7711 gen_helper_mtc0_framemask(cpu_env
, arg
);
7715 goto cp0_unimplemented
;
7720 rn
= "Diagnostic"; /* implementation dependent */
7725 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7726 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7727 gen_save_pc(ctx
->base
.pc_next
+ 4);
7728 ctx
->base
.is_jmp
= DISAS_EXIT
;
7732 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7733 rn
= "TraceControl";
7734 /* Stop translation as we may have switched the execution mode */
7735 ctx
->base
.is_jmp
= DISAS_STOP
;
7736 goto cp0_unimplemented
;
7738 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7739 rn
= "TraceControl2";
7740 /* Stop translation as we may have switched the execution mode */
7741 ctx
->base
.is_jmp
= DISAS_STOP
;
7742 goto cp0_unimplemented
;
7744 /* Stop translation as we may have switched the execution mode */
7745 ctx
->base
.is_jmp
= DISAS_STOP
;
7746 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7747 rn
= "UserTraceData";
7748 /* Stop translation as we may have switched the execution mode */
7749 ctx
->base
.is_jmp
= DISAS_STOP
;
7750 goto cp0_unimplemented
;
7752 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7753 /* Stop translation as we may have switched the execution mode */
7754 ctx
->base
.is_jmp
= DISAS_STOP
;
7756 goto cp0_unimplemented
;
7758 goto cp0_unimplemented
;
7765 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7769 goto cp0_unimplemented
;
7775 gen_helper_mtc0_performance0(cpu_env
, arg
);
7776 rn
= "Performance0";
7779 // gen_helper_mtc0_performance1(arg);
7780 rn
= "Performance1";
7781 goto cp0_unimplemented
;
7783 // gen_helper_mtc0_performance2(arg);
7784 rn
= "Performance2";
7785 goto cp0_unimplemented
;
7787 // gen_helper_mtc0_performance3(arg);
7788 rn
= "Performance3";
7789 goto cp0_unimplemented
;
7791 // gen_helper_mtc0_performance4(arg);
7792 rn
= "Performance4";
7793 goto cp0_unimplemented
;
7795 // gen_helper_mtc0_performance5(arg);
7796 rn
= "Performance5";
7797 goto cp0_unimplemented
;
7799 // gen_helper_mtc0_performance6(arg);
7800 rn
= "Performance6";
7801 goto cp0_unimplemented
;
7803 // gen_helper_mtc0_performance7(arg);
7804 rn
= "Performance7";
7805 goto cp0_unimplemented
;
7807 goto cp0_unimplemented
;
7813 gen_helper_mtc0_errctl(cpu_env
, arg
);
7814 ctx
->base
.is_jmp
= DISAS_STOP
;
7818 goto cp0_unimplemented
;
7831 goto cp0_unimplemented
;
7840 gen_helper_mtc0_taglo(cpu_env
, arg
);
7847 gen_helper_mtc0_datalo(cpu_env
, arg
);
7851 goto cp0_unimplemented
;
7860 gen_helper_mtc0_taghi(cpu_env
, arg
);
7867 gen_helper_mtc0_datahi(cpu_env
, arg
);
7872 goto cp0_unimplemented
;
7878 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7882 goto cp0_unimplemented
;
7889 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7898 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7899 tcg_gen_st_tl(arg
, cpu_env
,
7900 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7904 goto cp0_unimplemented
;
7908 goto cp0_unimplemented
;
7910 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7912 /* For simplicity assume that all writes can cause interrupts. */
7913 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7915 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7916 * translated code to check for pending interrupts. */
7917 gen_save_pc(ctx
->base
.pc_next
+ 4);
7918 ctx
->base
.is_jmp
= DISAS_EXIT
;
7923 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7926 #if defined(TARGET_MIPS64)
7927 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7929 const char *rn
= "invalid";
7932 check_insn(ctx
, ISA_MIPS64
);
7938 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7942 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7943 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7947 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7948 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7952 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7953 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7962 goto cp0_unimplemented
;
7968 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7969 gen_helper_mfc0_random(arg
, cpu_env
);
7973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7974 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7979 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7984 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7988 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7989 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
7993 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7994 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7998 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7999 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8000 rn
= "VPEScheFBack";
8003 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8004 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8008 goto cp0_unimplemented
;
8014 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8018 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8019 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8024 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8028 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8029 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8033 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8034 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8038 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8039 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8043 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8044 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8049 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8053 goto cp0_unimplemented
;
8059 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8064 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8065 rn
= "GlobalNumber";
8068 goto cp0_unimplemented
;
8074 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8078 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8079 rn
= "ContextConfig";
8080 goto cp0_unimplemented
;
8082 CP0_CHECK(ctx
->ulri
);
8083 tcg_gen_ld_tl(arg
, cpu_env
,
8084 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8088 goto cp0_unimplemented
;
8094 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8098 check_insn(ctx
, ISA_MIPS32R2
);
8099 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8104 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8109 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8114 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8119 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8124 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8129 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8133 goto cp0_unimplemented
;
8139 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8143 check_insn(ctx
, ISA_MIPS32R2
);
8144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8148 check_insn(ctx
, ISA_MIPS32R2
);
8149 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8153 check_insn(ctx
, ISA_MIPS32R2
);
8154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8158 check_insn(ctx
, ISA_MIPS32R2
);
8159 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8163 check_insn(ctx
, ISA_MIPS32R2
);
8164 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8173 goto cp0_unimplemented
;
8179 check_insn(ctx
, ISA_MIPS32R2
);
8180 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8184 goto cp0_unimplemented
;
8190 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8195 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8200 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8206 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8210 goto cp0_unimplemented
;
8216 /* Mark as an IO operation because we read the time. */
8217 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8220 gen_helper_mfc0_count(arg
, cpu_env
);
8221 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8224 /* Break the TB to be able to take timer interrupts immediately
8225 after reading count. DISAS_STOP isn't sufficient, we need to
8226 ensure we break completely out of translated code. */
8227 gen_save_pc(ctx
->base
.pc_next
+ 4);
8228 ctx
->base
.is_jmp
= DISAS_EXIT
;
8231 /* 6,7 are implementation dependent */
8233 goto cp0_unimplemented
;
8239 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8243 goto cp0_unimplemented
;
8249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8252 /* 6,7 are implementation dependent */
8254 goto cp0_unimplemented
;
8260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8264 check_insn(ctx
, ISA_MIPS32R2
);
8265 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8269 check_insn(ctx
, ISA_MIPS32R2
);
8270 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8274 check_insn(ctx
, ISA_MIPS32R2
);
8275 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8279 goto cp0_unimplemented
;
8285 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8289 goto cp0_unimplemented
;
8295 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8299 goto cp0_unimplemented
;
8305 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8309 check_insn(ctx
, ISA_MIPS32R2
);
8310 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8314 check_insn(ctx
, ISA_MIPS32R2
);
8315 CP0_CHECK(ctx
->cmgcr
);
8316 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8320 goto cp0_unimplemented
;
8326 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8330 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8334 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8338 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8342 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8346 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8349 /* 6,7 are implementation dependent */
8351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8359 goto cp0_unimplemented
;
8365 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8369 CP0_CHECK(ctx
->mrp
);
8370 gen_helper_dmfc0_maar(arg
, cpu_env
);
8374 CP0_CHECK(ctx
->mrp
);
8375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8379 goto cp0_unimplemented
;
8392 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8393 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8397 goto cp0_unimplemented
;
8410 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8411 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8415 goto cp0_unimplemented
;
8421 check_insn(ctx
, ISA_MIPS3
);
8422 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8426 goto cp0_unimplemented
;
8430 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8431 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8434 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8438 goto cp0_unimplemented
;
8442 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8443 rn
= "'Diagnostic"; /* implementation dependent */
8448 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8452 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8453 rn
= "TraceControl";
8454 goto cp0_unimplemented
;
8456 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8457 rn
= "TraceControl2";
8458 goto cp0_unimplemented
;
8460 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8461 rn
= "UserTraceData";
8462 goto cp0_unimplemented
;
8464 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8466 goto cp0_unimplemented
;
8468 goto cp0_unimplemented
;
8475 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8479 goto cp0_unimplemented
;
8485 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8486 rn
= "Performance0";
8489 // gen_helper_dmfc0_performance1(arg);
8490 rn
= "Performance1";
8491 goto cp0_unimplemented
;
8493 // gen_helper_dmfc0_performance2(arg);
8494 rn
= "Performance2";
8495 goto cp0_unimplemented
;
8497 // gen_helper_dmfc0_performance3(arg);
8498 rn
= "Performance3";
8499 goto cp0_unimplemented
;
8501 // gen_helper_dmfc0_performance4(arg);
8502 rn
= "Performance4";
8503 goto cp0_unimplemented
;
8505 // gen_helper_dmfc0_performance5(arg);
8506 rn
= "Performance5";
8507 goto cp0_unimplemented
;
8509 // gen_helper_dmfc0_performance6(arg);
8510 rn
= "Performance6";
8511 goto cp0_unimplemented
;
8513 // gen_helper_dmfc0_performance7(arg);
8514 rn
= "Performance7";
8515 goto cp0_unimplemented
;
8517 goto cp0_unimplemented
;
8523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8527 goto cp0_unimplemented
;
8537 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8541 goto cp0_unimplemented
;
8550 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8561 goto cp0_unimplemented
;
8570 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8577 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8581 goto cp0_unimplemented
;
8587 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8591 goto cp0_unimplemented
;
8598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8607 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8608 tcg_gen_ld_tl(arg
, cpu_env
,
8609 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8613 goto cp0_unimplemented
;
8617 goto cp0_unimplemented
;
8619 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8623 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8624 gen_mfc0_unimplemented(ctx
, arg
);
8627 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8629 const char *rn
= "invalid";
8632 check_insn(ctx
, ISA_MIPS64
);
8634 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8642 gen_helper_mtc0_index(cpu_env
, arg
);
8646 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8647 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8651 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8656 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8666 goto cp0_unimplemented
;
8676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8677 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8681 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8682 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8687 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8691 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8692 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8696 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8697 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8701 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8702 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8703 rn
= "VPEScheFBack";
8706 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8707 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8711 goto cp0_unimplemented
;
8717 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8721 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8722 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8726 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8727 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8731 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8732 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8736 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8737 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8741 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8742 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8747 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8752 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8756 goto cp0_unimplemented
;
8762 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8768 rn
= "GlobalNumber";
8771 goto cp0_unimplemented
;
8777 gen_helper_mtc0_context(cpu_env
, arg
);
8781 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8782 rn
= "ContextConfig";
8783 goto cp0_unimplemented
;
8785 CP0_CHECK(ctx
->ulri
);
8786 tcg_gen_st_tl(arg
, cpu_env
,
8787 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8791 goto cp0_unimplemented
;
8797 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8801 check_insn(ctx
, ISA_MIPS32R2
);
8802 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8807 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8812 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8817 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8822 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8827 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8832 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8836 goto cp0_unimplemented
;
8842 gen_helper_mtc0_wired(cpu_env
, arg
);
8846 check_insn(ctx
, ISA_MIPS32R2
);
8847 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8851 check_insn(ctx
, ISA_MIPS32R2
);
8852 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8856 check_insn(ctx
, ISA_MIPS32R2
);
8857 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8861 check_insn(ctx
, ISA_MIPS32R2
);
8862 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8866 check_insn(ctx
, ISA_MIPS32R2
);
8867 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8872 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8876 goto cp0_unimplemented
;
8882 check_insn(ctx
, ISA_MIPS32R2
);
8883 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8884 ctx
->base
.is_jmp
= DISAS_STOP
;
8888 goto cp0_unimplemented
;
8910 goto cp0_unimplemented
;
8916 gen_helper_mtc0_count(cpu_env
, arg
);
8919 /* 6,7 are implementation dependent */
8921 goto cp0_unimplemented
;
8923 /* Stop translation as we may have switched the execution mode */
8924 ctx
->base
.is_jmp
= DISAS_STOP
;
8929 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8933 goto cp0_unimplemented
;
8939 gen_helper_mtc0_compare(cpu_env
, arg
);
8942 /* 6,7 are implementation dependent */
8944 goto cp0_unimplemented
;
8946 /* Stop translation as we may have switched the execution mode */
8947 ctx
->base
.is_jmp
= DISAS_STOP
;
8952 save_cpu_state(ctx
, 1);
8953 gen_helper_mtc0_status(cpu_env
, arg
);
8954 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8955 gen_save_pc(ctx
->base
.pc_next
+ 4);
8956 ctx
->base
.is_jmp
= DISAS_EXIT
;
8960 check_insn(ctx
, ISA_MIPS32R2
);
8961 gen_helper_mtc0_intctl(cpu_env
, arg
);
8962 /* Stop translation as we may have switched the execution mode */
8963 ctx
->base
.is_jmp
= DISAS_STOP
;
8967 check_insn(ctx
, ISA_MIPS32R2
);
8968 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8969 /* Stop translation as we may have switched the execution mode */
8970 ctx
->base
.is_jmp
= DISAS_STOP
;
8974 check_insn(ctx
, ISA_MIPS32R2
);
8975 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8976 /* Stop translation as we may have switched the execution mode */
8977 ctx
->base
.is_jmp
= DISAS_STOP
;
8981 goto cp0_unimplemented
;
8987 save_cpu_state(ctx
, 1);
8988 gen_helper_mtc0_cause(cpu_env
, arg
);
8989 /* Stop translation as we may have triggered an interrupt.
8990 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8991 * translated code to check for pending interrupts. */
8992 gen_save_pc(ctx
->base
.pc_next
+ 4);
8993 ctx
->base
.is_jmp
= DISAS_EXIT
;
8997 goto cp0_unimplemented
;
9003 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9007 goto cp0_unimplemented
;
9017 check_insn(ctx
, ISA_MIPS32R2
);
9018 gen_helper_mtc0_ebase(cpu_env
, arg
);
9022 goto cp0_unimplemented
;
9028 gen_helper_mtc0_config0(cpu_env
, arg
);
9030 /* Stop translation as we may have switched the execution mode */
9031 ctx
->base
.is_jmp
= DISAS_STOP
;
9034 /* ignored, read only */
9038 gen_helper_mtc0_config2(cpu_env
, arg
);
9040 /* Stop translation as we may have switched the execution mode */
9041 ctx
->base
.is_jmp
= DISAS_STOP
;
9044 gen_helper_mtc0_config3(cpu_env
, arg
);
9046 /* Stop translation as we may have switched the execution mode */
9047 ctx
->base
.is_jmp
= DISAS_STOP
;
9050 /* currently ignored */
9054 gen_helper_mtc0_config5(cpu_env
, arg
);
9056 /* Stop translation as we may have switched the execution mode */
9057 ctx
->base
.is_jmp
= DISAS_STOP
;
9059 /* 6,7 are implementation dependent */
9061 rn
= "Invalid config selector";
9062 goto cp0_unimplemented
;
9068 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9072 CP0_CHECK(ctx
->mrp
);
9073 gen_helper_mtc0_maar(cpu_env
, arg
);
9077 CP0_CHECK(ctx
->mrp
);
9078 gen_helper_mtc0_maari(cpu_env
, arg
);
9082 goto cp0_unimplemented
;
9095 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9096 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9100 goto cp0_unimplemented
;
9113 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9114 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9118 goto cp0_unimplemented
;
9124 check_insn(ctx
, ISA_MIPS3
);
9125 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9129 goto cp0_unimplemented
;
9133 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9134 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9137 gen_helper_mtc0_framemask(cpu_env
, arg
);
9141 goto cp0_unimplemented
;
9146 rn
= "Diagnostic"; /* implementation dependent */
9151 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9152 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9153 gen_save_pc(ctx
->base
.pc_next
+ 4);
9154 ctx
->base
.is_jmp
= DISAS_EXIT
;
9158 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9159 /* Stop translation as we may have switched the execution mode */
9160 ctx
->base
.is_jmp
= DISAS_STOP
;
9161 rn
= "TraceControl";
9162 goto cp0_unimplemented
;
9164 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9165 /* Stop translation as we may have switched the execution mode */
9166 ctx
->base
.is_jmp
= DISAS_STOP
;
9167 rn
= "TraceControl2";
9168 goto cp0_unimplemented
;
9170 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9171 /* Stop translation as we may have switched the execution mode */
9172 ctx
->base
.is_jmp
= DISAS_STOP
;
9173 rn
= "UserTraceData";
9174 goto cp0_unimplemented
;
9176 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9177 /* Stop translation as we may have switched the execution mode */
9178 ctx
->base
.is_jmp
= DISAS_STOP
;
9180 goto cp0_unimplemented
;
9182 goto cp0_unimplemented
;
9189 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9193 goto cp0_unimplemented
;
9199 gen_helper_mtc0_performance0(cpu_env
, arg
);
9200 rn
= "Performance0";
9203 // gen_helper_mtc0_performance1(cpu_env, arg);
9204 rn
= "Performance1";
9205 goto cp0_unimplemented
;
9207 // gen_helper_mtc0_performance2(cpu_env, arg);
9208 rn
= "Performance2";
9209 goto cp0_unimplemented
;
9211 // gen_helper_mtc0_performance3(cpu_env, arg);
9212 rn
= "Performance3";
9213 goto cp0_unimplemented
;
9215 // gen_helper_mtc0_performance4(cpu_env, arg);
9216 rn
= "Performance4";
9217 goto cp0_unimplemented
;
9219 // gen_helper_mtc0_performance5(cpu_env, arg);
9220 rn
= "Performance5";
9221 goto cp0_unimplemented
;
9223 // gen_helper_mtc0_performance6(cpu_env, arg);
9224 rn
= "Performance6";
9225 goto cp0_unimplemented
;
9227 // gen_helper_mtc0_performance7(cpu_env, arg);
9228 rn
= "Performance7";
9229 goto cp0_unimplemented
;
9231 goto cp0_unimplemented
;
9237 gen_helper_mtc0_errctl(cpu_env
, arg
);
9238 ctx
->base
.is_jmp
= DISAS_STOP
;
9242 goto cp0_unimplemented
;
9255 goto cp0_unimplemented
;
9264 gen_helper_mtc0_taglo(cpu_env
, arg
);
9271 gen_helper_mtc0_datalo(cpu_env
, arg
);
9275 goto cp0_unimplemented
;
9284 gen_helper_mtc0_taghi(cpu_env
, arg
);
9291 gen_helper_mtc0_datahi(cpu_env
, arg
);
9296 goto cp0_unimplemented
;
9302 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9306 goto cp0_unimplemented
;
9313 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9322 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9323 tcg_gen_st_tl(arg
, cpu_env
,
9324 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9328 goto cp0_unimplemented
;
9332 goto cp0_unimplemented
;
9334 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9336 /* For simplicity assume that all writes can cause interrupts. */
9337 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9339 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9340 * translated code to check for pending interrupts. */
9341 gen_save_pc(ctx
->base
.pc_next
+ 4);
9342 ctx
->base
.is_jmp
= DISAS_EXIT
;
9347 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9349 #endif /* TARGET_MIPS64 */
9351 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9352 int u
, int sel
, int h
)
9354 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9355 TCGv t0
= tcg_temp_local_new();
9357 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9358 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9359 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9360 tcg_gen_movi_tl(t0
, -1);
9361 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9362 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9363 tcg_gen_movi_tl(t0
, -1);
9369 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9372 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9382 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9385 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9388 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9391 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9394 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9397 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9400 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9403 gen_mfc0(ctx
, t0
, rt
, sel
);
9410 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9413 gen_mfc0(ctx
, t0
, rt
, sel
);
9419 gen_helper_mftc0_status(t0
, cpu_env
);
9422 gen_mfc0(ctx
, t0
, rt
, sel
);
9428 gen_helper_mftc0_cause(t0
, cpu_env
);
9438 gen_helper_mftc0_epc(t0
, cpu_env
);
9448 gen_helper_mftc0_ebase(t0
, cpu_env
);
9465 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9475 gen_helper_mftc0_debug(t0
, cpu_env
);
9478 gen_mfc0(ctx
, t0
, rt
, sel
);
9483 gen_mfc0(ctx
, t0
, rt
, sel
);
9485 } else switch (sel
) {
9486 /* GPR registers. */
9488 gen_helper_1e0i(mftgpr
, t0
, rt
);
9490 /* Auxiliary CPU registers */
9494 gen_helper_1e0i(mftlo
, t0
, 0);
9497 gen_helper_1e0i(mfthi
, t0
, 0);
9500 gen_helper_1e0i(mftacx
, t0
, 0);
9503 gen_helper_1e0i(mftlo
, t0
, 1);
9506 gen_helper_1e0i(mfthi
, t0
, 1);
9509 gen_helper_1e0i(mftacx
, t0
, 1);
9512 gen_helper_1e0i(mftlo
, t0
, 2);
9515 gen_helper_1e0i(mfthi
, t0
, 2);
9518 gen_helper_1e0i(mftacx
, t0
, 2);
9521 gen_helper_1e0i(mftlo
, t0
, 3);
9524 gen_helper_1e0i(mfthi
, t0
, 3);
9527 gen_helper_1e0i(mftacx
, t0
, 3);
9530 gen_helper_mftdsp(t0
, cpu_env
);
9536 /* Floating point (COP1). */
9538 /* XXX: For now we support only a single FPU context. */
9540 TCGv_i32 fp0
= tcg_temp_new_i32();
9542 gen_load_fpr32(ctx
, fp0
, rt
);
9543 tcg_gen_ext_i32_tl(t0
, fp0
);
9544 tcg_temp_free_i32(fp0
);
9546 TCGv_i32 fp0
= tcg_temp_new_i32();
9548 gen_load_fpr32h(ctx
, fp0
, rt
);
9549 tcg_gen_ext_i32_tl(t0
, fp0
);
9550 tcg_temp_free_i32(fp0
);
9554 /* XXX: For now we support only a single FPU context. */
9555 gen_helper_1e0i(cfc1
, t0
, rt
);
9557 /* COP2: Not implemented. */
9564 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9565 gen_store_gpr(t0
, rd
);
9571 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9572 generate_exception_end(ctx
, EXCP_RI
);
9575 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9576 int u
, int sel
, int h
)
9578 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9579 TCGv t0
= tcg_temp_local_new();
9581 gen_load_gpr(t0
, rt
);
9582 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9583 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9584 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9586 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9587 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9594 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9597 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9607 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9610 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9613 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9616 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9619 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9622 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9625 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9628 gen_mtc0(ctx
, t0
, rd
, sel
);
9635 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9638 gen_mtc0(ctx
, t0
, rd
, sel
);
9644 gen_helper_mttc0_status(cpu_env
, t0
);
9647 gen_mtc0(ctx
, t0
, rd
, sel
);
9653 gen_helper_mttc0_cause(cpu_env
, t0
);
9663 gen_helper_mttc0_ebase(cpu_env
, t0
);
9673 gen_helper_mttc0_debug(cpu_env
, t0
);
9676 gen_mtc0(ctx
, t0
, rd
, sel
);
9681 gen_mtc0(ctx
, t0
, rd
, sel
);
9683 } else switch (sel
) {
9684 /* GPR registers. */
9686 gen_helper_0e1i(mttgpr
, t0
, rd
);
9688 /* Auxiliary CPU registers */
9692 gen_helper_0e1i(mttlo
, t0
, 0);
9695 gen_helper_0e1i(mtthi
, t0
, 0);
9698 gen_helper_0e1i(mttacx
, t0
, 0);
9701 gen_helper_0e1i(mttlo
, t0
, 1);
9704 gen_helper_0e1i(mtthi
, t0
, 1);
9707 gen_helper_0e1i(mttacx
, t0
, 1);
9710 gen_helper_0e1i(mttlo
, t0
, 2);
9713 gen_helper_0e1i(mtthi
, t0
, 2);
9716 gen_helper_0e1i(mttacx
, t0
, 2);
9719 gen_helper_0e1i(mttlo
, t0
, 3);
9722 gen_helper_0e1i(mtthi
, t0
, 3);
9725 gen_helper_0e1i(mttacx
, t0
, 3);
9728 gen_helper_mttdsp(cpu_env
, t0
);
9734 /* Floating point (COP1). */
9736 /* XXX: For now we support only a single FPU context. */
9738 TCGv_i32 fp0
= tcg_temp_new_i32();
9740 tcg_gen_trunc_tl_i32(fp0
, t0
);
9741 gen_store_fpr32(ctx
, fp0
, rd
);
9742 tcg_temp_free_i32(fp0
);
9744 TCGv_i32 fp0
= tcg_temp_new_i32();
9746 tcg_gen_trunc_tl_i32(fp0
, t0
);
9747 gen_store_fpr32h(ctx
, fp0
, rd
);
9748 tcg_temp_free_i32(fp0
);
9752 /* XXX: For now we support only a single FPU context. */
9754 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9756 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9757 tcg_temp_free_i32(fs_tmp
);
9759 /* Stop translation as we may have changed hflags */
9760 ctx
->base
.is_jmp
= DISAS_STOP
;
9762 /* COP2: Not implemented. */
9769 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9775 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9776 generate_exception_end(ctx
, EXCP_RI
);
9779 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9781 const char *opn
= "ldst";
9783 check_cp0_enabled(ctx
);
9790 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9795 TCGv t0
= tcg_temp_new();
9797 gen_load_gpr(t0
, rt
);
9798 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9803 #if defined(TARGET_MIPS64)
9805 check_insn(ctx
, ISA_MIPS3
);
9810 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9814 check_insn(ctx
, ISA_MIPS3
);
9816 TCGv t0
= tcg_temp_new();
9818 gen_load_gpr(t0
, rt
);
9819 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9831 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9837 TCGv t0
= tcg_temp_new();
9838 gen_load_gpr(t0
, rt
);
9839 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9845 check_cp0_enabled(ctx
);
9850 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9851 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9855 check_cp0_enabled(ctx
);
9856 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9857 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9862 if (!env
->tlb
->helper_tlbwi
)
9864 gen_helper_tlbwi(cpu_env
);
9869 if (!env
->tlb
->helper_tlbinv
) {
9872 gen_helper_tlbinv(cpu_env
);
9873 } /* treat as nop if TLBINV not supported */
9878 if (!env
->tlb
->helper_tlbinvf
) {
9881 gen_helper_tlbinvf(cpu_env
);
9882 } /* treat as nop if TLBINV not supported */
9886 if (!env
->tlb
->helper_tlbwr
)
9888 gen_helper_tlbwr(cpu_env
);
9892 if (!env
->tlb
->helper_tlbp
)
9894 gen_helper_tlbp(cpu_env
);
9898 if (!env
->tlb
->helper_tlbr
)
9900 gen_helper_tlbr(cpu_env
);
9902 case OPC_ERET
: /* OPC_ERETNC */
9903 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9904 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9907 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9908 if (ctx
->opcode
& (1 << bit_shift
)) {
9911 check_insn(ctx
, ISA_MIPS32R5
);
9912 gen_helper_eretnc(cpu_env
);
9916 check_insn(ctx
, ISA_MIPS2
);
9917 gen_helper_eret(cpu_env
);
9919 ctx
->base
.is_jmp
= DISAS_EXIT
;
9924 check_insn(ctx
, ISA_MIPS32
);
9925 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9926 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9929 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9931 generate_exception_end(ctx
, EXCP_RI
);
9933 gen_helper_deret(cpu_env
);
9934 ctx
->base
.is_jmp
= DISAS_EXIT
;
9939 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
9940 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9941 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9944 /* If we get an exception, we want to restart at next instruction */
9945 ctx
->base
.pc_next
+= 4;
9946 save_cpu_state(ctx
, 1);
9947 ctx
->base
.pc_next
-= 4;
9948 gen_helper_wait(cpu_env
);
9949 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9954 generate_exception_end(ctx
, EXCP_RI
);
9957 (void)opn
; /* avoid a compiler warning */
9959 #endif /* !CONFIG_USER_ONLY */
9961 /* CP1 Branches (before delay slot) */
9962 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9963 int32_t cc
, int32_t offset
)
9965 target_ulong btarget
;
9966 TCGv_i32 t0
= tcg_temp_new_i32();
9968 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9969 generate_exception_end(ctx
, EXCP_RI
);
9974 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
9976 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9980 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9981 tcg_gen_not_i32(t0
, t0
);
9982 tcg_gen_andi_i32(t0
, t0
, 1);
9983 tcg_gen_extu_i32_tl(bcond
, t0
);
9986 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9987 tcg_gen_not_i32(t0
, t0
);
9988 tcg_gen_andi_i32(t0
, t0
, 1);
9989 tcg_gen_extu_i32_tl(bcond
, t0
);
9992 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9993 tcg_gen_andi_i32(t0
, t0
, 1);
9994 tcg_gen_extu_i32_tl(bcond
, t0
);
9997 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9998 tcg_gen_andi_i32(t0
, t0
, 1);
9999 tcg_gen_extu_i32_tl(bcond
, t0
);
10001 ctx
->hflags
|= MIPS_HFLAG_BL
;
10005 TCGv_i32 t1
= tcg_temp_new_i32();
10006 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10007 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10008 tcg_gen_nand_i32(t0
, t0
, t1
);
10009 tcg_temp_free_i32(t1
);
10010 tcg_gen_andi_i32(t0
, t0
, 1);
10011 tcg_gen_extu_i32_tl(bcond
, t0
);
10016 TCGv_i32 t1
= tcg_temp_new_i32();
10017 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10018 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10019 tcg_gen_or_i32(t0
, t0
, t1
);
10020 tcg_temp_free_i32(t1
);
10021 tcg_gen_andi_i32(t0
, t0
, 1);
10022 tcg_gen_extu_i32_tl(bcond
, t0
);
10027 TCGv_i32 t1
= tcg_temp_new_i32();
10028 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10029 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10030 tcg_gen_and_i32(t0
, t0
, t1
);
10031 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10032 tcg_gen_and_i32(t0
, t0
, t1
);
10033 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10034 tcg_gen_nand_i32(t0
, t0
, t1
);
10035 tcg_temp_free_i32(t1
);
10036 tcg_gen_andi_i32(t0
, t0
, 1);
10037 tcg_gen_extu_i32_tl(bcond
, t0
);
10042 TCGv_i32 t1
= tcg_temp_new_i32();
10043 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10044 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10045 tcg_gen_or_i32(t0
, t0
, t1
);
10046 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10047 tcg_gen_or_i32(t0
, t0
, t1
);
10048 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10049 tcg_gen_or_i32(t0
, t0
, t1
);
10050 tcg_temp_free_i32(t1
);
10051 tcg_gen_andi_i32(t0
, t0
, 1);
10052 tcg_gen_extu_i32_tl(bcond
, t0
);
10055 ctx
->hflags
|= MIPS_HFLAG_BC
;
10058 MIPS_INVAL("cp1 cond branch");
10059 generate_exception_end(ctx
, EXCP_RI
);
10062 ctx
->btarget
= btarget
;
10063 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10065 tcg_temp_free_i32(t0
);
10068 /* R6 CP1 Branches */
10069 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10070 int32_t ft
, int32_t offset
,
10071 int delayslot_size
)
10073 target_ulong btarget
;
10074 TCGv_i64 t0
= tcg_temp_new_i64();
10076 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10077 #ifdef MIPS_DEBUG_DISAS
10078 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10079 "\n", ctx
->base
.pc_next
);
10081 generate_exception_end(ctx
, EXCP_RI
);
10085 gen_load_fpr64(ctx
, t0
, ft
);
10086 tcg_gen_andi_i64(t0
, t0
, 1);
10088 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10092 tcg_gen_xori_i64(t0
, t0
, 1);
10093 ctx
->hflags
|= MIPS_HFLAG_BC
;
10096 /* t0 already set */
10097 ctx
->hflags
|= MIPS_HFLAG_BC
;
10100 MIPS_INVAL("cp1 cond branch");
10101 generate_exception_end(ctx
, EXCP_RI
);
10105 tcg_gen_trunc_i64_tl(bcond
, t0
);
10107 ctx
->btarget
= btarget
;
10109 switch (delayslot_size
) {
10111 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10114 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10119 tcg_temp_free_i64(t0
);
10122 /* Coprocessor 1 (FPU) */
10124 #define FOP(func, fmt) (((fmt) << 21) | (func))
10127 OPC_ADD_S
= FOP(0, FMT_S
),
10128 OPC_SUB_S
= FOP(1, FMT_S
),
10129 OPC_MUL_S
= FOP(2, FMT_S
),
10130 OPC_DIV_S
= FOP(3, FMT_S
),
10131 OPC_SQRT_S
= FOP(4, FMT_S
),
10132 OPC_ABS_S
= FOP(5, FMT_S
),
10133 OPC_MOV_S
= FOP(6, FMT_S
),
10134 OPC_NEG_S
= FOP(7, FMT_S
),
10135 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10136 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10137 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10138 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10139 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10140 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10141 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10142 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10143 OPC_SEL_S
= FOP(16, FMT_S
),
10144 OPC_MOVCF_S
= FOP(17, FMT_S
),
10145 OPC_MOVZ_S
= FOP(18, FMT_S
),
10146 OPC_MOVN_S
= FOP(19, FMT_S
),
10147 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10148 OPC_RECIP_S
= FOP(21, FMT_S
),
10149 OPC_RSQRT_S
= FOP(22, FMT_S
),
10150 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10151 OPC_MADDF_S
= FOP(24, FMT_S
),
10152 OPC_MSUBF_S
= FOP(25, FMT_S
),
10153 OPC_RINT_S
= FOP(26, FMT_S
),
10154 OPC_CLASS_S
= FOP(27, FMT_S
),
10155 OPC_MIN_S
= FOP(28, FMT_S
),
10156 OPC_RECIP2_S
= FOP(28, FMT_S
),
10157 OPC_MINA_S
= FOP(29, FMT_S
),
10158 OPC_RECIP1_S
= FOP(29, FMT_S
),
10159 OPC_MAX_S
= FOP(30, FMT_S
),
10160 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10161 OPC_MAXA_S
= FOP(31, FMT_S
),
10162 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10163 OPC_CVT_D_S
= FOP(33, FMT_S
),
10164 OPC_CVT_W_S
= FOP(36, FMT_S
),
10165 OPC_CVT_L_S
= FOP(37, FMT_S
),
10166 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10167 OPC_CMP_F_S
= FOP (48, FMT_S
),
10168 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10169 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10170 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10171 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10172 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10173 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10174 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10175 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10176 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10177 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10178 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10179 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10180 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10181 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10182 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10184 OPC_ADD_D
= FOP(0, FMT_D
),
10185 OPC_SUB_D
= FOP(1, FMT_D
),
10186 OPC_MUL_D
= FOP(2, FMT_D
),
10187 OPC_DIV_D
= FOP(3, FMT_D
),
10188 OPC_SQRT_D
= FOP(4, FMT_D
),
10189 OPC_ABS_D
= FOP(5, FMT_D
),
10190 OPC_MOV_D
= FOP(6, FMT_D
),
10191 OPC_NEG_D
= FOP(7, FMT_D
),
10192 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10193 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10194 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10195 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10196 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10197 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10198 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10199 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10200 OPC_SEL_D
= FOP(16, FMT_D
),
10201 OPC_MOVCF_D
= FOP(17, FMT_D
),
10202 OPC_MOVZ_D
= FOP(18, FMT_D
),
10203 OPC_MOVN_D
= FOP(19, FMT_D
),
10204 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10205 OPC_RECIP_D
= FOP(21, FMT_D
),
10206 OPC_RSQRT_D
= FOP(22, FMT_D
),
10207 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10208 OPC_MADDF_D
= FOP(24, FMT_D
),
10209 OPC_MSUBF_D
= FOP(25, FMT_D
),
10210 OPC_RINT_D
= FOP(26, FMT_D
),
10211 OPC_CLASS_D
= FOP(27, FMT_D
),
10212 OPC_MIN_D
= FOP(28, FMT_D
),
10213 OPC_RECIP2_D
= FOP(28, FMT_D
),
10214 OPC_MINA_D
= FOP(29, FMT_D
),
10215 OPC_RECIP1_D
= FOP(29, FMT_D
),
10216 OPC_MAX_D
= FOP(30, FMT_D
),
10217 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10218 OPC_MAXA_D
= FOP(31, FMT_D
),
10219 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10220 OPC_CVT_S_D
= FOP(32, FMT_D
),
10221 OPC_CVT_W_D
= FOP(36, FMT_D
),
10222 OPC_CVT_L_D
= FOP(37, FMT_D
),
10223 OPC_CMP_F_D
= FOP (48, FMT_D
),
10224 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10225 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10226 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10227 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10228 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10229 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10230 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10231 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10232 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10233 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10234 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10235 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10236 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10237 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10238 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10240 OPC_CVT_S_W
= FOP(32, FMT_W
),
10241 OPC_CVT_D_W
= FOP(33, FMT_W
),
10242 OPC_CVT_S_L
= FOP(32, FMT_L
),
10243 OPC_CVT_D_L
= FOP(33, FMT_L
),
10244 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10246 OPC_ADD_PS
= FOP(0, FMT_PS
),
10247 OPC_SUB_PS
= FOP(1, FMT_PS
),
10248 OPC_MUL_PS
= FOP(2, FMT_PS
),
10249 OPC_DIV_PS
= FOP(3, FMT_PS
),
10250 OPC_ABS_PS
= FOP(5, FMT_PS
),
10251 OPC_MOV_PS
= FOP(6, FMT_PS
),
10252 OPC_NEG_PS
= FOP(7, FMT_PS
),
10253 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10254 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10255 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10256 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10257 OPC_MULR_PS
= FOP(26, FMT_PS
),
10258 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10259 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10260 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10261 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10263 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10264 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10265 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10266 OPC_PLL_PS
= FOP(44, FMT_PS
),
10267 OPC_PLU_PS
= FOP(45, FMT_PS
),
10268 OPC_PUL_PS
= FOP(46, FMT_PS
),
10269 OPC_PUU_PS
= FOP(47, FMT_PS
),
10270 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10271 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10272 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10273 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10274 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10275 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10276 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10277 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10278 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10279 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10280 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10281 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10282 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10283 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10284 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10285 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10289 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10290 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10291 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10292 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10293 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10294 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10295 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10296 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10297 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10298 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10299 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10300 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10301 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10302 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10303 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10304 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10305 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10306 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10307 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10308 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10309 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10310 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10312 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10313 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10314 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10315 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10316 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10317 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10318 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10319 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10320 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10321 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10322 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10323 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10324 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10325 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10326 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10327 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10328 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10329 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10330 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10331 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10332 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10333 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10335 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10337 TCGv t0
= tcg_temp_new();
10342 TCGv_i32 fp0
= tcg_temp_new_i32();
10344 gen_load_fpr32(ctx
, fp0
, fs
);
10345 tcg_gen_ext_i32_tl(t0
, fp0
);
10346 tcg_temp_free_i32(fp0
);
10348 gen_store_gpr(t0
, rt
);
10351 gen_load_gpr(t0
, rt
);
10353 TCGv_i32 fp0
= tcg_temp_new_i32();
10355 tcg_gen_trunc_tl_i32(fp0
, t0
);
10356 gen_store_fpr32(ctx
, fp0
, fs
);
10357 tcg_temp_free_i32(fp0
);
10361 gen_helper_1e0i(cfc1
, t0
, fs
);
10362 gen_store_gpr(t0
, rt
);
10365 gen_load_gpr(t0
, rt
);
10366 save_cpu_state(ctx
, 0);
10368 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10370 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10371 tcg_temp_free_i32(fs_tmp
);
10373 /* Stop translation as we may have changed hflags */
10374 ctx
->base
.is_jmp
= DISAS_STOP
;
10376 #if defined(TARGET_MIPS64)
10378 gen_load_fpr64(ctx
, t0
, fs
);
10379 gen_store_gpr(t0
, rt
);
10382 gen_load_gpr(t0
, rt
);
10383 gen_store_fpr64(ctx
, t0
, fs
);
10388 TCGv_i32 fp0
= tcg_temp_new_i32();
10390 gen_load_fpr32h(ctx
, fp0
, fs
);
10391 tcg_gen_ext_i32_tl(t0
, fp0
);
10392 tcg_temp_free_i32(fp0
);
10394 gen_store_gpr(t0
, rt
);
10397 gen_load_gpr(t0
, rt
);
10399 TCGv_i32 fp0
= tcg_temp_new_i32();
10401 tcg_gen_trunc_tl_i32(fp0
, t0
);
10402 gen_store_fpr32h(ctx
, fp0
, fs
);
10403 tcg_temp_free_i32(fp0
);
10407 MIPS_INVAL("cp1 move");
10408 generate_exception_end(ctx
, EXCP_RI
);
10416 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10423 /* Treat as NOP. */
10428 cond
= TCG_COND_EQ
;
10430 cond
= TCG_COND_NE
;
10432 l1
= gen_new_label();
10433 t0
= tcg_temp_new_i32();
10434 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10435 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10436 tcg_temp_free_i32(t0
);
10438 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10440 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10445 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10449 TCGv_i32 t0
= tcg_temp_new_i32();
10450 TCGLabel
*l1
= gen_new_label();
10453 cond
= TCG_COND_EQ
;
10455 cond
= TCG_COND_NE
;
10457 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10458 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10459 gen_load_fpr32(ctx
, t0
, fs
);
10460 gen_store_fpr32(ctx
, t0
, fd
);
10462 tcg_temp_free_i32(t0
);
10465 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10468 TCGv_i32 t0
= tcg_temp_new_i32();
10470 TCGLabel
*l1
= gen_new_label();
10473 cond
= TCG_COND_EQ
;
10475 cond
= TCG_COND_NE
;
10477 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10478 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10479 tcg_temp_free_i32(t0
);
10480 fp0
= tcg_temp_new_i64();
10481 gen_load_fpr64(ctx
, fp0
, fs
);
10482 gen_store_fpr64(ctx
, fp0
, fd
);
10483 tcg_temp_free_i64(fp0
);
10487 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10491 TCGv_i32 t0
= tcg_temp_new_i32();
10492 TCGLabel
*l1
= gen_new_label();
10493 TCGLabel
*l2
= gen_new_label();
10496 cond
= TCG_COND_EQ
;
10498 cond
= TCG_COND_NE
;
10500 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10501 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10502 gen_load_fpr32(ctx
, t0
, fs
);
10503 gen_store_fpr32(ctx
, t0
, fd
);
10506 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10507 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10508 gen_load_fpr32h(ctx
, t0
, fs
);
10509 gen_store_fpr32h(ctx
, t0
, fd
);
10510 tcg_temp_free_i32(t0
);
10514 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10517 TCGv_i32 t1
= tcg_const_i32(0);
10518 TCGv_i32 fp0
= tcg_temp_new_i32();
10519 TCGv_i32 fp1
= tcg_temp_new_i32();
10520 TCGv_i32 fp2
= tcg_temp_new_i32();
10521 gen_load_fpr32(ctx
, fp0
, fd
);
10522 gen_load_fpr32(ctx
, fp1
, ft
);
10523 gen_load_fpr32(ctx
, fp2
, fs
);
10527 tcg_gen_andi_i32(fp0
, fp0
, 1);
10528 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10531 tcg_gen_andi_i32(fp1
, fp1
, 1);
10532 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10535 tcg_gen_andi_i32(fp1
, fp1
, 1);
10536 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10539 MIPS_INVAL("gen_sel_s");
10540 generate_exception_end(ctx
, EXCP_RI
);
10544 gen_store_fpr32(ctx
, fp0
, fd
);
10545 tcg_temp_free_i32(fp2
);
10546 tcg_temp_free_i32(fp1
);
10547 tcg_temp_free_i32(fp0
);
10548 tcg_temp_free_i32(t1
);
10551 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10554 TCGv_i64 t1
= tcg_const_i64(0);
10555 TCGv_i64 fp0
= tcg_temp_new_i64();
10556 TCGv_i64 fp1
= tcg_temp_new_i64();
10557 TCGv_i64 fp2
= tcg_temp_new_i64();
10558 gen_load_fpr64(ctx
, fp0
, fd
);
10559 gen_load_fpr64(ctx
, fp1
, ft
);
10560 gen_load_fpr64(ctx
, fp2
, fs
);
10564 tcg_gen_andi_i64(fp0
, fp0
, 1);
10565 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10568 tcg_gen_andi_i64(fp1
, fp1
, 1);
10569 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10572 tcg_gen_andi_i64(fp1
, fp1
, 1);
10573 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10576 MIPS_INVAL("gen_sel_d");
10577 generate_exception_end(ctx
, EXCP_RI
);
10581 gen_store_fpr64(ctx
, fp0
, fd
);
10582 tcg_temp_free_i64(fp2
);
10583 tcg_temp_free_i64(fp1
);
10584 tcg_temp_free_i64(fp0
);
10585 tcg_temp_free_i64(t1
);
10588 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10589 int ft
, int fs
, int fd
, int cc
)
10591 uint32_t func
= ctx
->opcode
& 0x3f;
10595 TCGv_i32 fp0
= tcg_temp_new_i32();
10596 TCGv_i32 fp1
= tcg_temp_new_i32();
10598 gen_load_fpr32(ctx
, fp0
, fs
);
10599 gen_load_fpr32(ctx
, fp1
, ft
);
10600 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10601 tcg_temp_free_i32(fp1
);
10602 gen_store_fpr32(ctx
, fp0
, fd
);
10603 tcg_temp_free_i32(fp0
);
10608 TCGv_i32 fp0
= tcg_temp_new_i32();
10609 TCGv_i32 fp1
= tcg_temp_new_i32();
10611 gen_load_fpr32(ctx
, fp0
, fs
);
10612 gen_load_fpr32(ctx
, fp1
, ft
);
10613 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10614 tcg_temp_free_i32(fp1
);
10615 gen_store_fpr32(ctx
, fp0
, fd
);
10616 tcg_temp_free_i32(fp0
);
10621 TCGv_i32 fp0
= tcg_temp_new_i32();
10622 TCGv_i32 fp1
= tcg_temp_new_i32();
10624 gen_load_fpr32(ctx
, fp0
, fs
);
10625 gen_load_fpr32(ctx
, fp1
, ft
);
10626 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10627 tcg_temp_free_i32(fp1
);
10628 gen_store_fpr32(ctx
, fp0
, fd
);
10629 tcg_temp_free_i32(fp0
);
10634 TCGv_i32 fp0
= tcg_temp_new_i32();
10635 TCGv_i32 fp1
= tcg_temp_new_i32();
10637 gen_load_fpr32(ctx
, fp0
, fs
);
10638 gen_load_fpr32(ctx
, fp1
, ft
);
10639 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10640 tcg_temp_free_i32(fp1
);
10641 gen_store_fpr32(ctx
, fp0
, fd
);
10642 tcg_temp_free_i32(fp0
);
10647 TCGv_i32 fp0
= tcg_temp_new_i32();
10649 gen_load_fpr32(ctx
, fp0
, fs
);
10650 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10651 gen_store_fpr32(ctx
, fp0
, fd
);
10652 tcg_temp_free_i32(fp0
);
10657 TCGv_i32 fp0
= tcg_temp_new_i32();
10659 gen_load_fpr32(ctx
, fp0
, fs
);
10660 if (ctx
->abs2008
) {
10661 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10663 gen_helper_float_abs_s(fp0
, fp0
);
10665 gen_store_fpr32(ctx
, fp0
, fd
);
10666 tcg_temp_free_i32(fp0
);
10671 TCGv_i32 fp0
= tcg_temp_new_i32();
10673 gen_load_fpr32(ctx
, fp0
, fs
);
10674 gen_store_fpr32(ctx
, fp0
, fd
);
10675 tcg_temp_free_i32(fp0
);
10680 TCGv_i32 fp0
= tcg_temp_new_i32();
10682 gen_load_fpr32(ctx
, fp0
, fs
);
10683 if (ctx
->abs2008
) {
10684 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10686 gen_helper_float_chs_s(fp0
, fp0
);
10688 gen_store_fpr32(ctx
, fp0
, fd
);
10689 tcg_temp_free_i32(fp0
);
10692 case OPC_ROUND_L_S
:
10693 check_cp1_64bitmode(ctx
);
10695 TCGv_i32 fp32
= tcg_temp_new_i32();
10696 TCGv_i64 fp64
= tcg_temp_new_i64();
10698 gen_load_fpr32(ctx
, fp32
, fs
);
10699 if (ctx
->nan2008
) {
10700 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10702 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10704 tcg_temp_free_i32(fp32
);
10705 gen_store_fpr64(ctx
, fp64
, fd
);
10706 tcg_temp_free_i64(fp64
);
10709 case OPC_TRUNC_L_S
:
10710 check_cp1_64bitmode(ctx
);
10712 TCGv_i32 fp32
= tcg_temp_new_i32();
10713 TCGv_i64 fp64
= tcg_temp_new_i64();
10715 gen_load_fpr32(ctx
, fp32
, fs
);
10716 if (ctx
->nan2008
) {
10717 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10719 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10721 tcg_temp_free_i32(fp32
);
10722 gen_store_fpr64(ctx
, fp64
, fd
);
10723 tcg_temp_free_i64(fp64
);
10727 check_cp1_64bitmode(ctx
);
10729 TCGv_i32 fp32
= tcg_temp_new_i32();
10730 TCGv_i64 fp64
= tcg_temp_new_i64();
10732 gen_load_fpr32(ctx
, fp32
, fs
);
10733 if (ctx
->nan2008
) {
10734 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10736 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10738 tcg_temp_free_i32(fp32
);
10739 gen_store_fpr64(ctx
, fp64
, fd
);
10740 tcg_temp_free_i64(fp64
);
10743 case OPC_FLOOR_L_S
:
10744 check_cp1_64bitmode(ctx
);
10746 TCGv_i32 fp32
= tcg_temp_new_i32();
10747 TCGv_i64 fp64
= tcg_temp_new_i64();
10749 gen_load_fpr32(ctx
, fp32
, fs
);
10750 if (ctx
->nan2008
) {
10751 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10753 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10755 tcg_temp_free_i32(fp32
);
10756 gen_store_fpr64(ctx
, fp64
, fd
);
10757 tcg_temp_free_i64(fp64
);
10760 case OPC_ROUND_W_S
:
10762 TCGv_i32 fp0
= tcg_temp_new_i32();
10764 gen_load_fpr32(ctx
, fp0
, fs
);
10765 if (ctx
->nan2008
) {
10766 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10768 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10770 gen_store_fpr32(ctx
, fp0
, fd
);
10771 tcg_temp_free_i32(fp0
);
10774 case OPC_TRUNC_W_S
:
10776 TCGv_i32 fp0
= tcg_temp_new_i32();
10778 gen_load_fpr32(ctx
, fp0
, fs
);
10779 if (ctx
->nan2008
) {
10780 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10782 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10784 gen_store_fpr32(ctx
, fp0
, fd
);
10785 tcg_temp_free_i32(fp0
);
10790 TCGv_i32 fp0
= tcg_temp_new_i32();
10792 gen_load_fpr32(ctx
, fp0
, fs
);
10793 if (ctx
->nan2008
) {
10794 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10796 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10798 gen_store_fpr32(ctx
, fp0
, fd
);
10799 tcg_temp_free_i32(fp0
);
10802 case OPC_FLOOR_W_S
:
10804 TCGv_i32 fp0
= tcg_temp_new_i32();
10806 gen_load_fpr32(ctx
, fp0
, fs
);
10807 if (ctx
->nan2008
) {
10808 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10810 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10812 gen_store_fpr32(ctx
, fp0
, fd
);
10813 tcg_temp_free_i32(fp0
);
10817 check_insn(ctx
, ISA_MIPS32R6
);
10818 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10821 check_insn(ctx
, ISA_MIPS32R6
);
10822 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10825 check_insn(ctx
, ISA_MIPS32R6
);
10826 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10830 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10835 TCGLabel
*l1
= gen_new_label();
10839 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10841 fp0
= tcg_temp_new_i32();
10842 gen_load_fpr32(ctx
, fp0
, fs
);
10843 gen_store_fpr32(ctx
, fp0
, fd
);
10844 tcg_temp_free_i32(fp0
);
10849 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10851 TCGLabel
*l1
= gen_new_label();
10855 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10856 fp0
= tcg_temp_new_i32();
10857 gen_load_fpr32(ctx
, fp0
, fs
);
10858 gen_store_fpr32(ctx
, fp0
, fd
);
10859 tcg_temp_free_i32(fp0
);
10866 TCGv_i32 fp0
= tcg_temp_new_i32();
10868 gen_load_fpr32(ctx
, fp0
, fs
);
10869 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10870 gen_store_fpr32(ctx
, fp0
, fd
);
10871 tcg_temp_free_i32(fp0
);
10876 TCGv_i32 fp0
= tcg_temp_new_i32();
10878 gen_load_fpr32(ctx
, fp0
, fs
);
10879 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10880 gen_store_fpr32(ctx
, fp0
, fd
);
10881 tcg_temp_free_i32(fp0
);
10885 check_insn(ctx
, ISA_MIPS32R6
);
10887 TCGv_i32 fp0
= tcg_temp_new_i32();
10888 TCGv_i32 fp1
= tcg_temp_new_i32();
10889 TCGv_i32 fp2
= tcg_temp_new_i32();
10890 gen_load_fpr32(ctx
, fp0
, fs
);
10891 gen_load_fpr32(ctx
, fp1
, ft
);
10892 gen_load_fpr32(ctx
, fp2
, fd
);
10893 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10894 gen_store_fpr32(ctx
, fp2
, fd
);
10895 tcg_temp_free_i32(fp2
);
10896 tcg_temp_free_i32(fp1
);
10897 tcg_temp_free_i32(fp0
);
10901 check_insn(ctx
, ISA_MIPS32R6
);
10903 TCGv_i32 fp0
= tcg_temp_new_i32();
10904 TCGv_i32 fp1
= tcg_temp_new_i32();
10905 TCGv_i32 fp2
= tcg_temp_new_i32();
10906 gen_load_fpr32(ctx
, fp0
, fs
);
10907 gen_load_fpr32(ctx
, fp1
, ft
);
10908 gen_load_fpr32(ctx
, fp2
, fd
);
10909 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10910 gen_store_fpr32(ctx
, fp2
, fd
);
10911 tcg_temp_free_i32(fp2
);
10912 tcg_temp_free_i32(fp1
);
10913 tcg_temp_free_i32(fp0
);
10917 check_insn(ctx
, ISA_MIPS32R6
);
10919 TCGv_i32 fp0
= tcg_temp_new_i32();
10920 gen_load_fpr32(ctx
, fp0
, fs
);
10921 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10922 gen_store_fpr32(ctx
, fp0
, fd
);
10923 tcg_temp_free_i32(fp0
);
10927 check_insn(ctx
, ISA_MIPS32R6
);
10929 TCGv_i32 fp0
= tcg_temp_new_i32();
10930 gen_load_fpr32(ctx
, fp0
, fs
);
10931 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10932 gen_store_fpr32(ctx
, fp0
, fd
);
10933 tcg_temp_free_i32(fp0
);
10936 case OPC_MIN_S
: /* OPC_RECIP2_S */
10937 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10939 TCGv_i32 fp0
= tcg_temp_new_i32();
10940 TCGv_i32 fp1
= tcg_temp_new_i32();
10941 TCGv_i32 fp2
= tcg_temp_new_i32();
10942 gen_load_fpr32(ctx
, fp0
, fs
);
10943 gen_load_fpr32(ctx
, fp1
, ft
);
10944 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10945 gen_store_fpr32(ctx
, fp2
, fd
);
10946 tcg_temp_free_i32(fp2
);
10947 tcg_temp_free_i32(fp1
);
10948 tcg_temp_free_i32(fp0
);
10951 check_cp1_64bitmode(ctx
);
10953 TCGv_i32 fp0
= tcg_temp_new_i32();
10954 TCGv_i32 fp1
= tcg_temp_new_i32();
10956 gen_load_fpr32(ctx
, fp0
, fs
);
10957 gen_load_fpr32(ctx
, fp1
, ft
);
10958 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10959 tcg_temp_free_i32(fp1
);
10960 gen_store_fpr32(ctx
, fp0
, fd
);
10961 tcg_temp_free_i32(fp0
);
10965 case OPC_MINA_S
: /* OPC_RECIP1_S */
10966 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10968 TCGv_i32 fp0
= tcg_temp_new_i32();
10969 TCGv_i32 fp1
= tcg_temp_new_i32();
10970 TCGv_i32 fp2
= tcg_temp_new_i32();
10971 gen_load_fpr32(ctx
, fp0
, fs
);
10972 gen_load_fpr32(ctx
, fp1
, ft
);
10973 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10974 gen_store_fpr32(ctx
, fp2
, fd
);
10975 tcg_temp_free_i32(fp2
);
10976 tcg_temp_free_i32(fp1
);
10977 tcg_temp_free_i32(fp0
);
10980 check_cp1_64bitmode(ctx
);
10982 TCGv_i32 fp0
= tcg_temp_new_i32();
10984 gen_load_fpr32(ctx
, fp0
, fs
);
10985 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10986 gen_store_fpr32(ctx
, fp0
, fd
);
10987 tcg_temp_free_i32(fp0
);
10991 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10992 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10994 TCGv_i32 fp0
= tcg_temp_new_i32();
10995 TCGv_i32 fp1
= tcg_temp_new_i32();
10996 gen_load_fpr32(ctx
, fp0
, fs
);
10997 gen_load_fpr32(ctx
, fp1
, ft
);
10998 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10999 gen_store_fpr32(ctx
, fp1
, fd
);
11000 tcg_temp_free_i32(fp1
);
11001 tcg_temp_free_i32(fp0
);
11004 check_cp1_64bitmode(ctx
);
11006 TCGv_i32 fp0
= tcg_temp_new_i32();
11008 gen_load_fpr32(ctx
, fp0
, fs
);
11009 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11010 gen_store_fpr32(ctx
, fp0
, fd
);
11011 tcg_temp_free_i32(fp0
);
11015 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11016 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11018 TCGv_i32 fp0
= tcg_temp_new_i32();
11019 TCGv_i32 fp1
= tcg_temp_new_i32();
11020 gen_load_fpr32(ctx
, fp0
, fs
);
11021 gen_load_fpr32(ctx
, fp1
, ft
);
11022 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11023 gen_store_fpr32(ctx
, fp1
, fd
);
11024 tcg_temp_free_i32(fp1
);
11025 tcg_temp_free_i32(fp0
);
11028 check_cp1_64bitmode(ctx
);
11030 TCGv_i32 fp0
= tcg_temp_new_i32();
11031 TCGv_i32 fp1
= tcg_temp_new_i32();
11033 gen_load_fpr32(ctx
, fp0
, fs
);
11034 gen_load_fpr32(ctx
, fp1
, ft
);
11035 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11036 tcg_temp_free_i32(fp1
);
11037 gen_store_fpr32(ctx
, fp0
, fd
);
11038 tcg_temp_free_i32(fp0
);
11043 check_cp1_registers(ctx
, fd
);
11045 TCGv_i32 fp32
= tcg_temp_new_i32();
11046 TCGv_i64 fp64
= tcg_temp_new_i64();
11048 gen_load_fpr32(ctx
, fp32
, fs
);
11049 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11050 tcg_temp_free_i32(fp32
);
11051 gen_store_fpr64(ctx
, fp64
, fd
);
11052 tcg_temp_free_i64(fp64
);
11057 TCGv_i32 fp0
= tcg_temp_new_i32();
11059 gen_load_fpr32(ctx
, fp0
, fs
);
11060 if (ctx
->nan2008
) {
11061 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11063 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11065 gen_store_fpr32(ctx
, fp0
, fd
);
11066 tcg_temp_free_i32(fp0
);
11070 check_cp1_64bitmode(ctx
);
11072 TCGv_i32 fp32
= tcg_temp_new_i32();
11073 TCGv_i64 fp64
= tcg_temp_new_i64();
11075 gen_load_fpr32(ctx
, fp32
, fs
);
11076 if (ctx
->nan2008
) {
11077 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11079 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11081 tcg_temp_free_i32(fp32
);
11082 gen_store_fpr64(ctx
, fp64
, fd
);
11083 tcg_temp_free_i64(fp64
);
11089 TCGv_i64 fp64
= tcg_temp_new_i64();
11090 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11091 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11093 gen_load_fpr32(ctx
, fp32_0
, fs
);
11094 gen_load_fpr32(ctx
, fp32_1
, ft
);
11095 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11096 tcg_temp_free_i32(fp32_1
);
11097 tcg_temp_free_i32(fp32_0
);
11098 gen_store_fpr64(ctx
, fp64
, fd
);
11099 tcg_temp_free_i64(fp64
);
11105 case OPC_CMP_UEQ_S
:
11106 case OPC_CMP_OLT_S
:
11107 case OPC_CMP_ULT_S
:
11108 case OPC_CMP_OLE_S
:
11109 case OPC_CMP_ULE_S
:
11111 case OPC_CMP_NGLE_S
:
11112 case OPC_CMP_SEQ_S
:
11113 case OPC_CMP_NGL_S
:
11115 case OPC_CMP_NGE_S
:
11117 case OPC_CMP_NGT_S
:
11118 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11119 if (ctx
->opcode
& (1 << 6)) {
11120 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11122 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11126 check_cp1_registers(ctx
, fs
| ft
| fd
);
11128 TCGv_i64 fp0
= tcg_temp_new_i64();
11129 TCGv_i64 fp1
= tcg_temp_new_i64();
11131 gen_load_fpr64(ctx
, fp0
, fs
);
11132 gen_load_fpr64(ctx
, fp1
, ft
);
11133 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11134 tcg_temp_free_i64(fp1
);
11135 gen_store_fpr64(ctx
, fp0
, fd
);
11136 tcg_temp_free_i64(fp0
);
11140 check_cp1_registers(ctx
, fs
| ft
| fd
);
11142 TCGv_i64 fp0
= tcg_temp_new_i64();
11143 TCGv_i64 fp1
= tcg_temp_new_i64();
11145 gen_load_fpr64(ctx
, fp0
, fs
);
11146 gen_load_fpr64(ctx
, fp1
, ft
);
11147 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11148 tcg_temp_free_i64(fp1
);
11149 gen_store_fpr64(ctx
, fp0
, fd
);
11150 tcg_temp_free_i64(fp0
);
11154 check_cp1_registers(ctx
, fs
| ft
| fd
);
11156 TCGv_i64 fp0
= tcg_temp_new_i64();
11157 TCGv_i64 fp1
= tcg_temp_new_i64();
11159 gen_load_fpr64(ctx
, fp0
, fs
);
11160 gen_load_fpr64(ctx
, fp1
, ft
);
11161 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11162 tcg_temp_free_i64(fp1
);
11163 gen_store_fpr64(ctx
, fp0
, fd
);
11164 tcg_temp_free_i64(fp0
);
11168 check_cp1_registers(ctx
, fs
| ft
| fd
);
11170 TCGv_i64 fp0
= tcg_temp_new_i64();
11171 TCGv_i64 fp1
= tcg_temp_new_i64();
11173 gen_load_fpr64(ctx
, fp0
, fs
);
11174 gen_load_fpr64(ctx
, fp1
, ft
);
11175 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11176 tcg_temp_free_i64(fp1
);
11177 gen_store_fpr64(ctx
, fp0
, fd
);
11178 tcg_temp_free_i64(fp0
);
11182 check_cp1_registers(ctx
, fs
| fd
);
11184 TCGv_i64 fp0
= tcg_temp_new_i64();
11186 gen_load_fpr64(ctx
, fp0
, fs
);
11187 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11188 gen_store_fpr64(ctx
, fp0
, fd
);
11189 tcg_temp_free_i64(fp0
);
11193 check_cp1_registers(ctx
, fs
| fd
);
11195 TCGv_i64 fp0
= tcg_temp_new_i64();
11197 gen_load_fpr64(ctx
, fp0
, fs
);
11198 if (ctx
->abs2008
) {
11199 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11201 gen_helper_float_abs_d(fp0
, fp0
);
11203 gen_store_fpr64(ctx
, fp0
, fd
);
11204 tcg_temp_free_i64(fp0
);
11208 check_cp1_registers(ctx
, fs
| fd
);
11210 TCGv_i64 fp0
= tcg_temp_new_i64();
11212 gen_load_fpr64(ctx
, fp0
, fs
);
11213 gen_store_fpr64(ctx
, fp0
, fd
);
11214 tcg_temp_free_i64(fp0
);
11218 check_cp1_registers(ctx
, fs
| fd
);
11220 TCGv_i64 fp0
= tcg_temp_new_i64();
11222 gen_load_fpr64(ctx
, fp0
, fs
);
11223 if (ctx
->abs2008
) {
11224 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11226 gen_helper_float_chs_d(fp0
, fp0
);
11228 gen_store_fpr64(ctx
, fp0
, fd
);
11229 tcg_temp_free_i64(fp0
);
11232 case OPC_ROUND_L_D
:
11233 check_cp1_64bitmode(ctx
);
11235 TCGv_i64 fp0
= tcg_temp_new_i64();
11237 gen_load_fpr64(ctx
, fp0
, fs
);
11238 if (ctx
->nan2008
) {
11239 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11241 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11243 gen_store_fpr64(ctx
, fp0
, fd
);
11244 tcg_temp_free_i64(fp0
);
11247 case OPC_TRUNC_L_D
:
11248 check_cp1_64bitmode(ctx
);
11250 TCGv_i64 fp0
= tcg_temp_new_i64();
11252 gen_load_fpr64(ctx
, fp0
, fs
);
11253 if (ctx
->nan2008
) {
11254 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11256 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11258 gen_store_fpr64(ctx
, fp0
, fd
);
11259 tcg_temp_free_i64(fp0
);
11263 check_cp1_64bitmode(ctx
);
11265 TCGv_i64 fp0
= tcg_temp_new_i64();
11267 gen_load_fpr64(ctx
, fp0
, fs
);
11268 if (ctx
->nan2008
) {
11269 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11271 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11273 gen_store_fpr64(ctx
, fp0
, fd
);
11274 tcg_temp_free_i64(fp0
);
11277 case OPC_FLOOR_L_D
:
11278 check_cp1_64bitmode(ctx
);
11280 TCGv_i64 fp0
= tcg_temp_new_i64();
11282 gen_load_fpr64(ctx
, fp0
, fs
);
11283 if (ctx
->nan2008
) {
11284 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11286 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11288 gen_store_fpr64(ctx
, fp0
, fd
);
11289 tcg_temp_free_i64(fp0
);
11292 case OPC_ROUND_W_D
:
11293 check_cp1_registers(ctx
, fs
);
11295 TCGv_i32 fp32
= tcg_temp_new_i32();
11296 TCGv_i64 fp64
= tcg_temp_new_i64();
11298 gen_load_fpr64(ctx
, fp64
, fs
);
11299 if (ctx
->nan2008
) {
11300 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11302 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11304 tcg_temp_free_i64(fp64
);
11305 gen_store_fpr32(ctx
, fp32
, fd
);
11306 tcg_temp_free_i32(fp32
);
11309 case OPC_TRUNC_W_D
:
11310 check_cp1_registers(ctx
, fs
);
11312 TCGv_i32 fp32
= tcg_temp_new_i32();
11313 TCGv_i64 fp64
= tcg_temp_new_i64();
11315 gen_load_fpr64(ctx
, fp64
, fs
);
11316 if (ctx
->nan2008
) {
11317 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11319 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11321 tcg_temp_free_i64(fp64
);
11322 gen_store_fpr32(ctx
, fp32
, fd
);
11323 tcg_temp_free_i32(fp32
);
11327 check_cp1_registers(ctx
, fs
);
11329 TCGv_i32 fp32
= tcg_temp_new_i32();
11330 TCGv_i64 fp64
= tcg_temp_new_i64();
11332 gen_load_fpr64(ctx
, fp64
, fs
);
11333 if (ctx
->nan2008
) {
11334 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11336 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11338 tcg_temp_free_i64(fp64
);
11339 gen_store_fpr32(ctx
, fp32
, fd
);
11340 tcg_temp_free_i32(fp32
);
11343 case OPC_FLOOR_W_D
:
11344 check_cp1_registers(ctx
, fs
);
11346 TCGv_i32 fp32
= tcg_temp_new_i32();
11347 TCGv_i64 fp64
= tcg_temp_new_i64();
11349 gen_load_fpr64(ctx
, fp64
, fs
);
11350 if (ctx
->nan2008
) {
11351 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11353 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11355 tcg_temp_free_i64(fp64
);
11356 gen_store_fpr32(ctx
, fp32
, fd
);
11357 tcg_temp_free_i32(fp32
);
11361 check_insn(ctx
, ISA_MIPS32R6
);
11362 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11365 check_insn(ctx
, ISA_MIPS32R6
);
11366 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11369 check_insn(ctx
, ISA_MIPS32R6
);
11370 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11373 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11374 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11379 TCGLabel
*l1
= gen_new_label();
11383 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11385 fp0
= tcg_temp_new_i64();
11386 gen_load_fpr64(ctx
, fp0
, fs
);
11387 gen_store_fpr64(ctx
, fp0
, fd
);
11388 tcg_temp_free_i64(fp0
);
11393 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11395 TCGLabel
*l1
= gen_new_label();
11399 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11400 fp0
= tcg_temp_new_i64();
11401 gen_load_fpr64(ctx
, fp0
, fs
);
11402 gen_store_fpr64(ctx
, fp0
, fd
);
11403 tcg_temp_free_i64(fp0
);
11409 check_cp1_registers(ctx
, fs
| fd
);
11411 TCGv_i64 fp0
= tcg_temp_new_i64();
11413 gen_load_fpr64(ctx
, fp0
, fs
);
11414 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11415 gen_store_fpr64(ctx
, fp0
, fd
);
11416 tcg_temp_free_i64(fp0
);
11420 check_cp1_registers(ctx
, fs
| fd
);
11422 TCGv_i64 fp0
= tcg_temp_new_i64();
11424 gen_load_fpr64(ctx
, fp0
, fs
);
11425 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11426 gen_store_fpr64(ctx
, fp0
, fd
);
11427 tcg_temp_free_i64(fp0
);
11431 check_insn(ctx
, ISA_MIPS32R6
);
11433 TCGv_i64 fp0
= tcg_temp_new_i64();
11434 TCGv_i64 fp1
= tcg_temp_new_i64();
11435 TCGv_i64 fp2
= tcg_temp_new_i64();
11436 gen_load_fpr64(ctx
, fp0
, fs
);
11437 gen_load_fpr64(ctx
, fp1
, ft
);
11438 gen_load_fpr64(ctx
, fp2
, fd
);
11439 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11440 gen_store_fpr64(ctx
, fp2
, fd
);
11441 tcg_temp_free_i64(fp2
);
11442 tcg_temp_free_i64(fp1
);
11443 tcg_temp_free_i64(fp0
);
11447 check_insn(ctx
, ISA_MIPS32R6
);
11449 TCGv_i64 fp0
= tcg_temp_new_i64();
11450 TCGv_i64 fp1
= tcg_temp_new_i64();
11451 TCGv_i64 fp2
= tcg_temp_new_i64();
11452 gen_load_fpr64(ctx
, fp0
, fs
);
11453 gen_load_fpr64(ctx
, fp1
, ft
);
11454 gen_load_fpr64(ctx
, fp2
, fd
);
11455 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11456 gen_store_fpr64(ctx
, fp2
, fd
);
11457 tcg_temp_free_i64(fp2
);
11458 tcg_temp_free_i64(fp1
);
11459 tcg_temp_free_i64(fp0
);
11463 check_insn(ctx
, ISA_MIPS32R6
);
11465 TCGv_i64 fp0
= tcg_temp_new_i64();
11466 gen_load_fpr64(ctx
, fp0
, fs
);
11467 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11468 gen_store_fpr64(ctx
, fp0
, fd
);
11469 tcg_temp_free_i64(fp0
);
11473 check_insn(ctx
, ISA_MIPS32R6
);
11475 TCGv_i64 fp0
= tcg_temp_new_i64();
11476 gen_load_fpr64(ctx
, fp0
, fs
);
11477 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11478 gen_store_fpr64(ctx
, fp0
, fd
);
11479 tcg_temp_free_i64(fp0
);
11482 case OPC_MIN_D
: /* OPC_RECIP2_D */
11483 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11485 TCGv_i64 fp0
= tcg_temp_new_i64();
11486 TCGv_i64 fp1
= tcg_temp_new_i64();
11487 gen_load_fpr64(ctx
, fp0
, fs
);
11488 gen_load_fpr64(ctx
, fp1
, ft
);
11489 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11490 gen_store_fpr64(ctx
, fp1
, fd
);
11491 tcg_temp_free_i64(fp1
);
11492 tcg_temp_free_i64(fp0
);
11495 check_cp1_64bitmode(ctx
);
11497 TCGv_i64 fp0
= tcg_temp_new_i64();
11498 TCGv_i64 fp1
= tcg_temp_new_i64();
11500 gen_load_fpr64(ctx
, fp0
, fs
);
11501 gen_load_fpr64(ctx
, fp1
, ft
);
11502 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11503 tcg_temp_free_i64(fp1
);
11504 gen_store_fpr64(ctx
, fp0
, fd
);
11505 tcg_temp_free_i64(fp0
);
11509 case OPC_MINA_D
: /* OPC_RECIP1_D */
11510 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11512 TCGv_i64 fp0
= tcg_temp_new_i64();
11513 TCGv_i64 fp1
= tcg_temp_new_i64();
11514 gen_load_fpr64(ctx
, fp0
, fs
);
11515 gen_load_fpr64(ctx
, fp1
, ft
);
11516 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11517 gen_store_fpr64(ctx
, fp1
, fd
);
11518 tcg_temp_free_i64(fp1
);
11519 tcg_temp_free_i64(fp0
);
11522 check_cp1_64bitmode(ctx
);
11524 TCGv_i64 fp0
= tcg_temp_new_i64();
11526 gen_load_fpr64(ctx
, fp0
, fs
);
11527 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11528 gen_store_fpr64(ctx
, fp0
, fd
);
11529 tcg_temp_free_i64(fp0
);
11533 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11534 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11536 TCGv_i64 fp0
= tcg_temp_new_i64();
11537 TCGv_i64 fp1
= tcg_temp_new_i64();
11538 gen_load_fpr64(ctx
, fp0
, fs
);
11539 gen_load_fpr64(ctx
, fp1
, ft
);
11540 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11541 gen_store_fpr64(ctx
, fp1
, fd
);
11542 tcg_temp_free_i64(fp1
);
11543 tcg_temp_free_i64(fp0
);
11546 check_cp1_64bitmode(ctx
);
11548 TCGv_i64 fp0
= tcg_temp_new_i64();
11550 gen_load_fpr64(ctx
, fp0
, fs
);
11551 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11552 gen_store_fpr64(ctx
, fp0
, fd
);
11553 tcg_temp_free_i64(fp0
);
11557 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11558 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11560 TCGv_i64 fp0
= tcg_temp_new_i64();
11561 TCGv_i64 fp1
= tcg_temp_new_i64();
11562 gen_load_fpr64(ctx
, fp0
, fs
);
11563 gen_load_fpr64(ctx
, fp1
, ft
);
11564 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11565 gen_store_fpr64(ctx
, fp1
, fd
);
11566 tcg_temp_free_i64(fp1
);
11567 tcg_temp_free_i64(fp0
);
11570 check_cp1_64bitmode(ctx
);
11572 TCGv_i64 fp0
= tcg_temp_new_i64();
11573 TCGv_i64 fp1
= tcg_temp_new_i64();
11575 gen_load_fpr64(ctx
, fp0
, fs
);
11576 gen_load_fpr64(ctx
, fp1
, ft
);
11577 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11578 tcg_temp_free_i64(fp1
);
11579 gen_store_fpr64(ctx
, fp0
, fd
);
11580 tcg_temp_free_i64(fp0
);
11587 case OPC_CMP_UEQ_D
:
11588 case OPC_CMP_OLT_D
:
11589 case OPC_CMP_ULT_D
:
11590 case OPC_CMP_OLE_D
:
11591 case OPC_CMP_ULE_D
:
11593 case OPC_CMP_NGLE_D
:
11594 case OPC_CMP_SEQ_D
:
11595 case OPC_CMP_NGL_D
:
11597 case OPC_CMP_NGE_D
:
11599 case OPC_CMP_NGT_D
:
11600 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11601 if (ctx
->opcode
& (1 << 6)) {
11602 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11604 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11608 check_cp1_registers(ctx
, fs
);
11610 TCGv_i32 fp32
= tcg_temp_new_i32();
11611 TCGv_i64 fp64
= tcg_temp_new_i64();
11613 gen_load_fpr64(ctx
, fp64
, fs
);
11614 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11615 tcg_temp_free_i64(fp64
);
11616 gen_store_fpr32(ctx
, fp32
, fd
);
11617 tcg_temp_free_i32(fp32
);
11621 check_cp1_registers(ctx
, fs
);
11623 TCGv_i32 fp32
= tcg_temp_new_i32();
11624 TCGv_i64 fp64
= tcg_temp_new_i64();
11626 gen_load_fpr64(ctx
, fp64
, fs
);
11627 if (ctx
->nan2008
) {
11628 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11630 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11632 tcg_temp_free_i64(fp64
);
11633 gen_store_fpr32(ctx
, fp32
, fd
);
11634 tcg_temp_free_i32(fp32
);
11638 check_cp1_64bitmode(ctx
);
11640 TCGv_i64 fp0
= tcg_temp_new_i64();
11642 gen_load_fpr64(ctx
, fp0
, fs
);
11643 if (ctx
->nan2008
) {
11644 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11646 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11648 gen_store_fpr64(ctx
, fp0
, fd
);
11649 tcg_temp_free_i64(fp0
);
11654 TCGv_i32 fp0
= tcg_temp_new_i32();
11656 gen_load_fpr32(ctx
, fp0
, fs
);
11657 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11658 gen_store_fpr32(ctx
, fp0
, fd
);
11659 tcg_temp_free_i32(fp0
);
11663 check_cp1_registers(ctx
, fd
);
11665 TCGv_i32 fp32
= tcg_temp_new_i32();
11666 TCGv_i64 fp64
= tcg_temp_new_i64();
11668 gen_load_fpr32(ctx
, fp32
, fs
);
11669 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11670 tcg_temp_free_i32(fp32
);
11671 gen_store_fpr64(ctx
, fp64
, fd
);
11672 tcg_temp_free_i64(fp64
);
11676 check_cp1_64bitmode(ctx
);
11678 TCGv_i32 fp32
= tcg_temp_new_i32();
11679 TCGv_i64 fp64
= tcg_temp_new_i64();
11681 gen_load_fpr64(ctx
, fp64
, fs
);
11682 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11683 tcg_temp_free_i64(fp64
);
11684 gen_store_fpr32(ctx
, fp32
, fd
);
11685 tcg_temp_free_i32(fp32
);
11689 check_cp1_64bitmode(ctx
);
11691 TCGv_i64 fp0
= tcg_temp_new_i64();
11693 gen_load_fpr64(ctx
, fp0
, fs
);
11694 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11695 gen_store_fpr64(ctx
, fp0
, fd
);
11696 tcg_temp_free_i64(fp0
);
11699 case OPC_CVT_PS_PW
:
11702 TCGv_i64 fp0
= tcg_temp_new_i64();
11704 gen_load_fpr64(ctx
, fp0
, fs
);
11705 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11706 gen_store_fpr64(ctx
, fp0
, fd
);
11707 tcg_temp_free_i64(fp0
);
11713 TCGv_i64 fp0
= tcg_temp_new_i64();
11714 TCGv_i64 fp1
= tcg_temp_new_i64();
11716 gen_load_fpr64(ctx
, fp0
, fs
);
11717 gen_load_fpr64(ctx
, fp1
, ft
);
11718 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11719 tcg_temp_free_i64(fp1
);
11720 gen_store_fpr64(ctx
, fp0
, fd
);
11721 tcg_temp_free_i64(fp0
);
11727 TCGv_i64 fp0
= tcg_temp_new_i64();
11728 TCGv_i64 fp1
= tcg_temp_new_i64();
11730 gen_load_fpr64(ctx
, fp0
, fs
);
11731 gen_load_fpr64(ctx
, fp1
, ft
);
11732 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11733 tcg_temp_free_i64(fp1
);
11734 gen_store_fpr64(ctx
, fp0
, fd
);
11735 tcg_temp_free_i64(fp0
);
11741 TCGv_i64 fp0
= tcg_temp_new_i64();
11742 TCGv_i64 fp1
= tcg_temp_new_i64();
11744 gen_load_fpr64(ctx
, fp0
, fs
);
11745 gen_load_fpr64(ctx
, fp1
, ft
);
11746 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11747 tcg_temp_free_i64(fp1
);
11748 gen_store_fpr64(ctx
, fp0
, fd
);
11749 tcg_temp_free_i64(fp0
);
11755 TCGv_i64 fp0
= tcg_temp_new_i64();
11757 gen_load_fpr64(ctx
, fp0
, fs
);
11758 gen_helper_float_abs_ps(fp0
, fp0
);
11759 gen_store_fpr64(ctx
, fp0
, fd
);
11760 tcg_temp_free_i64(fp0
);
11766 TCGv_i64 fp0
= tcg_temp_new_i64();
11768 gen_load_fpr64(ctx
, fp0
, fs
);
11769 gen_store_fpr64(ctx
, fp0
, fd
);
11770 tcg_temp_free_i64(fp0
);
11776 TCGv_i64 fp0
= tcg_temp_new_i64();
11778 gen_load_fpr64(ctx
, fp0
, fs
);
11779 gen_helper_float_chs_ps(fp0
, fp0
);
11780 gen_store_fpr64(ctx
, fp0
, fd
);
11781 tcg_temp_free_i64(fp0
);
11786 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11791 TCGLabel
*l1
= gen_new_label();
11795 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11796 fp0
= tcg_temp_new_i64();
11797 gen_load_fpr64(ctx
, fp0
, fs
);
11798 gen_store_fpr64(ctx
, fp0
, fd
);
11799 tcg_temp_free_i64(fp0
);
11806 TCGLabel
*l1
= gen_new_label();
11810 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11811 fp0
= tcg_temp_new_i64();
11812 gen_load_fpr64(ctx
, fp0
, fs
);
11813 gen_store_fpr64(ctx
, fp0
, fd
);
11814 tcg_temp_free_i64(fp0
);
11822 TCGv_i64 fp0
= tcg_temp_new_i64();
11823 TCGv_i64 fp1
= tcg_temp_new_i64();
11825 gen_load_fpr64(ctx
, fp0
, ft
);
11826 gen_load_fpr64(ctx
, fp1
, fs
);
11827 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11828 tcg_temp_free_i64(fp1
);
11829 gen_store_fpr64(ctx
, fp0
, fd
);
11830 tcg_temp_free_i64(fp0
);
11836 TCGv_i64 fp0
= tcg_temp_new_i64();
11837 TCGv_i64 fp1
= tcg_temp_new_i64();
11839 gen_load_fpr64(ctx
, fp0
, ft
);
11840 gen_load_fpr64(ctx
, fp1
, fs
);
11841 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11842 tcg_temp_free_i64(fp1
);
11843 gen_store_fpr64(ctx
, fp0
, fd
);
11844 tcg_temp_free_i64(fp0
);
11847 case OPC_RECIP2_PS
:
11850 TCGv_i64 fp0
= tcg_temp_new_i64();
11851 TCGv_i64 fp1
= tcg_temp_new_i64();
11853 gen_load_fpr64(ctx
, fp0
, fs
);
11854 gen_load_fpr64(ctx
, fp1
, ft
);
11855 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11856 tcg_temp_free_i64(fp1
);
11857 gen_store_fpr64(ctx
, fp0
, fd
);
11858 tcg_temp_free_i64(fp0
);
11861 case OPC_RECIP1_PS
:
11864 TCGv_i64 fp0
= tcg_temp_new_i64();
11866 gen_load_fpr64(ctx
, fp0
, fs
);
11867 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11868 gen_store_fpr64(ctx
, fp0
, fd
);
11869 tcg_temp_free_i64(fp0
);
11872 case OPC_RSQRT1_PS
:
11875 TCGv_i64 fp0
= tcg_temp_new_i64();
11877 gen_load_fpr64(ctx
, fp0
, fs
);
11878 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11879 gen_store_fpr64(ctx
, fp0
, fd
);
11880 tcg_temp_free_i64(fp0
);
11883 case OPC_RSQRT2_PS
:
11886 TCGv_i64 fp0
= tcg_temp_new_i64();
11887 TCGv_i64 fp1
= tcg_temp_new_i64();
11889 gen_load_fpr64(ctx
, fp0
, fs
);
11890 gen_load_fpr64(ctx
, fp1
, ft
);
11891 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11892 tcg_temp_free_i64(fp1
);
11893 gen_store_fpr64(ctx
, fp0
, fd
);
11894 tcg_temp_free_i64(fp0
);
11898 check_cp1_64bitmode(ctx
);
11900 TCGv_i32 fp0
= tcg_temp_new_i32();
11902 gen_load_fpr32h(ctx
, fp0
, fs
);
11903 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11904 gen_store_fpr32(ctx
, fp0
, fd
);
11905 tcg_temp_free_i32(fp0
);
11908 case OPC_CVT_PW_PS
:
11911 TCGv_i64 fp0
= tcg_temp_new_i64();
11913 gen_load_fpr64(ctx
, fp0
, fs
);
11914 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11915 gen_store_fpr64(ctx
, fp0
, fd
);
11916 tcg_temp_free_i64(fp0
);
11920 check_cp1_64bitmode(ctx
);
11922 TCGv_i32 fp0
= tcg_temp_new_i32();
11924 gen_load_fpr32(ctx
, fp0
, fs
);
11925 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11926 gen_store_fpr32(ctx
, fp0
, fd
);
11927 tcg_temp_free_i32(fp0
);
11933 TCGv_i32 fp0
= tcg_temp_new_i32();
11934 TCGv_i32 fp1
= tcg_temp_new_i32();
11936 gen_load_fpr32(ctx
, fp0
, fs
);
11937 gen_load_fpr32(ctx
, fp1
, ft
);
11938 gen_store_fpr32h(ctx
, fp0
, fd
);
11939 gen_store_fpr32(ctx
, fp1
, fd
);
11940 tcg_temp_free_i32(fp0
);
11941 tcg_temp_free_i32(fp1
);
11947 TCGv_i32 fp0
= tcg_temp_new_i32();
11948 TCGv_i32 fp1
= tcg_temp_new_i32();
11950 gen_load_fpr32(ctx
, fp0
, fs
);
11951 gen_load_fpr32h(ctx
, fp1
, ft
);
11952 gen_store_fpr32(ctx
, fp1
, fd
);
11953 gen_store_fpr32h(ctx
, fp0
, fd
);
11954 tcg_temp_free_i32(fp0
);
11955 tcg_temp_free_i32(fp1
);
11961 TCGv_i32 fp0
= tcg_temp_new_i32();
11962 TCGv_i32 fp1
= tcg_temp_new_i32();
11964 gen_load_fpr32h(ctx
, fp0
, fs
);
11965 gen_load_fpr32(ctx
, fp1
, ft
);
11966 gen_store_fpr32(ctx
, fp1
, fd
);
11967 gen_store_fpr32h(ctx
, fp0
, fd
);
11968 tcg_temp_free_i32(fp0
);
11969 tcg_temp_free_i32(fp1
);
11975 TCGv_i32 fp0
= tcg_temp_new_i32();
11976 TCGv_i32 fp1
= tcg_temp_new_i32();
11978 gen_load_fpr32h(ctx
, fp0
, fs
);
11979 gen_load_fpr32h(ctx
, fp1
, ft
);
11980 gen_store_fpr32(ctx
, fp1
, fd
);
11981 gen_store_fpr32h(ctx
, fp0
, fd
);
11982 tcg_temp_free_i32(fp0
);
11983 tcg_temp_free_i32(fp1
);
11987 case OPC_CMP_UN_PS
:
11988 case OPC_CMP_EQ_PS
:
11989 case OPC_CMP_UEQ_PS
:
11990 case OPC_CMP_OLT_PS
:
11991 case OPC_CMP_ULT_PS
:
11992 case OPC_CMP_OLE_PS
:
11993 case OPC_CMP_ULE_PS
:
11994 case OPC_CMP_SF_PS
:
11995 case OPC_CMP_NGLE_PS
:
11996 case OPC_CMP_SEQ_PS
:
11997 case OPC_CMP_NGL_PS
:
11998 case OPC_CMP_LT_PS
:
11999 case OPC_CMP_NGE_PS
:
12000 case OPC_CMP_LE_PS
:
12001 case OPC_CMP_NGT_PS
:
12002 if (ctx
->opcode
& (1 << 6)) {
12003 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12005 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12009 MIPS_INVAL("farith");
12010 generate_exception_end(ctx
, EXCP_RI
);
12015 /* Coprocessor 3 (FPU) */
/*
 * Translate a Coprocessor-1 (FPU) indexed load/store (the LWXC1/LDXC1/
 * LUXC1/SWXC1/SDXC1/SUXC1 family): compute the effective address
 * base + index into a temporary, then perform the FP load or store.
 * NOTE(review): the original case labels, braces and break statements were
 * lost in extraction; the comments below annotate only the visible
 * fragments and group them by the operation they appear to implement.
 */
12016 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12017 int fd
, int fs
, int base
, int index
)
/* Temporary holding the effective address for the memory access. */
12019 TCGv t0
= tcg_temp_new();
/* Address formation: one register may be zero, so only the non-zero
   operand (or the full sum) is used. */
12022 gen_load_gpr(t0
, index
);
12023 } else if (index
== 0) {
12024 gen_load_gpr(t0
, base
);
/* General case: t0 = gpr[base] + gpr[index] (target-address-width add). */
12026 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12028 /* Don't do NOP if destination is zero: we must perform the actual
/* 32-bit FP load: sign-extended 32-bit memory read, truncated into a
   32-bit temp, then stored to FPR fd. */
12034 TCGv_i32 fp0
= tcg_temp_new_i32();
12036 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12037 tcg_gen_trunc_tl_i32(fp0
, t0
);
12038 gen_store_fpr32(ctx
, fp0
, fd
);
12039 tcg_temp_free_i32(fp0
);
/* 64-bit FP load: fd must be usable as a 64-bit FPR in the current mode. */
12044 check_cp1_registers(ctx
, fd
);
12046 TCGv_i64 fp0
= tcg_temp_new_i64();
12047 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12048 gen_store_fpr64(ctx
, fp0
, fd
);
12049 tcg_temp_free_i64(fp0
);
/* Unaligned-tolerant 64-bit FP load: clear the low 3 address bits to
   force 8-byte alignment before the access (LUXC1-style semantics). */
12053 check_cp1_64bitmode(ctx
);
12054 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12056 TCGv_i64 fp0
= tcg_temp_new_i64();
12058 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12059 gen_store_fpr64(ctx
, fp0
, fd
);
12060 tcg_temp_free_i64(fp0
);
/* 32-bit FP store: FPR fs -> memory (little/big endian per MO_TEUL). */
12066 TCGv_i32 fp0
= tcg_temp_new_i32();
12067 gen_load_fpr32(ctx
, fp0
, fs
);
12068 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12069 tcg_temp_free_i32(fp0
);
/* 64-bit FP store. */
12074 check_cp1_registers(ctx
, fs
);
12076 TCGv_i64 fp0
= tcg_temp_new_i64();
12077 gen_load_fpr64(ctx
, fp0
, fs
);
12078 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12079 tcg_temp_free_i64(fp0
);
/* Unaligned-tolerant 64-bit FP store: 8-byte-align the address first. */
12083 check_cp1_64bitmode(ctx
);
12084 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12086 TCGv_i64 fp0
= tcg_temp_new_i64();
12087 gen_load_fpr64(ctx
, fp0
, fs
);
12088 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12089 tcg_temp_free_i64(fp0
);
/*
 * Translate a Coprocessor-1X three-operand FP instruction: ALNV.PS plus the
 * fused multiply-add family (MADD/MSUB/NMADD/NMSUB in .S/.D/.PS formats),
 * all computed via helpers as fd = op(fs, ft, fr).
 * NOTE(review): the original case labels, braces and break statements were
 * lost in extraction; comments mark the visible fragments by the helper
 * each one calls.
 */
12096 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12097 int fd
, int fr
, int fs
, int ft
)
/* ALNV.PS: align variable - select fd from fs/ft depending on the low
   3 bits of gpr[fr] (0 = copy fs, 4 = byte-rotated pair of fs/ft). */
12103 TCGv t0
= tcg_temp_local_new();
12104 TCGv_i32 fp
= tcg_temp_new_i32();
12105 TCGv_i32 fph
= tcg_temp_new_i32();
12106 TCGLabel
*l1
= gen_new_label();
12107 TCGLabel
*l2
= gen_new_label();
12109 gen_load_gpr(t0
, fr
);
12110 tcg_gen_andi_tl(t0
, t0
, 0x7);
/* t0 == 0: straight copy of both 32-bit halves of fs into fd. */
12112 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12113 gen_load_fpr32(ctx
, fp
, fs
);
12114 gen_load_fpr32h(ctx
, fph
, fs
);
12115 gen_store_fpr32(ctx
, fp
, fd
);
12116 gen_store_fpr32h(ctx
, fph
, fd
);
/* t0 == 4: cross-combine halves of fs and ft; the half selection
   depends on target endianness. */
12119 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12121 #ifdef TARGET_WORDS_BIGENDIAN
12122 gen_load_fpr32(ctx
, fp
, fs
);
12123 gen_load_fpr32h(ctx
, fph
, ft
);
12124 gen_store_fpr32h(ctx
, fp
, fd
);
12125 gen_store_fpr32(ctx
, fph
, fd
);
12127 gen_load_fpr32h(ctx
, fph
, fs
);
12128 gen_load_fpr32(ctx
, fp
, ft
);
12129 gen_store_fpr32(ctx
, fph
, fd
);
12130 gen_store_fpr32h(ctx
, fp
, fd
);
12133 tcg_temp_free_i32(fp
);
12134 tcg_temp_free_i32(fph
);
/* MADD.S: fd = fs * ft + fr via helper (fp2 is both addend and result). */
12140 TCGv_i32 fp0
= tcg_temp_new_i32();
12141 TCGv_i32 fp1
= tcg_temp_new_i32();
12142 TCGv_i32 fp2
= tcg_temp_new_i32();
12144 gen_load_fpr32(ctx
, fp0
, fs
);
12145 gen_load_fpr32(ctx
, fp1
, ft
);
12146 gen_load_fpr32(ctx
, fp2
, fr
);
12147 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12148 tcg_temp_free_i32(fp0
);
12149 tcg_temp_free_i32(fp1
);
12150 gen_store_fpr32(ctx
, fp2
, fd
);
12151 tcg_temp_free_i32(fp2
);
/* MADD.D: all four registers must be valid 64-bit FPRs in this mode. */
12156 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12158 TCGv_i64 fp0
= tcg_temp_new_i64();
12159 TCGv_i64 fp1
= tcg_temp_new_i64();
12160 TCGv_i64 fp2
= tcg_temp_new_i64();
12162 gen_load_fpr64(ctx
, fp0
, fs
);
12163 gen_load_fpr64(ctx
, fp1
, ft
);
12164 gen_load_fpr64(ctx
, fp2
, fr
);
12165 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12166 tcg_temp_free_i64(fp0
);
12167 tcg_temp_free_i64(fp1
);
12168 gen_store_fpr64(ctx
, fp2
, fd
);
12169 tcg_temp_free_i64(fp2
);
/* MADD.PS: paired-single variant, operates on a 64-bit register pair. */
12175 TCGv_i64 fp0
= tcg_temp_new_i64();
12176 TCGv_i64 fp1
= tcg_temp_new_i64();
12177 TCGv_i64 fp2
= tcg_temp_new_i64();
12179 gen_load_fpr64(ctx
, fp0
, fs
);
12180 gen_load_fpr64(ctx
, fp1
, ft
);
12181 gen_load_fpr64(ctx
, fp2
, fr
);
12182 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12183 tcg_temp_free_i64(fp0
);
12184 tcg_temp_free_i64(fp1
);
12185 gen_store_fpr64(ctx
, fp2
, fd
);
12186 tcg_temp_free_i64(fp2
);
/* MSUB.S: fd = fs * ft - fr. */
12192 TCGv_i32 fp0
= tcg_temp_new_i32();
12193 TCGv_i32 fp1
= tcg_temp_new_i32();
12194 TCGv_i32 fp2
= tcg_temp_new_i32();
12196 gen_load_fpr32(ctx
, fp0
, fs
);
12197 gen_load_fpr32(ctx
, fp1
, ft
);
12198 gen_load_fpr32(ctx
, fp2
, fr
);
12199 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12200 tcg_temp_free_i32(fp0
);
12201 tcg_temp_free_i32(fp1
);
12202 gen_store_fpr32(ctx
, fp2
, fd
);
12203 tcg_temp_free_i32(fp2
);
/* MSUB.D */
12208 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12210 TCGv_i64 fp0
= tcg_temp_new_i64();
12211 TCGv_i64 fp1
= tcg_temp_new_i64();
12212 TCGv_i64 fp2
= tcg_temp_new_i64();
12214 gen_load_fpr64(ctx
, fp0
, fs
);
12215 gen_load_fpr64(ctx
, fp1
, ft
);
12216 gen_load_fpr64(ctx
, fp2
, fr
);
12217 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12218 tcg_temp_free_i64(fp0
);
12219 tcg_temp_free_i64(fp1
);
12220 gen_store_fpr64(ctx
, fp2
, fd
);
12221 tcg_temp_free_i64(fp2
);
/* MSUB.PS */
12227 TCGv_i64 fp0
= tcg_temp_new_i64();
12228 TCGv_i64 fp1
= tcg_temp_new_i64();
12229 TCGv_i64 fp2
= tcg_temp_new_i64();
12231 gen_load_fpr64(ctx
, fp0
, fs
);
12232 gen_load_fpr64(ctx
, fp1
, ft
);
12233 gen_load_fpr64(ctx
, fp2
, fr
);
12234 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12235 tcg_temp_free_i64(fp0
);
12236 tcg_temp_free_i64(fp1
);
12237 gen_store_fpr64(ctx
, fp2
, fd
);
12238 tcg_temp_free_i64(fp2
);
/* NMADD.S: negated multiply-add. */
12244 TCGv_i32 fp0
= tcg_temp_new_i32();
12245 TCGv_i32 fp1
= tcg_temp_new_i32();
12246 TCGv_i32 fp2
= tcg_temp_new_i32();
12248 gen_load_fpr32(ctx
, fp0
, fs
);
12249 gen_load_fpr32(ctx
, fp1
, ft
);
12250 gen_load_fpr32(ctx
, fp2
, fr
);
12251 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12252 tcg_temp_free_i32(fp0
);
12253 tcg_temp_free_i32(fp1
);
12254 gen_store_fpr32(ctx
, fp2
, fd
);
12255 tcg_temp_free_i32(fp2
);
/* NMADD.D */
12260 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12262 TCGv_i64 fp0
= tcg_temp_new_i64();
12263 TCGv_i64 fp1
= tcg_temp_new_i64();
12264 TCGv_i64 fp2
= tcg_temp_new_i64();
12266 gen_load_fpr64(ctx
, fp0
, fs
);
12267 gen_load_fpr64(ctx
, fp1
, ft
);
12268 gen_load_fpr64(ctx
, fp2
, fr
);
12269 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12270 tcg_temp_free_i64(fp0
);
12271 tcg_temp_free_i64(fp1
);
12272 gen_store_fpr64(ctx
, fp2
, fd
);
12273 tcg_temp_free_i64(fp2
);
/* NMADD.PS */
12279 TCGv_i64 fp0
= tcg_temp_new_i64();
12280 TCGv_i64 fp1
= tcg_temp_new_i64();
12281 TCGv_i64 fp2
= tcg_temp_new_i64();
12283 gen_load_fpr64(ctx
, fp0
, fs
);
12284 gen_load_fpr64(ctx
, fp1
, ft
);
12285 gen_load_fpr64(ctx
, fp2
, fr
);
12286 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12287 tcg_temp_free_i64(fp0
);
12288 tcg_temp_free_i64(fp1
);
12289 gen_store_fpr64(ctx
, fp2
, fd
);
12290 tcg_temp_free_i64(fp2
);
/* NMSUB.S: negated multiply-subtract. */
12296 TCGv_i32 fp0
= tcg_temp_new_i32();
12297 TCGv_i32 fp1
= tcg_temp_new_i32();
12298 TCGv_i32 fp2
= tcg_temp_new_i32();
12300 gen_load_fpr32(ctx
, fp0
, fs
);
12301 gen_load_fpr32(ctx
, fp1
, ft
);
12302 gen_load_fpr32(ctx
, fp2
, fr
);
12303 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12304 tcg_temp_free_i32(fp0
);
12305 tcg_temp_free_i32(fp1
);
12306 gen_store_fpr32(ctx
, fp2
, fd
);
12307 tcg_temp_free_i32(fp2
);
/* NMSUB.D */
12312 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12314 TCGv_i64 fp0
= tcg_temp_new_i64();
12315 TCGv_i64 fp1
= tcg_temp_new_i64();
12316 TCGv_i64 fp2
= tcg_temp_new_i64();
12318 gen_load_fpr64(ctx
, fp0
, fs
);
12319 gen_load_fpr64(ctx
, fp1
, ft
);
12320 gen_load_fpr64(ctx
, fp2
, fr
);
12321 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12322 tcg_temp_free_i64(fp0
);
12323 tcg_temp_free_i64(fp1
);
12324 gen_store_fpr64(ctx
, fp2
, fd
);
12325 tcg_temp_free_i64(fp2
);
/* NMSUB.PS */
12331 TCGv_i64 fp0
= tcg_temp_new_i64();
12332 TCGv_i64 fp1
= tcg_temp_new_i64();
12333 TCGv_i64 fp2
= tcg_temp_new_i64();
12335 gen_load_fpr64(ctx
, fp0
, fs
);
12336 gen_load_fpr64(ctx
, fp1
, ft
);
12337 gen_load_fpr64(ctx
, fp2
, fr
);
12338 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12339 tcg_temp_free_i64(fp0
);
12340 tcg_temp_free_i64(fp1
);
12341 gen_store_fpr64(ctx
, fp2
, fd
);
12342 tcg_temp_free_i64(fp2
);
/* Unrecognized opcode: raise Reserved Instruction. */
12346 MIPS_INVAL("flt3_arith");
12347 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Translate RDHWR (read hardware register): move the selected hardware
 * register into gpr[rt].  Each visible fragment below corresponds to one
 * register selector, identified by the helper it calls (CPU number,
 * SYNCI step, cycle counter, counter resolution, performance counter,
 * XNP, UserLocal).  NOTE(review): the switch header, case labels and
 * break statements were lost in extraction; comments annotate only the
 * visible fragments.
 */
12352 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12356 #if !defined(CONFIG_USER_ONLY)
12357 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12358 Therefore only check the ISA in system mode. */
12359 check_insn(ctx
, ISA_MIPS32R2
);
12361 t0
= tcg_temp_new();
/* CPUNum: EBase.CPUNum via helper. */
12365 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12366 gen_store_gpr(t0
, rt
);
/* SYNCI_Step: cache line step for SYNCI. */
12369 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12370 gen_store_gpr(t0
, rt
);
/* CC (cycle counter): needs special handling under icount. */
12373 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12376 gen_helper_rdhwr_cc(t0
, cpu_env
);
12377 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12380 gen_store_gpr(t0
, rt
);
12381 /* Break the TB to be able to take timer interrupts immediately
12382 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12383 we break completely out of translated code. */
12384 gen_save_pc(ctx
->base
.pc_next
+ 4);
12385 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* CCRes: cycle counter resolution. */
12388 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12389 gen_store_gpr(t0
, rt
);
/* Performance counter (R6): only control register 0 is implemented;
   any other selector raises Reserved Instruction. */
12392 check_insn(ctx
, ISA_MIPS32R6
);
12394 /* Performance counter registers are not implemented other than
12395 * control register 0.
12397 generate_exception(ctx
, EXCP_RI
);
12399 gen_helper_rdhwr_performance(t0
, cpu_env
);
12400 gen_store_gpr(t0
, rt
);
/* XNP (R6): extended LL/SC family availability bit. */
12403 check_insn(ctx
, ISA_MIPS32R6
);
12404 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12405 gen_store_gpr(t0
, rt
);
/* UserLocal: always readable in user mode; in system mode only when
   CP0 is accessible or HWREna.ULR is set, else Reserved Instruction. */
12408 #if defined(CONFIG_USER_ONLY)
12409 tcg_gen_ld_tl(t0
, cpu_env
,
12410 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12411 gen_store_gpr(t0
, rt
);
12414 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12415 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12416 tcg_gen_ld_tl(t0
, cpu_env
,
12417 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12418 gen_store_gpr(t0
, rt
);
12420 generate_exception_end(ctx
, EXCP_RI
);
12424 default: /* Invalid */
12425 MIPS_INVAL("rdhwr");
12426 generate_exception_end(ctx
, EXCP_RI
);
12432 static inline void clear_branch_hflags(DisasContext
*ctx
)
12434 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12435 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12436 save_cpu_state(ctx
, 0);
12438 /* it is not safe to save ctx->hflags as hflags may be changed
12439 in execution time by the instruction in delay / forbidden slot. */
12440 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12444 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12446 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12447 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12448 /* Branches completion */
12449 clear_branch_hflags(ctx
);
12450 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12451 /* FIXME: Need to clear can_do_io. */
12452 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12453 case MIPS_HFLAG_FBNSLOT
:
12454 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12457 /* unconditional branch */
12458 if (proc_hflags
& MIPS_HFLAG_BX
) {
12459 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12461 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12463 case MIPS_HFLAG_BL
:
12464 /* blikely taken case */
12465 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12467 case MIPS_HFLAG_BC
:
12468 /* Conditional branch */
12470 TCGLabel
*l1
= gen_new_label();
12472 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12473 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12475 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12478 case MIPS_HFLAG_BR
:
12479 /* unconditional branch to register */
12480 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12481 TCGv t0
= tcg_temp_new();
12482 TCGv_i32 t1
= tcg_temp_new_i32();
12484 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12485 tcg_gen_trunc_tl_i32(t1
, t0
);
12487 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12488 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12489 tcg_gen_or_i32(hflags
, hflags
, t1
);
12490 tcg_temp_free_i32(t1
);
12492 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12494 tcg_gen_mov_tl(cpu_PC
, btarget
);
12496 if (ctx
->base
.singlestep_enabled
) {
12497 save_cpu_state(ctx
, 0);
12498 gen_helper_raise_exception_debug(cpu_env
);
12500 tcg_gen_lookup_and_goto_ptr();
12503 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12509 /* Compact Branches */
12510 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12511 int rs
, int rt
, int32_t offset
)
12513 int bcond_compute
= 0;
12514 TCGv t0
= tcg_temp_new();
12515 TCGv t1
= tcg_temp_new();
12516 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12518 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12519 #ifdef MIPS_DEBUG_DISAS
12520 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12521 "\n", ctx
->base
.pc_next
);
12523 generate_exception_end(ctx
, EXCP_RI
);
12527 /* Load needed operands and calculate btarget */
12529 /* compact branch */
12530 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12531 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12532 gen_load_gpr(t0
, rs
);
12533 gen_load_gpr(t1
, rt
);
12535 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12536 if (rs
<= rt
&& rs
== 0) {
12537 /* OPC_BEQZALC, OPC_BNEZALC */
12538 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12541 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12542 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12543 gen_load_gpr(t0
, rs
);
12544 gen_load_gpr(t1
, rt
);
12546 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12548 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12549 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12550 if (rs
== 0 || rs
== rt
) {
12551 /* OPC_BLEZALC, OPC_BGEZALC */
12552 /* OPC_BGTZALC, OPC_BLTZALC */
12553 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12555 gen_load_gpr(t0
, rs
);
12556 gen_load_gpr(t1
, rt
);
12558 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12562 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12567 /* OPC_BEQZC, OPC_BNEZC */
12568 gen_load_gpr(t0
, rs
);
12570 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12572 /* OPC_JIC, OPC_JIALC */
12573 TCGv tbase
= tcg_temp_new();
12574 TCGv toffset
= tcg_temp_new();
12576 gen_load_gpr(tbase
, rt
);
12577 tcg_gen_movi_tl(toffset
, offset
);
12578 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12579 tcg_temp_free(tbase
);
12580 tcg_temp_free(toffset
);
12584 MIPS_INVAL("Compact branch/jump");
12585 generate_exception_end(ctx
, EXCP_RI
);
12589 if (bcond_compute
== 0) {
12590 /* Uncoditional compact branch */
12593 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12596 ctx
->hflags
|= MIPS_HFLAG_BR
;
12599 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12602 ctx
->hflags
|= MIPS_HFLAG_B
;
12605 MIPS_INVAL("Compact branch/jump");
12606 generate_exception_end(ctx
, EXCP_RI
);
12610 /* Generating branch here as compact branches don't have delay slot */
12611 gen_branch(ctx
, 4);
12613 /* Conditional compact branch */
12614 TCGLabel
*fs
= gen_new_label();
12615 save_cpu_state(ctx
, 0);
12618 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12619 if (rs
== 0 && rt
!= 0) {
12621 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12622 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12624 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12627 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12630 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12631 if (rs
== 0 && rt
!= 0) {
12633 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12634 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12636 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12639 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12642 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12643 if (rs
== 0 && rt
!= 0) {
12645 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12646 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12648 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12651 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12654 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12655 if (rs
== 0 && rt
!= 0) {
12657 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12658 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12660 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12663 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12666 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12667 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12669 /* OPC_BOVC, OPC_BNVC */
12670 TCGv t2
= tcg_temp_new();
12671 TCGv t3
= tcg_temp_new();
12672 TCGv t4
= tcg_temp_new();
12673 TCGv input_overflow
= tcg_temp_new();
12675 gen_load_gpr(t0
, rs
);
12676 gen_load_gpr(t1
, rt
);
12677 tcg_gen_ext32s_tl(t2
, t0
);
12678 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12679 tcg_gen_ext32s_tl(t3
, t1
);
12680 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12681 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12683 tcg_gen_add_tl(t4
, t2
, t3
);
12684 tcg_gen_ext32s_tl(t4
, t4
);
12685 tcg_gen_xor_tl(t2
, t2
, t3
);
12686 tcg_gen_xor_tl(t3
, t4
, t3
);
12687 tcg_gen_andc_tl(t2
, t3
, t2
);
12688 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12689 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12690 if (opc
== OPC_BOVC
) {
12692 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12695 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12697 tcg_temp_free(input_overflow
);
12701 } else if (rs
< rt
&& rs
== 0) {
12702 /* OPC_BEQZALC, OPC_BNEZALC */
12703 if (opc
== OPC_BEQZALC
) {
12705 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12708 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12711 /* OPC_BEQC, OPC_BNEC */
12712 if (opc
== OPC_BEQC
) {
12714 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12717 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12722 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12725 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12728 MIPS_INVAL("Compact conditional branch/jump");
12729 generate_exception_end(ctx
, EXCP_RI
);
12733 /* Generating branch here as compact branches don't have delay slot */
12734 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12737 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12745 /* ISA extensions (ASEs) */
12746 /* MIPS16 extension to MIPS32 */
12748 /* MIPS16 major opcodes */
12750 M16_OPC_ADDIUSP
= 0x00,
12751 M16_OPC_ADDIUPC
= 0x01,
12753 M16_OPC_JAL
= 0x03,
12754 M16_OPC_BEQZ
= 0x04,
12755 M16_OPC_BNEQZ
= 0x05,
12756 M16_OPC_SHIFT
= 0x06,
12758 M16_OPC_RRIA
= 0x08,
12759 M16_OPC_ADDIU8
= 0x09,
12760 M16_OPC_SLTI
= 0x0a,
12761 M16_OPC_SLTIU
= 0x0b,
12764 M16_OPC_CMPI
= 0x0e,
12768 M16_OPC_LWSP
= 0x12,
12770 M16_OPC_LBU
= 0x14,
12771 M16_OPC_LHU
= 0x15,
12772 M16_OPC_LWPC
= 0x16,
12773 M16_OPC_LWU
= 0x17,
12776 M16_OPC_SWSP
= 0x1a,
12778 M16_OPC_RRR
= 0x1c,
12780 M16_OPC_EXTEND
= 0x1e,
12784 /* I8 funct field */
12803 /* RR funct field */
12837 /* I64 funct field */
12845 I64_DADDIUPC
= 0x6,
12849 /* RR ry field for CNVT */
12851 RR_RY_CNVT_ZEB
= 0x0,
12852 RR_RY_CNVT_ZEH
= 0x1,
12853 RR_RY_CNVT_ZEW
= 0x2,
12854 RR_RY_CNVT_SEB
= 0x4,
12855 RR_RY_CNVT_SEH
= 0x5,
12856 RR_RY_CNVT_SEW
= 0x6,
/*
 * Map a 3-bit MIPS16 register field onto the architectural GPR number.
 * Encodings 0 and 1 select $16/$17 (s0/s1); encodings 2..7 select $2..$7.
 *
 * @r: 3-bit register field (caller must pass 0..7; out-of-range values
 *     would read past the table)
 *
 * Returns the MIPS32 GPR index.
 */
static int xlat (int r)
{
    /* const: read-only table, consistent with mmreg()/mmreg2() below. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12866 static void gen_mips16_save (DisasContext
*ctx
,
12867 int xsregs
, int aregs
,
12868 int do_ra
, int do_s0
, int do_s1
,
12871 TCGv t0
= tcg_temp_new();
12872 TCGv t1
= tcg_temp_new();
12873 TCGv t2
= tcg_temp_new();
12903 generate_exception_end(ctx
, EXCP_RI
);
12909 gen_base_offset_addr(ctx
, t0
, 29, 12);
12910 gen_load_gpr(t1
, 7);
12911 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12914 gen_base_offset_addr(ctx
, t0
, 29, 8);
12915 gen_load_gpr(t1
, 6);
12916 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12919 gen_base_offset_addr(ctx
, t0
, 29, 4);
12920 gen_load_gpr(t1
, 5);
12921 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12924 gen_base_offset_addr(ctx
, t0
, 29, 0);
12925 gen_load_gpr(t1
, 4);
12926 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12929 gen_load_gpr(t0
, 29);
12931 #define DECR_AND_STORE(reg) do { \
12932 tcg_gen_movi_tl(t2, -4); \
12933 gen_op_addr_add(ctx, t0, t0, t2); \
12934 gen_load_gpr(t1, reg); \
12935 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
12939 DECR_AND_STORE(31);
12944 DECR_AND_STORE(30);
12947 DECR_AND_STORE(23);
12950 DECR_AND_STORE(22);
12953 DECR_AND_STORE(21);
12956 DECR_AND_STORE(20);
12959 DECR_AND_STORE(19);
12962 DECR_AND_STORE(18);
12966 DECR_AND_STORE(17);
12969 DECR_AND_STORE(16);
12999 generate_exception_end(ctx
, EXCP_RI
);
13015 #undef DECR_AND_STORE
13017 tcg_gen_movi_tl(t2
, -framesize
);
13018 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
/* Emit code for the MIPS16 RESTORE instruction: reload ra/s0/s1 (and,
 * in the extended form, additional xsregs/aregs registers) from the
 * stack frame, then pop the frame by adding 'framesize' back to $29.
 * Mirrors gen_mips16_save() above.
 * NOTE(review): several interior lines (register-selection conditionals
 * around DECR_AND_LOAD uses) are not visible in this extract. */
13024 static void gen_mips16_restore (DisasContext
*ctx
,
13025 int xsregs
, int aregs
,
13026 int do_ra
, int do_s0
, int do_s1
,
13030 TCGv t0
= tcg_temp_new();
13031 TCGv t1
= tcg_temp_new();
13032 TCGv t2
= tcg_temp_new();
/* t0 = $29 + framesize: start at the top of the frame being popped. */
13034 tcg_gen_movi_tl(t2
, framesize
);
13035 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
/* Step t0 down by 4 and reload one GPR from the frame. */
13037 #define DECR_AND_LOAD(reg) do { \
13038 tcg_gen_movi_tl(t2, -4); \
13039 gen_op_addr_add(ctx, t0, t0, t2); \
13040 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13041 gen_store_gpr(t1, reg); \
/* Invalid encodings raise Reserved Instruction. */
13105 generate_exception_end(ctx
, EXCP_RI
);
13121 #undef DECR_AND_LOAD
/* Finally release the frame: $29 += framesize. */
13123 tcg_gen_movi_tl(t2
, framesize
);
13124 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
/* Emit ADDIUPC (PC-relative add immediate): rx = masked_PC + imm.
 * 'is_64_bit' selects whether the result is kept full-width or
 * sign-extended to 32 bits; 'extended' marks the extended (32-bit)
 * MIPS16 encoding, which is illegal in a branch delay slot. */
13130 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13131 int is_64_bit
, int extended
)
/* Extended encodings are not permitted in a delay/forbidden slot. */
13135 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13136 generate_exception_end(ctx
, EXCP_RI
);
13140 t0
= tcg_temp_new();
/* pc_relative_pc() supplies the (appropriately masked) base PC. */
13142 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13143 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
/* 32-bit variant: keep the result properly sign-extended. */
13145 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
/* Emit a CACHE instruction: compute the effective address base+offset
 * and hand it, together with the 5-bit cache op field, to the
 * gen_helper_cache() run-time helper. */
13151 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13154 TCGv_i32 t0
= tcg_const_i32(op
);
13155 TCGv t1
= tcg_temp_new();
13156 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13157 gen_helper_cache(cpu_env
, t1
, t0
);
13160 #if defined(TARGET_MIPS64)
13161 static void decode_i64_mips16 (DisasContext
*ctx
,
13162 int ry
, int funct
, int16_t offset
,
13167 check_insn(ctx
, ISA_MIPS3
);
13168 check_mips_64(ctx
);
13169 offset
= extended
? offset
: offset
<< 3;
13170 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13173 check_insn(ctx
, ISA_MIPS3
);
13174 check_mips_64(ctx
);
13175 offset
= extended
? offset
: offset
<< 3;
13176 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13179 check_insn(ctx
, ISA_MIPS3
);
13180 check_mips_64(ctx
);
13181 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13182 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13185 check_insn(ctx
, ISA_MIPS3
);
13186 check_mips_64(ctx
);
13187 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13188 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13191 check_insn(ctx
, ISA_MIPS3
);
13192 check_mips_64(ctx
);
13193 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13194 generate_exception_end(ctx
, EXCP_RI
);
13196 offset
= extended
? offset
: offset
<< 3;
13197 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13201 check_insn(ctx
, ISA_MIPS3
);
13202 check_mips_64(ctx
);
13203 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13204 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13207 check_insn(ctx
, ISA_MIPS3
);
13208 check_mips_64(ctx
);
13209 offset
= extended
? offset
: offset
<< 2;
13210 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13213 check_insn(ctx
, ISA_MIPS3
);
13214 check_mips_64(ctx
);
13215 offset
= extended
? offset
: offset
<< 2;
13216 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13222 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13224 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13225 int op
, rx
, ry
, funct
, sa
;
13226 int16_t imm
, offset
;
13228 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13229 op
= (ctx
->opcode
>> 11) & 0x1f;
13230 sa
= (ctx
->opcode
>> 22) & 0x1f;
13231 funct
= (ctx
->opcode
>> 8) & 0x7;
13232 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13233 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13234 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13235 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13236 | (ctx
->opcode
& 0x1f));
13238 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13241 case M16_OPC_ADDIUSP
:
13242 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13244 case M16_OPC_ADDIUPC
:
13245 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13248 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13249 /* No delay slot, so just process as a normal instruction */
13252 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13253 /* No delay slot, so just process as a normal instruction */
13255 case M16_OPC_BNEQZ
:
13256 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13257 /* No delay slot, so just process as a normal instruction */
13259 case M16_OPC_SHIFT
:
13260 switch (ctx
->opcode
& 0x3) {
13262 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13265 #if defined(TARGET_MIPS64)
13266 check_mips_64(ctx
);
13267 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13269 generate_exception_end(ctx
, EXCP_RI
);
13273 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13276 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13280 #if defined(TARGET_MIPS64)
13282 check_insn(ctx
, ISA_MIPS3
);
13283 check_mips_64(ctx
);
13284 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13288 imm
= ctx
->opcode
& 0xf;
13289 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13290 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13291 imm
= (int16_t) (imm
<< 1) >> 1;
13292 if ((ctx
->opcode
>> 4) & 0x1) {
13293 #if defined(TARGET_MIPS64)
13294 check_mips_64(ctx
);
13295 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13297 generate_exception_end(ctx
, EXCP_RI
);
13300 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13303 case M16_OPC_ADDIU8
:
13304 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13307 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13309 case M16_OPC_SLTIU
:
13310 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13315 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13318 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13321 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13324 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13327 check_insn(ctx
, ISA_MIPS32
);
13329 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13330 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13331 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13332 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13333 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13334 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13335 | (ctx
->opcode
& 0xf)) << 3;
13337 if (ctx
->opcode
& (1 << 7)) {
13338 gen_mips16_save(ctx
, xsregs
, aregs
,
13339 do_ra
, do_s0
, do_s1
,
13342 gen_mips16_restore(ctx
, xsregs
, aregs
,
13343 do_ra
, do_s0
, do_s1
,
13349 generate_exception_end(ctx
, EXCP_RI
);
13354 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13357 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13359 #if defined(TARGET_MIPS64)
13361 check_insn(ctx
, ISA_MIPS3
);
13362 check_mips_64(ctx
);
13363 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13367 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13370 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13373 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13376 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13379 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13382 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13385 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13387 #if defined(TARGET_MIPS64)
13389 check_insn(ctx
, ISA_MIPS3
);
13390 check_mips_64(ctx
);
13391 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13395 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13398 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13401 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13404 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13406 #if defined(TARGET_MIPS64)
13408 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13412 generate_exception_end(ctx
, EXCP_RI
);
/* Return true when an SDBBP should be treated as a UHI (Unified Hosting
 * Interface) semihosting call: semihosting must be enabled and the
 * SDBBP code field must be 1.  In user-only builds this path is
 * configured out (the CONFIG_USER_ONLY branch is not visible here). */
13419 static inline bool is_uhi(int sdbbp_code
)
13421 #ifdef CONFIG_USER_ONLY
13424 return semihosting_enabled() && sdbbp_code
== 1;
13428 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13432 int op
, cnvt_op
, op1
, offset
;
13436 op
= (ctx
->opcode
>> 11) & 0x1f;
13437 sa
= (ctx
->opcode
>> 2) & 0x7;
13438 sa
= sa
== 0 ? 8 : sa
;
13439 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13440 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13441 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13442 op1
= offset
= ctx
->opcode
& 0x1f;
13447 case M16_OPC_ADDIUSP
:
13449 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13451 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13454 case M16_OPC_ADDIUPC
:
13455 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13458 offset
= (ctx
->opcode
& 0x7ff) << 1;
13459 offset
= (int16_t)(offset
<< 4) >> 4;
13460 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13461 /* No delay slot, so just process as a normal instruction */
13464 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13465 offset
= (((ctx
->opcode
& 0x1f) << 21)
13466 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13468 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13469 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13473 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13474 ((int8_t)ctx
->opcode
) << 1, 0);
13475 /* No delay slot, so just process as a normal instruction */
13477 case M16_OPC_BNEQZ
:
13478 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13479 ((int8_t)ctx
->opcode
) << 1, 0);
13480 /* No delay slot, so just process as a normal instruction */
13482 case M16_OPC_SHIFT
:
13483 switch (ctx
->opcode
& 0x3) {
13485 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13488 #if defined(TARGET_MIPS64)
13489 check_insn(ctx
, ISA_MIPS3
);
13490 check_mips_64(ctx
);
13491 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13493 generate_exception_end(ctx
, EXCP_RI
);
13497 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13500 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13504 #if defined(TARGET_MIPS64)
13506 check_insn(ctx
, ISA_MIPS3
);
13507 check_mips_64(ctx
);
13508 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13513 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13515 if ((ctx
->opcode
>> 4) & 1) {
13516 #if defined(TARGET_MIPS64)
13517 check_insn(ctx
, ISA_MIPS3
);
13518 check_mips_64(ctx
);
13519 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13521 generate_exception_end(ctx
, EXCP_RI
);
13524 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13528 case M16_OPC_ADDIU8
:
13530 int16_t imm
= (int8_t) ctx
->opcode
;
13532 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13537 int16_t imm
= (uint8_t) ctx
->opcode
;
13538 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13541 case M16_OPC_SLTIU
:
13543 int16_t imm
= (uint8_t) ctx
->opcode
;
13544 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13551 funct
= (ctx
->opcode
>> 8) & 0x7;
13554 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13555 ((int8_t)ctx
->opcode
) << 1, 0);
13558 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13559 ((int8_t)ctx
->opcode
) << 1, 0);
13562 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13565 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13566 ((int8_t)ctx
->opcode
) << 3);
13569 check_insn(ctx
, ISA_MIPS32
);
13571 int do_ra
= ctx
->opcode
& (1 << 6);
13572 int do_s0
= ctx
->opcode
& (1 << 5);
13573 int do_s1
= ctx
->opcode
& (1 << 4);
13574 int framesize
= ctx
->opcode
& 0xf;
13576 if (framesize
== 0) {
13579 framesize
= framesize
<< 3;
13582 if (ctx
->opcode
& (1 << 7)) {
13583 gen_mips16_save(ctx
, 0, 0,
13584 do_ra
, do_s0
, do_s1
, framesize
);
13586 gen_mips16_restore(ctx
, 0, 0,
13587 do_ra
, do_s0
, do_s1
, framesize
);
13593 int rz
= xlat(ctx
->opcode
& 0x7);
13595 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13596 ((ctx
->opcode
>> 5) & 0x7);
13597 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13601 reg32
= ctx
->opcode
& 0x1f;
13602 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13605 generate_exception_end(ctx
, EXCP_RI
);
13612 int16_t imm
= (uint8_t) ctx
->opcode
;
13614 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13619 int16_t imm
= (uint8_t) ctx
->opcode
;
13620 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13623 #if defined(TARGET_MIPS64)
13625 check_insn(ctx
, ISA_MIPS3
);
13626 check_mips_64(ctx
);
13627 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13631 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13634 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13637 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13640 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13643 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13646 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13649 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13651 #if defined (TARGET_MIPS64)
13653 check_insn(ctx
, ISA_MIPS3
);
13654 check_mips_64(ctx
);
13655 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13659 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13662 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13665 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13668 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13672 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13675 switch (ctx
->opcode
& 0x3) {
13677 mips32_op
= OPC_ADDU
;
13680 mips32_op
= OPC_SUBU
;
13682 #if defined(TARGET_MIPS64)
13684 mips32_op
= OPC_DADDU
;
13685 check_insn(ctx
, ISA_MIPS3
);
13686 check_mips_64(ctx
);
13689 mips32_op
= OPC_DSUBU
;
13690 check_insn(ctx
, ISA_MIPS3
);
13691 check_mips_64(ctx
);
13695 generate_exception_end(ctx
, EXCP_RI
);
13699 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13708 int nd
= (ctx
->opcode
>> 7) & 0x1;
13709 int link
= (ctx
->opcode
>> 6) & 0x1;
13710 int ra
= (ctx
->opcode
>> 5) & 0x1;
13713 check_insn(ctx
, ISA_MIPS32
);
13722 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13727 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13728 gen_helper_do_semihosting(cpu_env
);
13730 /* XXX: not clear which exception should be raised
13731 * when in debug mode...
13733 check_insn(ctx
, ISA_MIPS32
);
13734 generate_exception_end(ctx
, EXCP_DBp
);
13738 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13741 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13744 generate_exception_end(ctx
, EXCP_BREAK
);
13747 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13750 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13753 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13755 #if defined (TARGET_MIPS64)
13757 check_insn(ctx
, ISA_MIPS3
);
13758 check_mips_64(ctx
);
13759 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13763 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13766 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13769 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13772 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13775 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13778 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13781 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13784 check_insn(ctx
, ISA_MIPS32
);
13786 case RR_RY_CNVT_ZEB
:
13787 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13789 case RR_RY_CNVT_ZEH
:
13790 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13792 case RR_RY_CNVT_SEB
:
13793 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13795 case RR_RY_CNVT_SEH
:
13796 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13798 #if defined (TARGET_MIPS64)
13799 case RR_RY_CNVT_ZEW
:
13800 check_insn(ctx
, ISA_MIPS64
);
13801 check_mips_64(ctx
);
13802 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13804 case RR_RY_CNVT_SEW
:
13805 check_insn(ctx
, ISA_MIPS64
);
13806 check_mips_64(ctx
);
13807 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13811 generate_exception_end(ctx
, EXCP_RI
);
13816 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13818 #if defined (TARGET_MIPS64)
13820 check_insn(ctx
, ISA_MIPS3
);
13821 check_mips_64(ctx
);
13822 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13825 check_insn(ctx
, ISA_MIPS3
);
13826 check_mips_64(ctx
);
13827 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13830 check_insn(ctx
, ISA_MIPS3
);
13831 check_mips_64(ctx
);
13832 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13835 check_insn(ctx
, ISA_MIPS3
);
13836 check_mips_64(ctx
);
13837 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13841 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13844 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13847 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13850 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13852 #if defined (TARGET_MIPS64)
13854 check_insn(ctx
, ISA_MIPS3
);
13855 check_mips_64(ctx
);
13856 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13859 check_insn(ctx
, ISA_MIPS3
);
13860 check_mips_64(ctx
);
13861 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13864 check_insn(ctx
, ISA_MIPS3
);
13865 check_mips_64(ctx
);
13866 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13869 check_insn(ctx
, ISA_MIPS3
);
13870 check_mips_64(ctx
);
13871 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13875 generate_exception_end(ctx
, EXCP_RI
);
13879 case M16_OPC_EXTEND
:
13880 decode_extended_mips16_opc(env
, ctx
);
13883 #if defined(TARGET_MIPS64)
13885 funct
= (ctx
->opcode
>> 8) & 0x7;
13886 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13890 generate_exception_end(ctx
, EXCP_RI
);
13897 /* microMIPS extension to MIPS32/MIPS64 */
13900 * microMIPS32/microMIPS64 major opcodes
13902 * 1. MIPS Architecture for Programmers Volume II-B:
13903 * The microMIPS32 Instruction Set (Revision 3.05)
13905 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13907 * 2. MIPS Architecture For Programmers Volume II-A:
13908 * The MIPS64 Instruction Set (Revision 3.51)
13938 POOL32S
= 0x16, /* MIPS64 */
13939 DADDIU32
= 0x17, /* MIPS64 */
13968 /* 0x29 is reserved */
13981 /* 0x31 is reserved */
13994 SD32
= 0x36, /* MIPS64 */
13995 LD32
= 0x37, /* MIPS64 */
13997 /* 0x39 is reserved */
14013 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14035 /* POOL32A encoding of minor opcode field */
14038 /* These opcodes are distinguished only by bits 9..6; those bits are
14039 * what are recorded below. */
14076 /* The following can be distinguished by their lower 6 bits. */
14086 /* POOL32AXF encoding of minor opcode field extension */
14089 * 1. MIPS Architecture for Programmers Volume II-B:
14090 * The microMIPS32 Instruction Set (Revision 3.05)
14092 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14094 * 2. MIPS Architecture for Programmers VolumeIV-e:
14095 * The MIPS DSP Application-Specific Extension
14096 * to the microMIPS32 Architecture (Revision 2.34)
14098 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14113 /* begin of microMIPS32 DSP */
14115 /* bits 13..12 for 0x01 */
14121 /* bits 13..12 for 0x2a */
14127 /* bits 13..12 for 0x32 */
14131 /* end of microMIPS32 DSP */
14133 /* bits 15..12 for 0x2c */
14150 /* bits 15..12 for 0x34 */
14158 /* bits 15..12 for 0x3c */
14160 JR
= 0x0, /* alias */
14168 /* bits 15..12 for 0x05 */
14172 /* bits 15..12 for 0x0d */
14184 /* bits 15..12 for 0x15 */
14190 /* bits 15..12 for 0x1d */
14194 /* bits 15..12 for 0x2d */
14199 /* bits 15..12 for 0x35 */
14206 /* POOL32B encoding of minor opcode field (bits 15..12) */
14222 /* POOL32C encoding of minor opcode field (bits 15..12) */
14243 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14256 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14269 /* POOL32F encoding of minor opcode field (bits 5..0) */
14272 /* These are the bit 7..6 values */
14281 /* These are the bit 8..6 values */
14306 MOVZ_FMT_05
= 0x05,
14340 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14347 /* POOL32Fxf encoding of minor opcode extension field */
14385 /* POOL32I encoding of minor opcode field (bits 25..21) */
14415 /* These overlap and are distinguished by bit16 of the instruction */
14424 /* POOL16A encoding of minor opcode field */
14431 /* POOL16B encoding of minor opcode field */
14438 /* POOL16C encoding of minor opcode field */
14458 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14482 /* POOL16D encoding of minor opcode field */
14489 /* POOL16E encoding of minor opcode field */
/* Convert a 3-bit microMIPS register encoding to the architectural GPR
 * number.  Encodings 0 and 1 name the callee-saved pair $16/$17 (s0/s1);
 * encodings 2..7 name $2..$7 directly. */
static int mmreg (int r)
{
    return (r < 2) ? 16 + r : r;
}
/* Used for 16-bit store instructions.  Like mmreg(), but encoding 0
 * selects $0 (zero register) so that "store zero" is expressible;
 * encoding 1 selects $17, and 2..7 select $2..$7. */
static int mmreg2 (int r)
{
    if (r == 0) {
        return 0;
    }
    return (r == 1) ? 17 : r;
}
/* Field extractors for 16-bit microMIPS encodings: 3-bit destination /
 * source register fields and the two 5-bit full-register fields. */
14511 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14512 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14513 #define uMIPS_RS2(op) uMIPS_RS(op)
14514 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14515 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14516 #define uMIPS_RS5(op) (op & 0x1f)
14518 /* Signed immediate */
/* NOTE(review): SIMM's final sign-extension shift line is not visible
 * in this extract; the full macro sign-extends the 'width'-bit field. */
14519 #define SIMM(op, start, width) \
14520 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14523 /* Zero-extended immediate */
14524 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
/* microMIPS ADDIUR1SP: rd = $29 + (6-bit immediate << 2).
 * The destination comes from the 3-bit RD field via mmreg(). */
14526 static void gen_addiur1sp(DisasContext
*ctx
)
14528 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14530 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
/* microMIPS ADDIUR2: rd = rs + imm, where the 3-bit encoded immediate
 * selects one of { 1, 4, 8, 12, 16, 20, 24, -1 }. */
14533 static void gen_addiur2(DisasContext
*ctx
)
14535 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14536 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14537 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14539 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14542 static void gen_addiusp(DisasContext
*ctx
)
14544 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14547 if (encoded
<= 1) {
14548 decoded
= 256 + encoded
;
14549 } else if (encoded
<= 255) {
14551 } else if (encoded
<= 509) {
14552 decoded
= encoded
- 512;
14554 decoded
= encoded
- 768;
14557 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14560 static void gen_addius5(DisasContext
*ctx
)
14562 int imm
= SIMM(ctx
->opcode
, 1, 4);
14563 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14565 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14568 static void gen_andi16(DisasContext
*ctx
)
14570 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14571 31, 32, 63, 64, 255, 32768, 65535 };
14572 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14573 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14574 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14576 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14579 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14580 int base
, int16_t offset
)
14585 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14586 generate_exception_end(ctx
, EXCP_RI
);
14590 t0
= tcg_temp_new();
14592 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14594 t1
= tcg_const_tl(reglist
);
14595 t2
= tcg_const_i32(ctx
->mem_idx
);
14597 save_cpu_state(ctx
, 1);
14600 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14603 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14605 #ifdef TARGET_MIPS64
14607 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14610 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14616 tcg_temp_free_i32(t2
);
14620 static void gen_pool16c_insn(DisasContext
*ctx
)
14622 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14623 int rs
= mmreg(ctx
->opcode
& 0x7);
14625 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14630 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14636 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14642 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14648 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14655 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14656 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14658 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14667 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14668 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14670 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14677 int reg
= ctx
->opcode
& 0x1f;
14679 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14685 int reg
= ctx
->opcode
& 0x1f;
14686 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14687 /* Let normal delay slot handling in our caller take us
14688 to the branch target. */
14693 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14694 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14698 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14699 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14703 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14707 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14710 generate_exception_end(ctx
, EXCP_BREAK
);
14713 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14714 gen_helper_do_semihosting(cpu_env
);
14716 /* XXX: not clear which exception should be raised
14717 * when in debug mode...
14719 check_insn(ctx
, ISA_MIPS32
);
14720 generate_exception_end(ctx
, EXCP_DBp
);
14723 case JRADDIUSP
+ 0:
14724 case JRADDIUSP
+ 1:
14726 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14727 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14728 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14729 /* Let normal delay slot handling in our caller take us
14730 to the branch target. */
14734 generate_exception_end(ctx
, EXCP_RI
);
14739 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14742 int rd
, rs
, re
, rt
;
14743 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14744 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14745 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14746 rd
= rd_enc
[enc_dest
];
14747 re
= re_enc
[enc_dest
];
14748 rs
= rs_rt_enc
[enc_rs
];
14749 rt
= rs_rt_enc
[enc_rt
];
14751 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14753 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14756 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14758 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14762 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14764 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14765 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14767 switch (ctx
->opcode
& 0xf) {
14769 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14772 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14776 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14777 int offset
= extract32(ctx
->opcode
, 4, 4);
14778 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14781 case R6_JRC16
: /* JRCADDIUSP */
14782 if ((ctx
->opcode
>> 4) & 1) {
14784 int imm
= extract32(ctx
->opcode
, 5, 5);
14785 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14786 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14789 rs
= extract32(ctx
->opcode
, 5, 5);
14790 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
14802 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14803 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14804 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14805 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14809 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14812 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14816 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14817 int offset
= extract32(ctx
->opcode
, 4, 4);
14818 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14821 case JALRC16
: /* BREAK16, SDBBP16 */
14822 switch (ctx
->opcode
& 0x3f) {
14824 case JALRC16
+ 0x20:
14826 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14831 generate_exception(ctx
, EXCP_BREAK
);
14835 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14836 gen_helper_do_semihosting(cpu_env
);
14838 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14839 generate_exception(ctx
, EXCP_RI
);
14841 generate_exception(ctx
, EXCP_DBp
);
14848 generate_exception(ctx
, EXCP_RI
);
14853 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14855 TCGv t0
= tcg_temp_new();
14856 TCGv t1
= tcg_temp_new();
14858 gen_load_gpr(t0
, base
);
14861 gen_load_gpr(t1
, index
);
14862 tcg_gen_shli_tl(t1
, t1
, 2);
14863 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14866 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14867 gen_store_gpr(t1
, rd
);
14873 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14874 int base
, int16_t offset
)
14878 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14879 generate_exception_end(ctx
, EXCP_RI
);
14883 t0
= tcg_temp_new();
14884 t1
= tcg_temp_new();
14886 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14891 generate_exception_end(ctx
, EXCP_RI
);
14894 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14895 gen_store_gpr(t1
, rd
);
14896 tcg_gen_movi_tl(t1
, 4);
14897 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14898 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14899 gen_store_gpr(t1
, rd
+1);
14902 gen_load_gpr(t1
, rd
);
14903 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14904 tcg_gen_movi_tl(t1
, 4);
14905 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14906 gen_load_gpr(t1
, rd
+1);
14907 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14909 #ifdef TARGET_MIPS64
14912 generate_exception_end(ctx
, EXCP_RI
);
14915 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14916 gen_store_gpr(t1
, rd
);
14917 tcg_gen_movi_tl(t1
, 8);
14918 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14919 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14920 gen_store_gpr(t1
, rd
+1);
14923 gen_load_gpr(t1
, rd
);
14924 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14925 tcg_gen_movi_tl(t1
, 8);
14926 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14927 gen_load_gpr(t1
, rd
+1);
14928 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14936 static void gen_sync(int stype
)
14938 TCGBar tcg_mo
= TCG_BAR_SC
;
14941 case 0x4: /* SYNC_WMB */
14942 tcg_mo
|= TCG_MO_ST_ST
;
14944 case 0x10: /* SYNC_MB */
14945 tcg_mo
|= TCG_MO_ALL
;
14947 case 0x11: /* SYNC_ACQUIRE */
14948 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14950 case 0x12: /* SYNC_RELEASE */
14951 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14953 case 0x13: /* SYNC_RMB */
14954 tcg_mo
|= TCG_MO_LD_LD
;
14957 tcg_mo
|= TCG_MO_ALL
;
14961 tcg_gen_mb(tcg_mo
);
14964 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14966 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14967 int minor
= (ctx
->opcode
>> 12) & 0xf;
14968 uint32_t mips32_op
;
14970 switch (extension
) {
14972 mips32_op
= OPC_TEQ
;
14975 mips32_op
= OPC_TGE
;
14978 mips32_op
= OPC_TGEU
;
14981 mips32_op
= OPC_TLT
;
14984 mips32_op
= OPC_TLTU
;
14987 mips32_op
= OPC_TNE
;
14989 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
14991 #ifndef CONFIG_USER_ONLY
14994 check_cp0_enabled(ctx
);
14996 /* Treat as NOP. */
14999 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15003 check_cp0_enabled(ctx
);
15005 TCGv t0
= tcg_temp_new();
15007 gen_load_gpr(t0
, rt
);
15008 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15014 switch (minor
& 3) {
15016 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15019 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15022 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15025 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15028 goto pool32axf_invalid
;
15032 switch (minor
& 3) {
15034 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15037 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15040 goto pool32axf_invalid
;
15046 check_insn(ctx
, ISA_MIPS32R6
);
15047 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15050 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15053 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15056 mips32_op
= OPC_CLO
;
15059 mips32_op
= OPC_CLZ
;
15061 check_insn(ctx
, ISA_MIPS32
);
15062 gen_cl(ctx
, mips32_op
, rt
, rs
);
15065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15066 gen_rdhwr(ctx
, rt
, rs
, 0);
15069 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15072 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15073 mips32_op
= OPC_MULT
;
15076 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15077 mips32_op
= OPC_MULTU
;
15080 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15081 mips32_op
= OPC_DIV
;
15084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15085 mips32_op
= OPC_DIVU
;
15088 check_insn(ctx
, ISA_MIPS32
);
15089 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15092 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15093 mips32_op
= OPC_MADD
;
15096 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15097 mips32_op
= OPC_MADDU
;
15100 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15101 mips32_op
= OPC_MSUB
;
15104 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15105 mips32_op
= OPC_MSUBU
;
15107 check_insn(ctx
, ISA_MIPS32
);
15108 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15111 goto pool32axf_invalid
;
15122 generate_exception_err(ctx
, EXCP_CpU
, 2);
15125 goto pool32axf_invalid
;
15130 case JALR
: /* JALRC */
15131 case JALR_HB
: /* JALRC_HB */
15132 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15133 /* JALRC, JALRC_HB */
15134 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15136 /* JALR, JALR_HB */
15137 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15138 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15143 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15144 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15145 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15148 goto pool32axf_invalid
;
15154 check_cp0_enabled(ctx
);
15155 check_insn(ctx
, ISA_MIPS32R2
);
15156 gen_load_srsgpr(rs
, rt
);
15159 check_cp0_enabled(ctx
);
15160 check_insn(ctx
, ISA_MIPS32R2
);
15161 gen_store_srsgpr(rs
, rt
);
15164 goto pool32axf_invalid
;
15167 #ifndef CONFIG_USER_ONLY
15171 mips32_op
= OPC_TLBP
;
15174 mips32_op
= OPC_TLBR
;
15177 mips32_op
= OPC_TLBWI
;
15180 mips32_op
= OPC_TLBWR
;
15183 mips32_op
= OPC_TLBINV
;
15186 mips32_op
= OPC_TLBINVF
;
15189 mips32_op
= OPC_WAIT
;
15192 mips32_op
= OPC_DERET
;
15195 mips32_op
= OPC_ERET
;
15197 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15200 goto pool32axf_invalid
;
15206 check_cp0_enabled(ctx
);
15208 TCGv t0
= tcg_temp_new();
15210 save_cpu_state(ctx
, 1);
15211 gen_helper_di(t0
, cpu_env
);
15212 gen_store_gpr(t0
, rs
);
15213 /* Stop translation as we may have switched the execution mode */
15214 ctx
->base
.is_jmp
= DISAS_STOP
;
15219 check_cp0_enabled(ctx
);
15221 TCGv t0
= tcg_temp_new();
15223 save_cpu_state(ctx
, 1);
15224 gen_helper_ei(t0
, cpu_env
);
15225 gen_store_gpr(t0
, rs
);
15226 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15227 of translated code to check for pending interrupts. */
15228 gen_save_pc(ctx
->base
.pc_next
+ 4);
15229 ctx
->base
.is_jmp
= DISAS_EXIT
;
15234 goto pool32axf_invalid
;
15241 gen_sync(extract32(ctx
->opcode
, 16, 5));
15244 generate_exception_end(ctx
, EXCP_SYSCALL
);
15247 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15248 gen_helper_do_semihosting(cpu_env
);
15250 check_insn(ctx
, ISA_MIPS32
);
15251 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15252 generate_exception_end(ctx
, EXCP_RI
);
15254 generate_exception_end(ctx
, EXCP_DBp
);
15259 goto pool32axf_invalid
;
15263 switch (minor
& 3) {
15265 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15268 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15271 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15274 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15277 goto pool32axf_invalid
;
15281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15284 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15287 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15290 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15293 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15296 goto pool32axf_invalid
;
15301 MIPS_INVAL("pool32axf");
15302 generate_exception_end(ctx
, EXCP_RI
);
15307 /* Values for microMIPS fmt field. Variable-width, depending on which
15308 formats the instruction supports. */
15327 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15329 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15330 uint32_t mips32_op
;
15332 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15333 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15334 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15336 switch (extension
) {
15337 case FLOAT_1BIT_FMT(CFC1
, 0):
15338 mips32_op
= OPC_CFC1
;
15340 case FLOAT_1BIT_FMT(CTC1
, 0):
15341 mips32_op
= OPC_CTC1
;
15343 case FLOAT_1BIT_FMT(MFC1
, 0):
15344 mips32_op
= OPC_MFC1
;
15346 case FLOAT_1BIT_FMT(MTC1
, 0):
15347 mips32_op
= OPC_MTC1
;
15349 case FLOAT_1BIT_FMT(MFHC1
, 0):
15350 mips32_op
= OPC_MFHC1
;
15352 case FLOAT_1BIT_FMT(MTHC1
, 0):
15353 mips32_op
= OPC_MTHC1
;
15355 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15358 /* Reciprocal square root */
15359 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15360 mips32_op
= OPC_RSQRT_S
;
15362 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15363 mips32_op
= OPC_RSQRT_D
;
15367 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15368 mips32_op
= OPC_SQRT_S
;
15370 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15371 mips32_op
= OPC_SQRT_D
;
15375 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15376 mips32_op
= OPC_RECIP_S
;
15378 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15379 mips32_op
= OPC_RECIP_D
;
15383 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15384 mips32_op
= OPC_FLOOR_L_S
;
15386 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15387 mips32_op
= OPC_FLOOR_L_D
;
15389 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15390 mips32_op
= OPC_FLOOR_W_S
;
15392 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15393 mips32_op
= OPC_FLOOR_W_D
;
15397 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15398 mips32_op
= OPC_CEIL_L_S
;
15400 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15401 mips32_op
= OPC_CEIL_L_D
;
15403 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15404 mips32_op
= OPC_CEIL_W_S
;
15406 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15407 mips32_op
= OPC_CEIL_W_D
;
15411 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15412 mips32_op
= OPC_TRUNC_L_S
;
15414 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15415 mips32_op
= OPC_TRUNC_L_D
;
15417 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15418 mips32_op
= OPC_TRUNC_W_S
;
15420 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15421 mips32_op
= OPC_TRUNC_W_D
;
15425 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15426 mips32_op
= OPC_ROUND_L_S
;
15428 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15429 mips32_op
= OPC_ROUND_L_D
;
15431 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15432 mips32_op
= OPC_ROUND_W_S
;
15434 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15435 mips32_op
= OPC_ROUND_W_D
;
15438 /* Integer to floating-point conversion */
15439 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15440 mips32_op
= OPC_CVT_L_S
;
15442 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15443 mips32_op
= OPC_CVT_L_D
;
15445 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15446 mips32_op
= OPC_CVT_W_S
;
15448 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15449 mips32_op
= OPC_CVT_W_D
;
15452 /* Paired-foo conversions */
15453 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15454 mips32_op
= OPC_CVT_S_PL
;
15456 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15457 mips32_op
= OPC_CVT_S_PU
;
15459 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15460 mips32_op
= OPC_CVT_PW_PS
;
15462 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15463 mips32_op
= OPC_CVT_PS_PW
;
15466 /* Floating-point moves */
15467 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15468 mips32_op
= OPC_MOV_S
;
15470 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15471 mips32_op
= OPC_MOV_D
;
15473 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15474 mips32_op
= OPC_MOV_PS
;
15477 /* Absolute value */
15478 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15479 mips32_op
= OPC_ABS_S
;
15481 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15482 mips32_op
= OPC_ABS_D
;
15484 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15485 mips32_op
= OPC_ABS_PS
;
15489 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15490 mips32_op
= OPC_NEG_S
;
15492 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15493 mips32_op
= OPC_NEG_D
;
15495 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15496 mips32_op
= OPC_NEG_PS
;
15499 /* Reciprocal square root step */
15500 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15501 mips32_op
= OPC_RSQRT1_S
;
15503 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15504 mips32_op
= OPC_RSQRT1_D
;
15506 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15507 mips32_op
= OPC_RSQRT1_PS
;
15510 /* Reciprocal step */
15511 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15512 mips32_op
= OPC_RECIP1_S
;
15514 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15515 mips32_op
= OPC_RECIP1_S
;
15517 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15518 mips32_op
= OPC_RECIP1_PS
;
15521 /* Conversions from double */
15522 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15523 mips32_op
= OPC_CVT_D_S
;
15525 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15526 mips32_op
= OPC_CVT_D_W
;
15528 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15529 mips32_op
= OPC_CVT_D_L
;
15532 /* Conversions from single */
15533 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15534 mips32_op
= OPC_CVT_S_D
;
15536 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15537 mips32_op
= OPC_CVT_S_W
;
15539 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15540 mips32_op
= OPC_CVT_S_L
;
15542 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15545 /* Conditional moves on floating-point codes */
15546 case COND_FLOAT_MOV(MOVT
, 0):
15547 case COND_FLOAT_MOV(MOVT
, 1):
15548 case COND_FLOAT_MOV(MOVT
, 2):
15549 case COND_FLOAT_MOV(MOVT
, 3):
15550 case COND_FLOAT_MOV(MOVT
, 4):
15551 case COND_FLOAT_MOV(MOVT
, 5):
15552 case COND_FLOAT_MOV(MOVT
, 6):
15553 case COND_FLOAT_MOV(MOVT
, 7):
15554 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15555 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15557 case COND_FLOAT_MOV(MOVF
, 0):
15558 case COND_FLOAT_MOV(MOVF
, 1):
15559 case COND_FLOAT_MOV(MOVF
, 2):
15560 case COND_FLOAT_MOV(MOVF
, 3):
15561 case COND_FLOAT_MOV(MOVF
, 4):
15562 case COND_FLOAT_MOV(MOVF
, 5):
15563 case COND_FLOAT_MOV(MOVF
, 6):
15564 case COND_FLOAT_MOV(MOVF
, 7):
15565 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15566 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15569 MIPS_INVAL("pool32fxf");
15570 generate_exception_end(ctx
, EXCP_RI
);
15575 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15579 int rt
, rs
, rd
, rr
;
15581 uint32_t op
, minor
, minor2
, mips32_op
;
15582 uint32_t cond
, fmt
, cc
;
15584 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15585 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15587 rt
= (ctx
->opcode
>> 21) & 0x1f;
15588 rs
= (ctx
->opcode
>> 16) & 0x1f;
15589 rd
= (ctx
->opcode
>> 11) & 0x1f;
15590 rr
= (ctx
->opcode
>> 6) & 0x1f;
15591 imm
= (int16_t) ctx
->opcode
;
15593 op
= (ctx
->opcode
>> 26) & 0x3f;
15596 minor
= ctx
->opcode
& 0x3f;
15599 minor
= (ctx
->opcode
>> 6) & 0xf;
15602 mips32_op
= OPC_SLL
;
15605 mips32_op
= OPC_SRA
;
15608 mips32_op
= OPC_SRL
;
15611 mips32_op
= OPC_ROTR
;
15613 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15616 check_insn(ctx
, ISA_MIPS32R6
);
15617 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15620 check_insn(ctx
, ISA_MIPS32R6
);
15621 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15624 check_insn(ctx
, ISA_MIPS32R6
);
15625 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15628 goto pool32a_invalid
;
15632 minor
= (ctx
->opcode
>> 6) & 0xf;
15636 mips32_op
= OPC_ADD
;
15639 mips32_op
= OPC_ADDU
;
15642 mips32_op
= OPC_SUB
;
15645 mips32_op
= OPC_SUBU
;
15648 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15649 mips32_op
= OPC_MUL
;
15651 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15655 mips32_op
= OPC_SLLV
;
15658 mips32_op
= OPC_SRLV
;
15661 mips32_op
= OPC_SRAV
;
15664 mips32_op
= OPC_ROTRV
;
15666 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15668 /* Logical operations */
15670 mips32_op
= OPC_AND
;
15673 mips32_op
= OPC_OR
;
15676 mips32_op
= OPC_NOR
;
15679 mips32_op
= OPC_XOR
;
15681 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15683 /* Set less than */
15685 mips32_op
= OPC_SLT
;
15688 mips32_op
= OPC_SLTU
;
15690 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15693 goto pool32a_invalid
;
15697 minor
= (ctx
->opcode
>> 6) & 0xf;
15699 /* Conditional moves */
15700 case MOVN
: /* MUL */
15701 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15703 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15706 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15709 case MOVZ
: /* MUH */
15710 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15712 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15715 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15719 check_insn(ctx
, ISA_MIPS32R6
);
15720 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15723 check_insn(ctx
, ISA_MIPS32R6
);
15724 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15726 case LWXS
: /* DIV */
15727 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15729 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15732 gen_ldxs(ctx
, rs
, rt
, rd
);
15736 check_insn(ctx
, ISA_MIPS32R6
);
15737 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15740 check_insn(ctx
, ISA_MIPS32R6
);
15741 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15744 check_insn(ctx
, ISA_MIPS32R6
);
15745 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15748 goto pool32a_invalid
;
15752 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15755 check_insn(ctx
, ISA_MIPS32R6
);
15756 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15757 extract32(ctx
->opcode
, 9, 2));
15760 check_insn(ctx
, ISA_MIPS32R6
);
15761 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15764 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15767 gen_pool32axf(env
, ctx
, rt
, rs
);
15770 generate_exception_end(ctx
, EXCP_BREAK
);
15773 check_insn(ctx
, ISA_MIPS32R6
);
15774 generate_exception_end(ctx
, EXCP_RI
);
15778 MIPS_INVAL("pool32a");
15779 generate_exception_end(ctx
, EXCP_RI
);
15784 minor
= (ctx
->opcode
>> 12) & 0xf;
15787 check_cp0_enabled(ctx
);
15788 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15789 gen_cache_operation(ctx
, rt
, rs
, imm
);
15794 /* COP2: Not implemented. */
15795 generate_exception_err(ctx
, EXCP_CpU
, 2);
15797 #ifdef TARGET_MIPS64
15800 check_insn(ctx
, ISA_MIPS3
);
15801 check_mips_64(ctx
);
15806 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15808 #ifdef TARGET_MIPS64
15811 check_insn(ctx
, ISA_MIPS3
);
15812 check_mips_64(ctx
);
15817 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15820 MIPS_INVAL("pool32b");
15821 generate_exception_end(ctx
, EXCP_RI
);
15826 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15827 minor
= ctx
->opcode
& 0x3f;
15828 check_cp1_enabled(ctx
);
15831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15832 mips32_op
= OPC_ALNV_PS
;
15835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15836 mips32_op
= OPC_MADD_S
;
15839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15840 mips32_op
= OPC_MADD_D
;
15843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15844 mips32_op
= OPC_MADD_PS
;
15847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15848 mips32_op
= OPC_MSUB_S
;
15851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15852 mips32_op
= OPC_MSUB_D
;
15855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15856 mips32_op
= OPC_MSUB_PS
;
15859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15860 mips32_op
= OPC_NMADD_S
;
15863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15864 mips32_op
= OPC_NMADD_D
;
15867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15868 mips32_op
= OPC_NMADD_PS
;
15871 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15872 mips32_op
= OPC_NMSUB_S
;
15875 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15876 mips32_op
= OPC_NMSUB_D
;
15879 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15880 mips32_op
= OPC_NMSUB_PS
;
15882 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15884 case CABS_COND_FMT
:
15885 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15886 cond
= (ctx
->opcode
>> 6) & 0xf;
15887 cc
= (ctx
->opcode
>> 13) & 0x7;
15888 fmt
= (ctx
->opcode
>> 10) & 0x3;
15891 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15894 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15897 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15900 goto pool32f_invalid
;
15904 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15905 cond
= (ctx
->opcode
>> 6) & 0xf;
15906 cc
= (ctx
->opcode
>> 13) & 0x7;
15907 fmt
= (ctx
->opcode
>> 10) & 0x3;
15910 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15913 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15916 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15919 goto pool32f_invalid
;
15923 check_insn(ctx
, ISA_MIPS32R6
);
15924 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15927 check_insn(ctx
, ISA_MIPS32R6
);
15928 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15931 gen_pool32fxf(ctx
, rt
, rs
);
15935 switch ((ctx
->opcode
>> 6) & 0x7) {
15937 mips32_op
= OPC_PLL_PS
;
15940 mips32_op
= OPC_PLU_PS
;
15943 mips32_op
= OPC_PUL_PS
;
15946 mips32_op
= OPC_PUU_PS
;
15949 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15950 mips32_op
= OPC_CVT_PS_S
;
15952 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15955 goto pool32f_invalid
;
15959 check_insn(ctx
, ISA_MIPS32R6
);
15960 switch ((ctx
->opcode
>> 9) & 0x3) {
15962 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15965 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15968 goto pool32f_invalid
;
15973 switch ((ctx
->opcode
>> 6) & 0x7) {
15975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15976 mips32_op
= OPC_LWXC1
;
15979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15980 mips32_op
= OPC_SWXC1
;
15983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15984 mips32_op
= OPC_LDXC1
;
15987 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15988 mips32_op
= OPC_SDXC1
;
15991 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15992 mips32_op
= OPC_LUXC1
;
15995 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15996 mips32_op
= OPC_SUXC1
;
15998 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16001 goto pool32f_invalid
;
16005 check_insn(ctx
, ISA_MIPS32R6
);
16006 switch ((ctx
->opcode
>> 9) & 0x3) {
16008 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16011 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16014 goto pool32f_invalid
;
16019 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16020 fmt
= (ctx
->opcode
>> 9) & 0x3;
16021 switch ((ctx
->opcode
>> 6) & 0x7) {
16025 mips32_op
= OPC_RSQRT2_S
;
16028 mips32_op
= OPC_RSQRT2_D
;
16031 mips32_op
= OPC_RSQRT2_PS
;
16034 goto pool32f_invalid
;
16040 mips32_op
= OPC_RECIP2_S
;
16043 mips32_op
= OPC_RECIP2_D
;
16046 mips32_op
= OPC_RECIP2_PS
;
16049 goto pool32f_invalid
;
16053 mips32_op
= OPC_ADDR_PS
;
16056 mips32_op
= OPC_MULR_PS
;
16058 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16061 goto pool32f_invalid
;
16065 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16066 cc
= (ctx
->opcode
>> 13) & 0x7;
16067 fmt
= (ctx
->opcode
>> 9) & 0x3;
16068 switch ((ctx
->opcode
>> 6) & 0x7) {
16069 case MOVF_FMT
: /* RINT_FMT */
16070 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16074 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16077 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16080 goto pool32f_invalid
;
16086 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16089 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16093 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16096 goto pool32f_invalid
;
16100 case MOVT_FMT
: /* CLASS_FMT */
16101 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16105 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16108 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16111 goto pool32f_invalid
;
16117 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16120 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16124 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16127 goto pool32f_invalid
;
16132 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16135 goto pool32f_invalid
;
16138 #define FINSN_3ARG_SDPS(prfx) \
16139 switch ((ctx->opcode >> 8) & 0x3) { \
16141 mips32_op = OPC_##prfx##_S; \
16144 mips32_op = OPC_##prfx##_D; \
16146 case FMT_SDPS_PS: \
16148 mips32_op = OPC_##prfx##_PS; \
16151 goto pool32f_invalid; \
16154 check_insn(ctx
, ISA_MIPS32R6
);
16155 switch ((ctx
->opcode
>> 9) & 0x3) {
16157 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16160 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16163 goto pool32f_invalid
;
16167 check_insn(ctx
, ISA_MIPS32R6
);
16168 switch ((ctx
->opcode
>> 9) & 0x3) {
16170 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16173 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16176 goto pool32f_invalid
;
16180 /* regular FP ops */
16181 switch ((ctx
->opcode
>> 6) & 0x3) {
16183 FINSN_3ARG_SDPS(ADD
);
16186 FINSN_3ARG_SDPS(SUB
);
16189 FINSN_3ARG_SDPS(MUL
);
16192 fmt
= (ctx
->opcode
>> 8) & 0x3;
16194 mips32_op
= OPC_DIV_D
;
16195 } else if (fmt
== 0) {
16196 mips32_op
= OPC_DIV_S
;
16198 goto pool32f_invalid
;
16202 goto pool32f_invalid
;
16207 switch ((ctx
->opcode
>> 6) & 0x7) {
16208 case MOVN_FMT
: /* SELEQZ_FMT */
16209 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16211 switch ((ctx
->opcode
>> 9) & 0x3) {
16213 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16216 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16219 goto pool32f_invalid
;
16223 FINSN_3ARG_SDPS(MOVN
);
16227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16228 FINSN_3ARG_SDPS(MOVN
);
16230 case MOVZ_FMT
: /* SELNEZ_FMT */
16231 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16233 switch ((ctx
->opcode
>> 9) & 0x3) {
16235 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16238 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16241 goto pool32f_invalid
;
16245 FINSN_3ARG_SDPS(MOVZ
);
16249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16250 FINSN_3ARG_SDPS(MOVZ
);
16253 check_insn(ctx
, ISA_MIPS32R6
);
16254 switch ((ctx
->opcode
>> 9) & 0x3) {
16256 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16259 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16262 goto pool32f_invalid
;
16266 check_insn(ctx
, ISA_MIPS32R6
);
16267 switch ((ctx
->opcode
>> 9) & 0x3) {
16269 mips32_op
= OPC_MADDF_S
;
16272 mips32_op
= OPC_MADDF_D
;
16275 goto pool32f_invalid
;
16279 check_insn(ctx
, ISA_MIPS32R6
);
16280 switch ((ctx
->opcode
>> 9) & 0x3) {
16282 mips32_op
= OPC_MSUBF_S
;
16285 mips32_op
= OPC_MSUBF_D
;
16288 goto pool32f_invalid
;
16292 goto pool32f_invalid
;
16296 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16300 MIPS_INVAL("pool32f");
16301 generate_exception_end(ctx
, EXCP_RI
);
16305 generate_exception_err(ctx
, EXCP_CpU
, 1);
16309 minor
= (ctx
->opcode
>> 21) & 0x1f;
16312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16313 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16317 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16318 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16321 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16322 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16323 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16326 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16327 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16331 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16332 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16335 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16336 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16337 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16341 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16344 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16345 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16349 case TLTI
: /* BC1EQZC */
16350 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16352 check_cp1_enabled(ctx
);
16353 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16356 mips32_op
= OPC_TLTI
;
16360 case TGEI
: /* BC1NEZC */
16361 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16363 check_cp1_enabled(ctx
);
16364 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16367 mips32_op
= OPC_TGEI
;
16372 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16373 mips32_op
= OPC_TLTIU
;
16376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16377 mips32_op
= OPC_TGEIU
;
16379 case TNEI
: /* SYNCI */
16380 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16382 /* Break the TB to be able to sync copied instructions
16384 ctx
->base
.is_jmp
= DISAS_STOP
;
16387 mips32_op
= OPC_TNEI
;
16392 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16393 mips32_op
= OPC_TEQI
;
16395 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16400 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16401 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16402 4, rs
, 0, imm
<< 1, 0);
16403 /* Compact branches don't have a delay slot, so just let
16404 the normal delay slot handling take us to the branch
16408 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16409 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16412 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16413 /* Break the TB to be able to sync copied instructions
16415 ctx
->base
.is_jmp
= DISAS_STOP
;
16419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16420 /* COP2: Not implemented. */
16421 generate_exception_err(ctx
, EXCP_CpU
, 2);
16424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16425 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16428 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16429 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16433 mips32_op
= OPC_BC1FANY4
;
16436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16437 mips32_op
= OPC_BC1TANY4
;
16440 check_insn(ctx
, ASE_MIPS3D
);
16443 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16444 check_cp1_enabled(ctx
);
16445 gen_compute_branch1(ctx
, mips32_op
,
16446 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16448 generate_exception_err(ctx
, EXCP_CpU
, 1);
16453 /* MIPS DSP: not implemented */
16456 MIPS_INVAL("pool32i");
16457 generate_exception_end(ctx
, EXCP_RI
);
16462 minor
= (ctx
->opcode
>> 12) & 0xf;
16463 offset
= sextract32(ctx
->opcode
, 0,
16464 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16467 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16468 mips32_op
= OPC_LWL
;
16471 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16472 mips32_op
= OPC_SWL
;
16475 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16476 mips32_op
= OPC_LWR
;
16479 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16480 mips32_op
= OPC_SWR
;
16482 #if defined(TARGET_MIPS64)
16484 check_insn(ctx
, ISA_MIPS3
);
16485 check_mips_64(ctx
);
16486 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16487 mips32_op
= OPC_LDL
;
16490 check_insn(ctx
, ISA_MIPS3
);
16491 check_mips_64(ctx
);
16492 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16493 mips32_op
= OPC_SDL
;
16496 check_insn(ctx
, ISA_MIPS3
);
16497 check_mips_64(ctx
);
16498 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16499 mips32_op
= OPC_LDR
;
16502 check_insn(ctx
, ISA_MIPS3
);
16503 check_mips_64(ctx
);
16504 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16505 mips32_op
= OPC_SDR
;
16508 check_insn(ctx
, ISA_MIPS3
);
16509 check_mips_64(ctx
);
16510 mips32_op
= OPC_LWU
;
16513 check_insn(ctx
, ISA_MIPS3
);
16514 check_mips_64(ctx
);
16515 mips32_op
= OPC_LLD
;
16519 mips32_op
= OPC_LL
;
16522 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16525 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16528 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16530 #if defined(TARGET_MIPS64)
16532 check_insn(ctx
, ISA_MIPS3
);
16533 check_mips_64(ctx
);
16534 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16539 MIPS_INVAL("pool32c ld-eva");
16540 generate_exception_end(ctx
, EXCP_RI
);
16543 check_cp0_enabled(ctx
);
16545 minor2
= (ctx
->opcode
>> 9) & 0x7;
16546 offset
= sextract32(ctx
->opcode
, 0, 9);
16549 mips32_op
= OPC_LBUE
;
16552 mips32_op
= OPC_LHUE
;
16555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16556 mips32_op
= OPC_LWLE
;
16559 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16560 mips32_op
= OPC_LWRE
;
16563 mips32_op
= OPC_LBE
;
16566 mips32_op
= OPC_LHE
;
16569 mips32_op
= OPC_LLE
;
16572 mips32_op
= OPC_LWE
;
16578 MIPS_INVAL("pool32c st-eva");
16579 generate_exception_end(ctx
, EXCP_RI
);
16582 check_cp0_enabled(ctx
);
16584 minor2
= (ctx
->opcode
>> 9) & 0x7;
16585 offset
= sextract32(ctx
->opcode
, 0, 9);
16588 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16589 mips32_op
= OPC_SWLE
;
16592 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16593 mips32_op
= OPC_SWRE
;
16596 /* Treat as no-op */
16597 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16598 /* hint codes 24-31 are reserved and signal RI */
16599 generate_exception(ctx
, EXCP_RI
);
16603 /* Treat as no-op */
16604 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16605 gen_cache_operation(ctx
, rt
, rs
, offset
);
16609 mips32_op
= OPC_SBE
;
16612 mips32_op
= OPC_SHE
;
16615 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16618 mips32_op
= OPC_SWE
;
16623 /* Treat as no-op */
16624 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16625 /* hint codes 24-31 are reserved and signal RI */
16626 generate_exception(ctx
, EXCP_RI
);
16630 MIPS_INVAL("pool32c");
16631 generate_exception_end(ctx
, EXCP_RI
);
16635 case ADDI32
: /* AUI, LUI */
16636 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16638 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16641 mips32_op
= OPC_ADDI
;
16646 mips32_op
= OPC_ADDIU
;
16648 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16651 /* Logical operations */
16653 mips32_op
= OPC_ORI
;
16656 mips32_op
= OPC_XORI
;
16659 mips32_op
= OPC_ANDI
;
16661 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16664 /* Set less than immediate */
16666 mips32_op
= OPC_SLTI
;
16669 mips32_op
= OPC_SLTIU
;
16671 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16674 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16675 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16676 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16677 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16679 case JALS32
: /* BOVC, BEQC, BEQZALC */
16680 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16683 mips32_op
= OPC_BOVC
;
16684 } else if (rs
< rt
&& rs
== 0) {
16686 mips32_op
= OPC_BEQZALC
;
16689 mips32_op
= OPC_BEQC
;
16691 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16694 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16695 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16696 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16699 case BEQ32
: /* BC */
16700 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16702 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16703 sextract32(ctx
->opcode
<< 1, 0, 27));
16706 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16709 case BNE32
: /* BALC */
16710 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16712 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16713 sextract32(ctx
->opcode
<< 1, 0, 27));
16716 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16719 case J32
: /* BGTZC, BLTZC, BLTC */
16720 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16721 if (rs
== 0 && rt
!= 0) {
16723 mips32_op
= OPC_BGTZC
;
16724 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16726 mips32_op
= OPC_BLTZC
;
16729 mips32_op
= OPC_BLTC
;
16731 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16734 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16735 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16738 case JAL32
: /* BLEZC, BGEZC, BGEC */
16739 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16740 if (rs
== 0 && rt
!= 0) {
16742 mips32_op
= OPC_BLEZC
;
16743 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16745 mips32_op
= OPC_BGEZC
;
16748 mips32_op
= OPC_BGEC
;
16750 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16753 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16754 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16755 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16758 /* Floating point (COP1) */
16760 mips32_op
= OPC_LWC1
;
16763 mips32_op
= OPC_LDC1
;
16766 mips32_op
= OPC_SWC1
;
16769 mips32_op
= OPC_SDC1
;
16771 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16773 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16774 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16775 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16776 switch ((ctx
->opcode
>> 16) & 0x1f) {
16785 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16788 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16791 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16801 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16804 generate_exception(ctx
, EXCP_RI
);
16809 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16810 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16812 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16815 case BNVC
: /* BNEC, BNEZALC */
16816 check_insn(ctx
, ISA_MIPS32R6
);
16819 mips32_op
= OPC_BNVC
;
16820 } else if (rs
< rt
&& rs
== 0) {
16822 mips32_op
= OPC_BNEZALC
;
16825 mips32_op
= OPC_BNEC
;
16827 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16829 case R6_BNEZC
: /* JIALC */
16830 check_insn(ctx
, ISA_MIPS32R6
);
16833 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16834 sextract32(ctx
->opcode
<< 1, 0, 22));
16837 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16840 case R6_BEQZC
: /* JIC */
16841 check_insn(ctx
, ISA_MIPS32R6
);
16844 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16845 sextract32(ctx
->opcode
<< 1, 0, 22));
16848 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16851 case BLEZALC
: /* BGEZALC, BGEUC */
16852 check_insn(ctx
, ISA_MIPS32R6
);
16853 if (rs
== 0 && rt
!= 0) {
16855 mips32_op
= OPC_BLEZALC
;
16856 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16858 mips32_op
= OPC_BGEZALC
;
16861 mips32_op
= OPC_BGEUC
;
16863 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16865 case BGTZALC
: /* BLTZALC, BLTUC */
16866 check_insn(ctx
, ISA_MIPS32R6
);
16867 if (rs
== 0 && rt
!= 0) {
16869 mips32_op
= OPC_BGTZALC
;
16870 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16872 mips32_op
= OPC_BLTZALC
;
16875 mips32_op
= OPC_BLTUC
;
16877 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16879 /* Loads and stores */
16881 mips32_op
= OPC_LB
;
16884 mips32_op
= OPC_LBU
;
16887 mips32_op
= OPC_LH
;
16890 mips32_op
= OPC_LHU
;
16893 mips32_op
= OPC_LW
;
16895 #ifdef TARGET_MIPS64
16897 check_insn(ctx
, ISA_MIPS3
);
16898 check_mips_64(ctx
);
16899 mips32_op
= OPC_LD
;
16902 check_insn(ctx
, ISA_MIPS3
);
16903 check_mips_64(ctx
);
16904 mips32_op
= OPC_SD
;
16908 mips32_op
= OPC_SB
;
16911 mips32_op
= OPC_SH
;
16914 mips32_op
= OPC_SW
;
16917 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16920 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16923 generate_exception_end(ctx
, EXCP_RI
);
16928 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16932 /* make sure instructions are on a halfword boundary */
16933 if (ctx
->base
.pc_next
& 0x1) {
16934 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16935 generate_exception_end(ctx
, EXCP_AdEL
);
16939 op
= (ctx
->opcode
>> 10) & 0x3f;
16940 /* Enforce properly-sized instructions in a delay slot */
16941 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16942 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16944 /* POOL32A, POOL32B, POOL32I, POOL32C */
16946 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16948 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16950 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16952 /* LB32, LH32, LWC132, LDC132, LW32 */
16953 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16954 generate_exception_end(ctx
, EXCP_RI
);
16959 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16961 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16963 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16964 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16965 generate_exception_end(ctx
, EXCP_RI
);
16975 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16976 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16977 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
16980 switch (ctx
->opcode
& 0x1) {
16988 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16989 /* In the Release 6 the register number location in
16990 * the instruction encoding has changed.
16992 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
16994 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17000 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17001 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17002 int amount
= (ctx
->opcode
>> 1) & 0x7;
17004 amount
= amount
== 0 ? 8 : amount
;
17006 switch (ctx
->opcode
& 0x1) {
17015 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17019 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17020 gen_pool16c_r6_insn(ctx
);
17022 gen_pool16c_insn(ctx
);
17027 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17028 int rb
= 28; /* GP */
17029 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17031 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17036 if (ctx
->opcode
& 1) {
17037 generate_exception_end(ctx
, EXCP_RI
);
17040 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17041 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17042 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17043 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17048 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17049 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17050 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17051 offset
= (offset
== 0xf ? -1 : offset
);
17053 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17058 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17059 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17060 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17062 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17067 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17068 int rb
= 29; /* SP */
17069 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17071 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17076 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17077 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17078 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17080 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17085 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17086 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17087 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17089 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17094 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17095 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17096 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17098 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17103 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17104 int rb
= 29; /* SP */
17105 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17107 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17112 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17113 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17114 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17116 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17121 int rd
= uMIPS_RD5(ctx
->opcode
);
17122 int rs
= uMIPS_RS5(ctx
->opcode
);
17124 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17131 switch (ctx
->opcode
& 0x1) {
17141 switch (ctx
->opcode
& 0x1) {
17146 gen_addiur1sp(ctx
);
17150 case B16
: /* BC16 */
17151 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17152 sextract32(ctx
->opcode
, 0, 10) << 1,
17153 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17155 case BNEZ16
: /* BNEZC16 */
17156 case BEQZ16
: /* BEQZC16 */
17157 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17158 mmreg(uMIPS_RD(ctx
->opcode
)),
17159 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17160 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17165 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17166 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17168 imm
= (imm
== 0x7f ? -1 : imm
);
17169 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17175 generate_exception_end(ctx
, EXCP_RI
);
17178 decode_micromips32_opc(env
, ctx
);
17191 /* MAJOR, P16, and P32 pools opcodes */
17195 NM_MOVE_BALC
= 0x02,
17203 NM_P16_SHIFT
= 0x0c,
17221 NM_P_LS_U12
= 0x21,
17231 NM_P16_ADDU
= 0x2c,
17245 NM_MOVEPREV
= 0x3f,
17248 /* POOL32A instruction pool */
17250 NM_POOL32A0
= 0x00,
17251 NM_SPECIAL2
= 0x01,
17254 NM_POOL32A5
= 0x05,
17255 NM_POOL32A7
= 0x07,
17258 /* P.GP.W instruction pool */
17260 NM_ADDIUGP_W
= 0x00,
17265 /* P48I instruction pool */
17269 NM_ADDIUGP48
= 0x02,
17270 NM_ADDIUPC48
= 0x03,
17275 /* P.U12 instruction pool */
17284 NM_ADDIUNEG
= 0x08,
17291 /* POOL32F instruction pool */
17293 NM_POOL32F_0
= 0x00,
17294 NM_POOL32F_3
= 0x03,
17295 NM_POOL32F_5
= 0x05,
17298 /* POOL32S instruction pool */
17300 NM_POOL32S_0
= 0x00,
17301 NM_POOL32S_4
= 0x04,
17304 /* P.LUI instruction pool */
17310 /* P.GP.BH instruction pool */
17315 NM_ADDIUGP_B
= 0x03,
17318 NM_P_GP_CP1
= 0x06,
17321 /* P.LS.U12 instruction pool */
17326 NM_P_PREFU12
= 0x03,
17339 /* P.LS.S9 instruction pool */
17345 NM_P_LS_UAWM
= 0x05,
17348 /* P.BAL instruction pool */
17354 /* P.J instruction pool */
17357 NM_JALRC_HB
= 0x01,
17358 NM_P_BALRSC
= 0x08,
17361 /* P.BR1 instruction pool */
17369 /* P.BR2 instruction pool */
17376 /* P.BRI instruction pool */
17388 /* P16.SHIFT instruction pool */
17394 /* POOL16C instruction pool */
17396 NM_POOL16C_0
= 0x00,
17400 /* P16.A1 instruction pool */
17402 NM_ADDIUR1SP
= 0x01,
17405 /* P16.A2 instruction pool */
17408 NM_P_ADDIURS5
= 0x01,
17411 /* P16.ADDU instruction pool */
17417 /* P16.SR instruction pool */
17420 NM_RESTORE_JRC16
= 0x01,
17423 /* P16.4X4 instruction pool */
17429 /* P16.LB instruction pool */
17436 /* P16.LH instruction pool */
17443 /* P.RI instruction pool */
17446 NM_P_SYSCALL
= 0x01,
17451 /* POOL32A0 instruction pool */
17486 NM_D_E_MT_VPE
= 0x56,
17494 /* CRC32 instruction pool */
17504 /* POOL32A5 instruction pool */
17506 NM_CMP_EQ_PH
= 0x00,
17507 NM_CMP_LT_PH
= 0x08,
17508 NM_CMP_LE_PH
= 0x10,
17509 NM_CMPGU_EQ_QB
= 0x18,
17510 NM_CMPGU_LT_QB
= 0x20,
17511 NM_CMPGU_LE_QB
= 0x28,
17512 NM_CMPGDU_EQ_QB
= 0x30,
17513 NM_CMPGDU_LT_QB
= 0x38,
17514 NM_CMPGDU_LE_QB
= 0x40,
17515 NM_CMPU_EQ_QB
= 0x48,
17516 NM_CMPU_LT_QB
= 0x50,
17517 NM_CMPU_LE_QB
= 0x58,
17518 NM_ADDQ_S_W
= 0x60,
17519 NM_SUBQ_S_W
= 0x68,
17523 NM_ADDQ_S_PH
= 0x01,
17524 NM_ADDQH_R_PH
= 0x09,
17525 NM_ADDQH_R_W
= 0x11,
17526 NM_ADDU_S_QB
= 0x19,
17527 NM_ADDU_S_PH
= 0x21,
17528 NM_ADDUH_R_QB
= 0x29,
17529 NM_SHRAV_R_PH
= 0x31,
17530 NM_SHRAV_R_QB
= 0x39,
17531 NM_SUBQ_S_PH
= 0x41,
17532 NM_SUBQH_R_PH
= 0x49,
17533 NM_SUBQH_R_W
= 0x51,
17534 NM_SUBU_S_QB
= 0x59,
17535 NM_SUBU_S_PH
= 0x61,
17536 NM_SUBUH_R_QB
= 0x69,
17537 NM_SHLLV_S_PH
= 0x71,
17538 NM_PRECR_SRA_R_PH_W
= 0x79,
17540 NM_MULEU_S_PH_QBL
= 0x12,
17541 NM_MULEU_S_PH_QBR
= 0x1a,
17542 NM_MULQ_RS_PH
= 0x22,
17543 NM_MULQ_S_PH
= 0x2a,
17544 NM_MULQ_RS_W
= 0x32,
17545 NM_MULQ_S_W
= 0x3a,
17548 NM_SHRAV_R_W
= 0x5a,
17549 NM_SHRLV_PH
= 0x62,
17550 NM_SHRLV_QB
= 0x6a,
17551 NM_SHLLV_QB
= 0x72,
17552 NM_SHLLV_S_W
= 0x7a,
17556 NM_MULEQ_S_W_PHL
= 0x04,
17557 NM_MULEQ_S_W_PHR
= 0x0c,
17559 NM_MUL_S_PH
= 0x05,
17560 NM_PRECR_QB_PH
= 0x0d,
17561 NM_PRECRQ_QB_PH
= 0x15,
17562 NM_PRECRQ_PH_W
= 0x1d,
17563 NM_PRECRQ_RS_PH_W
= 0x25,
17564 NM_PRECRQU_S_QB_PH
= 0x2d,
17565 NM_PACKRL_PH
= 0x35,
17569 NM_SHRA_R_W
= 0x5e,
17570 NM_SHRA_R_PH
= 0x66,
17571 NM_SHLL_S_PH
= 0x76,
17572 NM_SHLL_S_W
= 0x7e,
17577 /* POOL32A7 instruction pool */
17582 NM_POOL32AXF
= 0x07,
17585 /* P.SR instruction pool */
17591 /* P.SHIFT instruction pool */
17599 /* P.ROTX instruction pool */
17604 /* P.INS instruction pool */
17609 /* P.EXT instruction pool */
17614 /* POOL32F_0 (fmt) instruction pool */
17619 NM_SELEQZ_S
= 0x07,
17620 NM_SELEQZ_D
= 0x47,
17624 NM_SELNEZ_S
= 0x0f,
17625 NM_SELNEZ_D
= 0x4f,
17640 /* POOL32F_3 instruction pool */
17644 NM_MINA_FMT
= 0x04,
17645 NM_MAXA_FMT
= 0x05,
17646 NM_POOL32FXF
= 0x07,
17649 /* POOL32F_5 instruction pool */
17651 NM_CMP_CONDN_S
= 0x00,
17652 NM_CMP_CONDN_D
= 0x02,
17655 /* P.GP.LH instruction pool */
17661 /* P.GP.SH instruction pool */
17666 /* P.GP.CP1 instruction pool */
17674 /* P.LS.S0 instruction pool */
17691 NM_P_PREFS9
= 0x03,
17697 /* P.LS.S1 instruction pool */
17699 NM_ASET_ACLR
= 0x02,
17707 /* P.LS.E0 instruction pool */
17723 /* P.PREFE instruction pool */
17729 /* P.LLE instruction pool */
17735 /* P.SCE instruction pool */
17741 /* P.LS.WM instruction pool */
17747 /* P.LS.UAWM instruction pool */
17753 /* P.BR3A instruction pool */
17759 NM_BPOSGE32C
= 0x04,
17762 /* P16.RI instruction pool */
17764 NM_P16_SYSCALL
= 0x01,
17769 /* POOL16C_0 instruction pool */
17771 NM_POOL16C_00
= 0x00,
17774 /* P16.JRC instruction pool */
17780 /* P.SYSCALL instruction pool */
17786 /* P.TRAP instruction pool */
17792 /* P.CMOVE instruction pool */
17798 /* POOL32Axf instruction pool */
17800 NM_POOL32AXF_1
= 0x01,
17801 NM_POOL32AXF_2
= 0x02,
17802 NM_POOL32AXF_4
= 0x04,
17803 NM_POOL32AXF_5
= 0x05,
17804 NM_POOL32AXF_7
= 0x07,
17807 /* POOL32Axf_1 instruction pool */
17809 NM_POOL32AXF_1_0
= 0x00,
17810 NM_POOL32AXF_1_1
= 0x01,
17811 NM_POOL32AXF_1_3
= 0x03,
17812 NM_POOL32AXF_1_4
= 0x04,
17813 NM_POOL32AXF_1_5
= 0x05,
17814 NM_POOL32AXF_1_7
= 0x07,
17817 /* POOL32Axf_2 instruction pool */
17819 NM_POOL32AXF_2_0_7
= 0x00,
17820 NM_POOL32AXF_2_8_15
= 0x01,
17821 NM_POOL32AXF_2_16_23
= 0x02,
17822 NM_POOL32AXF_2_24_31
= 0x03,
17825 /* POOL32Axf_7 instruction pool */
17827 NM_SHRA_R_QB
= 0x0,
17832 /* POOL32Axf_1_0 instruction pool */
17840 /* POOL32Axf_1_1 instruction pool */
17846 /* POOL32Axf_1_3 instruction pool */
17854 /* POOL32Axf_1_4 instruction pool */
17860 /* POOL32Axf_1_5 instruction pool */
17862 NM_MAQ_S_W_PHR
= 0x0,
17863 NM_MAQ_S_W_PHL
= 0x1,
17864 NM_MAQ_SA_W_PHR
= 0x2,
17865 NM_MAQ_SA_W_PHL
= 0x3,
17868 /* POOL32Axf_1_7 instruction pool */
17872 NM_EXTR_RS_W
= 0x2,
17876 /* POOL32Axf_2_0_7 instruction pool */
17879 NM_DPAQ_S_W_PH
= 0x1,
17881 NM_DPSQ_S_W_PH
= 0x3,
17888 /* POOL32Axf_2_8_15 instruction pool */
17890 NM_DPAX_W_PH
= 0x0,
17891 NM_DPAQ_SA_L_W
= 0x1,
17892 NM_DPSX_W_PH
= 0x2,
17893 NM_DPSQ_SA_L_W
= 0x3,
17896 NM_EXTRV_R_W
= 0x7,
17899 /* POOL32Axf_2_16_23 instruction pool */
17901 NM_DPAU_H_QBL
= 0x0,
17902 NM_DPAQX_S_W_PH
= 0x1,
17903 NM_DPSU_H_QBL
= 0x2,
17904 NM_DPSQX_S_W_PH
= 0x3,
17907 NM_MULSA_W_PH
= 0x6,
17908 NM_EXTRV_RS_W
= 0x7,
17911 /* POOL32Axf_2_24_31 instruction pool */
17913 NM_DPAU_H_QBR
= 0x0,
17914 NM_DPAQX_SA_W_PH
= 0x1,
17915 NM_DPSU_H_QBR
= 0x2,
17916 NM_DPSQX_SA_W_PH
= 0x3,
17919 NM_MULSAQ_S_W_PH
= 0x6,
17920 NM_EXTRV_S_H
= 0x7,
17923 /* POOL32Axf_{4, 5} instruction pool */
17942 /* nanoMIPS DSP instructions */
17943 NM_ABSQ_S_QB
= 0x00,
17944 NM_ABSQ_S_PH
= 0x08,
17945 NM_ABSQ_S_W
= 0x10,
17946 NM_PRECEQ_W_PHL
= 0x28,
17947 NM_PRECEQ_W_PHR
= 0x30,
17948 NM_PRECEQU_PH_QBL
= 0x38,
17949 NM_PRECEQU_PH_QBR
= 0x48,
17950 NM_PRECEU_PH_QBL
= 0x58,
17951 NM_PRECEU_PH_QBR
= 0x68,
17952 NM_PRECEQU_PH_QBLA
= 0x39,
17953 NM_PRECEQU_PH_QBRA
= 0x49,
17954 NM_PRECEU_PH_QBLA
= 0x59,
17955 NM_PRECEU_PH_QBRA
= 0x69,
17956 NM_REPLV_PH
= 0x01,
17957 NM_REPLV_QB
= 0x09,
17960 NM_RADDU_W_QB
= 0x78,
17966 /* PP.SR instruction pool */
17970 NM_RESTORE_JRC
= 0x03,
17973 /* P.SR.F instruction pool */
17976 NM_RESTOREF
= 0x01,
17979 /* P16.SYSCALL instruction pool */
17981 NM_SYSCALL16
= 0x00,
17982 NM_HYPCALL16
= 0x01,
17985 /* POOL16C_00 instruction pool */
17993 /* PP.LSX and PP.LSXS instruction pool */
18031 /* ERETx instruction pool */
18037 /* POOL32FxF_{0, 1} insturction pool */
18046 NM_CVT_S_PL
= 0x84,
18047 NM_CVT_S_PU
= 0xa4,
18049 NM_CVT_L_S
= 0x004,
18050 NM_CVT_L_D
= 0x104,
18051 NM_CVT_W_S
= 0x024,
18052 NM_CVT_W_D
= 0x124,
18054 NM_RSQRT_S
= 0x008,
18055 NM_RSQRT_D
= 0x108,
18060 NM_RECIP_S
= 0x048,
18061 NM_RECIP_D
= 0x148,
18063 NM_FLOOR_L_S
= 0x00c,
18064 NM_FLOOR_L_D
= 0x10c,
18066 NM_FLOOR_W_S
= 0x02c,
18067 NM_FLOOR_W_D
= 0x12c,
18069 NM_CEIL_L_S
= 0x04c,
18070 NM_CEIL_L_D
= 0x14c,
18071 NM_CEIL_W_S
= 0x06c,
18072 NM_CEIL_W_D
= 0x16c,
18073 NM_TRUNC_L_S
= 0x08c,
18074 NM_TRUNC_L_D
= 0x18c,
18075 NM_TRUNC_W_S
= 0x0ac,
18076 NM_TRUNC_W_D
= 0x1ac,
18077 NM_ROUND_L_S
= 0x0cc,
18078 NM_ROUND_L_D
= 0x1cc,
18079 NM_ROUND_W_S
= 0x0ec,
18080 NM_ROUND_W_D
= 0x1ec,
18088 NM_CVT_D_S
= 0x04d,
18089 NM_CVT_D_W
= 0x0cd,
18090 NM_CVT_D_L
= 0x14d,
18091 NM_CVT_S_D
= 0x06d,
18092 NM_CVT_S_W
= 0x0ed,
18093 NM_CVT_S_L
= 0x16d,
18096 /* P.LL instruction pool */
18102 /* P.SC instruction pool */
18108 /* P.DVP instruction pool */
18117 * nanoMIPS decoding engine
18122 /* extraction utilities */
18124 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18125 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18126 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18127 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18128 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18129 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18131 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    /* 3-bit encodings name $16-$19 then $4-$7, in that order. */
    static const int gpr3_regs[8] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_regs[r & 0x7];
}
18139 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    /* Like gpr3, except encoding 0 names $0 (store zero) instead of $16. */
    static const int gpr3_store_regs[8] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_store_regs[r & 0x7];
}
18147 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    /* 4-bit encodings name $8-$11, $4-$7, then $16-$23. */
    static const int gpr4_regs[16] = {
        8, 9, 10, 11, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_regs[r & 0xf];
}
18156 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    /* Identical to gpr4, except encoding 3 names $0 instead of $11. */
    static const int gpr4_zero_regs[16] = {
        8, 9, 10, 0, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_zero_regs[r & 0xf];
}
18166 /* extraction utilities */
18168 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18169 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18170 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18171 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18172 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18173 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Adjust the stack pointer ($29) by the signed immediate u. */
static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}
/*
 * nanoMIPS SAVE: store `count` GPRs below the stack pointer, then
 * decrement $sp by `u`.
 *
 * rt:    first register of the block to save; subsequent registers are
 *        (rt & 0x10) | ((rt + counter) & 0x1f), i.e. they wrap within
 *        the same half of the register file.
 * count: number of registers to store.
 * gp:    if nonzero, the last register stored is $28 ($gp) instead.
 * u:     total stack frame adjustment (subtracted from $sp at the end).
 */
static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        /* Stores go to negative offsets from the pre-adjustment $sp. */
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
/*
 * nanoMIPS RESTORE: reload `count` GPRs from the top of the frame, then
 * increment $sp by `u` (the inverse of gen_save).
 *
 * rt:    first register of the block to restore; subsequent registers are
 *        (rt & 0x10) | ((rt + counter) & 0x1f).
 * count: number of registers to load.
 * gp:    if nonzero, the last register loaded is $28 ($gp) instead.
 * u:     total stack frame adjustment (added to $sp at the end).
 */
static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        /* Loads come from just below $sp + u, mirroring gen_save's layout. */
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        /* Loaded words are sign-extended to the target register width. */
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
/*
 * Decode the nanoMIPS POOL16C_00 pool: 16-bit NOT/AND/XOR/OR register
 * forms, selected by opcode bits [3:2].  Both operands use the 3-bit
 * gpr3 register encoding.
 */
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
    case NM_NOT16:
        /* NOT16 is encoded as NOR rt, rs, $0. */
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case NM_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case NM_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case NM_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    }
}
18253 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18255 int rt
= extract32(ctx
->opcode
, 21, 5);
18256 int rs
= extract32(ctx
->opcode
, 16, 5);
18257 int rd
= extract32(ctx
->opcode
, 11, 5);
18259 switch (extract32(ctx
->opcode
, 3, 7)) {
18261 switch (extract32(ctx
->opcode
, 10, 1)) {
18264 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18268 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18274 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18278 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18281 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18284 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18287 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18290 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18293 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18296 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18299 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18303 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18306 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18309 switch (extract32(ctx
->opcode
, 10, 1)) {
18311 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18314 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18319 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18322 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18325 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18328 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18331 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18336 #ifndef CONFIG_USER_ONLY
18337 TCGv t0
= tcg_temp_new();
18338 switch (extract32(ctx
->opcode
, 10, 1)) {
18341 check_cp0_enabled(ctx
);
18342 gen_helper_dvp(t0
, cpu_env
);
18343 gen_store_gpr(t0
, rt
);
18348 check_cp0_enabled(ctx
);
18349 gen_helper_evp(t0
, cpu_env
);
18350 gen_store_gpr(t0
, rt
);
18357 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18362 TCGv t0
= tcg_temp_new();
18363 TCGv t1
= tcg_temp_new();
18364 TCGv t2
= tcg_temp_new();
18366 gen_load_gpr(t1
, rs
);
18367 gen_load_gpr(t2
, rt
);
18368 tcg_gen_add_tl(t0
, t1
, t2
);
18369 tcg_gen_ext32s_tl(t0
, t0
);
18370 tcg_gen_xor_tl(t1
, t1
, t2
);
18371 tcg_gen_xor_tl(t2
, t0
, t2
);
18372 tcg_gen_andc_tl(t1
, t2
, t1
);
18374 /* operands of same sign, result different sign */
18375 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18376 gen_store_gpr(t0
, rd
);
18384 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18387 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18390 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18393 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18396 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18399 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18402 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18405 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18407 #ifndef CONFIG_USER_ONLY
18409 check_cp0_enabled(ctx
);
18411 /* Treat as NOP. */
18414 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18417 check_cp0_enabled(ctx
);
18419 TCGv t0
= tcg_temp_new();
18421 gen_load_gpr(t0
, rt
);
18422 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18426 case NM_D_E_MT_VPE
:
18428 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18429 TCGv t0
= tcg_temp_new();
18436 gen_helper_dmt(t0
);
18437 gen_store_gpr(t0
, rt
);
18438 } else if (rs
== 0) {
18441 gen_helper_dvpe(t0
, cpu_env
);
18442 gen_store_gpr(t0
, rt
);
18444 generate_exception_end(ctx
, EXCP_RI
);
18451 gen_helper_emt(t0
);
18452 gen_store_gpr(t0
, rt
);
18453 } else if (rs
== 0) {
18456 gen_helper_evpe(t0
, cpu_env
);
18457 gen_store_gpr(t0
, rt
);
18459 generate_exception_end(ctx
, EXCP_RI
);
18470 TCGv t0
= tcg_temp_new();
18471 TCGv t1
= tcg_temp_new();
18473 gen_load_gpr(t0
, rt
);
18474 gen_load_gpr(t1
, rs
);
18475 gen_helper_fork(t0
, t1
);
18482 check_cp0_enabled(ctx
);
18484 /* Treat as NOP. */
18487 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18488 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18492 check_cp0_enabled(ctx
);
18493 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18494 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18499 TCGv t0
= tcg_temp_new();
18501 gen_load_gpr(t0
, rs
);
18502 gen_helper_yield(t0
, cpu_env
, t0
);
18503 gen_store_gpr(t0
, rt
);
18509 generate_exception_end(ctx
, EXCP_RI
);
18515 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18516 int ret
, int v1
, int v2
)
18522 t0
= tcg_temp_new_i32();
18524 v0_t
= tcg_temp_new();
18525 v1_t
= tcg_temp_new();
18527 tcg_gen_movi_i32(t0
, v2
>> 3);
18529 gen_load_gpr(v0_t
, ret
);
18530 gen_load_gpr(v1_t
, v1
);
18533 case NM_MAQ_S_W_PHR
:
18535 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18537 case NM_MAQ_S_W_PHL
:
18539 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18541 case NM_MAQ_SA_W_PHR
:
18543 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18545 case NM_MAQ_SA_W_PHL
:
18547 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18550 generate_exception_end(ctx
, EXCP_RI
);
18554 tcg_temp_free_i32(t0
);
18556 tcg_temp_free(v0_t
);
18557 tcg_temp_free(v1_t
);
18561 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18562 int ret
, int v1
, int v2
)
18565 TCGv t0
= tcg_temp_new();
18566 TCGv t1
= tcg_temp_new();
18567 TCGv v0_t
= tcg_temp_new();
18569 gen_load_gpr(v0_t
, v1
);
18572 case NM_POOL32AXF_1_0
:
18574 switch (extract32(ctx
->opcode
, 12, 2)) {
18576 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18579 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18582 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18585 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18589 case NM_POOL32AXF_1_1
:
18591 switch (extract32(ctx
->opcode
, 12, 2)) {
18593 tcg_gen_movi_tl(t0
, v2
);
18594 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18597 tcg_gen_movi_tl(t0
, v2
>> 3);
18598 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18601 generate_exception_end(ctx
, EXCP_RI
);
18605 case NM_POOL32AXF_1_3
:
18607 imm
= extract32(ctx
->opcode
, 14, 7);
18608 switch (extract32(ctx
->opcode
, 12, 2)) {
18610 tcg_gen_movi_tl(t0
, imm
);
18611 gen_helper_rddsp(t0
, t0
, cpu_env
);
18612 gen_store_gpr(t0
, ret
);
18615 gen_load_gpr(t0
, ret
);
18616 tcg_gen_movi_tl(t1
, imm
);
18617 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18620 tcg_gen_movi_tl(t0
, v2
>> 3);
18621 tcg_gen_movi_tl(t1
, v1
);
18622 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18623 gen_store_gpr(t0
, ret
);
18626 tcg_gen_movi_tl(t0
, v2
>> 3);
18627 tcg_gen_movi_tl(t1
, v1
);
18628 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18629 gen_store_gpr(t0
, ret
);
18633 case NM_POOL32AXF_1_4
:
18635 tcg_gen_movi_tl(t0
, v2
>> 2);
18636 switch (extract32(ctx
->opcode
, 12, 1)) {
18638 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18639 gen_store_gpr(t0
, ret
);
18642 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18643 gen_store_gpr(t0
, ret
);
18647 case NM_POOL32AXF_1_5
:
18648 opc
= extract32(ctx
->opcode
, 12, 2);
18649 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18651 case NM_POOL32AXF_1_7
:
18653 tcg_gen_movi_tl(t0
, v2
>> 3);
18654 tcg_gen_movi_tl(t1
, v1
);
18655 switch (extract32(ctx
->opcode
, 12, 2)) {
18657 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18658 gen_store_gpr(t0
, ret
);
18661 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18662 gen_store_gpr(t0
, ret
);
18665 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18666 gen_store_gpr(t0
, ret
);
18669 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18670 gen_store_gpr(t0
, ret
);
18675 generate_exception_end(ctx
, EXCP_RI
);
18681 tcg_temp_free(v0_t
);
18684 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18685 TCGv v0
, TCGv v1
, int rd
)
18689 t0
= tcg_temp_new_i32();
18691 tcg_gen_movi_i32(t0
, rd
>> 3);
18694 case NM_POOL32AXF_2_0_7
:
18695 switch (extract32(ctx
->opcode
, 9, 3)) {
18698 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18700 case NM_DPAQ_S_W_PH
:
18702 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18706 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18708 case NM_DPSQ_S_W_PH
:
18710 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18713 generate_exception_end(ctx
, EXCP_RI
);
18717 case NM_POOL32AXF_2_8_15
:
18718 switch (extract32(ctx
->opcode
, 9, 3)) {
18721 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18723 case NM_DPAQ_SA_L_W
:
18725 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18729 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18731 case NM_DPSQ_SA_L_W
:
18733 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18736 generate_exception_end(ctx
, EXCP_RI
);
18740 case NM_POOL32AXF_2_16_23
:
18741 switch (extract32(ctx
->opcode
, 9, 3)) {
18742 case NM_DPAU_H_QBL
:
18744 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18746 case NM_DPAQX_S_W_PH
:
18748 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18750 case NM_DPSU_H_QBL
:
18752 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18754 case NM_DPSQX_S_W_PH
:
18756 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18758 case NM_MULSA_W_PH
:
18760 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18763 generate_exception_end(ctx
, EXCP_RI
);
18767 case NM_POOL32AXF_2_24_31
:
18768 switch (extract32(ctx
->opcode
, 9, 3)) {
18769 case NM_DPAU_H_QBR
:
18771 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18773 case NM_DPAQX_SA_W_PH
:
18775 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18777 case NM_DPSU_H_QBR
:
18779 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18781 case NM_DPSQX_SA_W_PH
:
18783 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18785 case NM_MULSAQ_S_W_PH
:
18787 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18790 generate_exception_end(ctx
, EXCP_RI
);
18795 generate_exception_end(ctx
, EXCP_RI
);
18799 tcg_temp_free_i32(t0
);
18802 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18803 int rt
, int rs
, int rd
)
18806 TCGv t0
= tcg_temp_new();
18807 TCGv t1
= tcg_temp_new();
18808 TCGv v0_t
= tcg_temp_new();
18809 TCGv v1_t
= tcg_temp_new();
18811 gen_load_gpr(v0_t
, rt
);
18812 gen_load_gpr(v1_t
, rs
);
18815 case NM_POOL32AXF_2_0_7
:
18816 switch (extract32(ctx
->opcode
, 9, 3)) {
18818 case NM_DPAQ_S_W_PH
:
18820 case NM_DPSQ_S_W_PH
:
18821 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18826 gen_load_gpr(t0
, rs
);
18828 if (rd
!= 0 && rd
!= 2) {
18829 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18830 tcg_gen_ext32u_tl(t0
, t0
);
18831 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18832 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18834 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18840 int acc
= extract32(ctx
->opcode
, 14, 2);
18841 TCGv_i64 t2
= tcg_temp_new_i64();
18842 TCGv_i64 t3
= tcg_temp_new_i64();
18844 gen_load_gpr(t0
, rt
);
18845 gen_load_gpr(t1
, rs
);
18846 tcg_gen_ext_tl_i64(t2
, t0
);
18847 tcg_gen_ext_tl_i64(t3
, t1
);
18848 tcg_gen_mul_i64(t2
, t2
, t3
);
18849 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18850 tcg_gen_add_i64(t2
, t2
, t3
);
18851 tcg_temp_free_i64(t3
);
18852 gen_move_low32(cpu_LO
[acc
], t2
);
18853 gen_move_high32(cpu_HI
[acc
], t2
);
18854 tcg_temp_free_i64(t2
);
18860 int acc
= extract32(ctx
->opcode
, 14, 2);
18861 TCGv_i32 t2
= tcg_temp_new_i32();
18862 TCGv_i32 t3
= tcg_temp_new_i32();
18864 gen_load_gpr(t0
, rs
);
18865 gen_load_gpr(t1
, rt
);
18866 tcg_gen_trunc_tl_i32(t2
, t0
);
18867 tcg_gen_trunc_tl_i32(t3
, t1
);
18868 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18869 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18870 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18871 tcg_temp_free_i32(t2
);
18872 tcg_temp_free_i32(t3
);
18877 gen_load_gpr(v1_t
, rs
);
18878 tcg_gen_movi_tl(t0
, rd
>> 3);
18879 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18880 gen_store_gpr(t0
, ret
);
18884 case NM_POOL32AXF_2_8_15
:
18885 switch (extract32(ctx
->opcode
, 9, 3)) {
18887 case NM_DPAQ_SA_L_W
:
18889 case NM_DPSQ_SA_L_W
:
18890 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18895 int acc
= extract32(ctx
->opcode
, 14, 2);
18896 TCGv_i64 t2
= tcg_temp_new_i64();
18897 TCGv_i64 t3
= tcg_temp_new_i64();
18899 gen_load_gpr(t0
, rs
);
18900 gen_load_gpr(t1
, rt
);
18901 tcg_gen_ext32u_tl(t0
, t0
);
18902 tcg_gen_ext32u_tl(t1
, t1
);
18903 tcg_gen_extu_tl_i64(t2
, t0
);
18904 tcg_gen_extu_tl_i64(t3
, t1
);
18905 tcg_gen_mul_i64(t2
, t2
, t3
);
18906 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18907 tcg_gen_add_i64(t2
, t2
, t3
);
18908 tcg_temp_free_i64(t3
);
18909 gen_move_low32(cpu_LO
[acc
], t2
);
18910 gen_move_high32(cpu_HI
[acc
], t2
);
18911 tcg_temp_free_i64(t2
);
18917 int acc
= extract32(ctx
->opcode
, 14, 2);
18918 TCGv_i32 t2
= tcg_temp_new_i32();
18919 TCGv_i32 t3
= tcg_temp_new_i32();
18921 gen_load_gpr(t0
, rs
);
18922 gen_load_gpr(t1
, rt
);
18923 tcg_gen_trunc_tl_i32(t2
, t0
);
18924 tcg_gen_trunc_tl_i32(t3
, t1
);
18925 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18926 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18927 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18928 tcg_temp_free_i32(t2
);
18929 tcg_temp_free_i32(t3
);
18934 tcg_gen_movi_tl(t0
, rd
>> 3);
18935 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18936 gen_store_gpr(t0
, ret
);
18939 generate_exception_end(ctx
, EXCP_RI
);
18943 case NM_POOL32AXF_2_16_23
:
18944 switch (extract32(ctx
->opcode
, 9, 3)) {
18945 case NM_DPAU_H_QBL
:
18946 case NM_DPAQX_S_W_PH
:
18947 case NM_DPSU_H_QBL
:
18948 case NM_DPSQX_S_W_PH
:
18949 case NM_MULSA_W_PH
:
18950 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18954 tcg_gen_movi_tl(t0
, rd
>> 3);
18955 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18956 gen_store_gpr(t0
, ret
);
18961 int acc
= extract32(ctx
->opcode
, 14, 2);
18962 TCGv_i64 t2
= tcg_temp_new_i64();
18963 TCGv_i64 t3
= tcg_temp_new_i64();
18965 gen_load_gpr(t0
, rs
);
18966 gen_load_gpr(t1
, rt
);
18967 tcg_gen_ext_tl_i64(t2
, t0
);
18968 tcg_gen_ext_tl_i64(t3
, t1
);
18969 tcg_gen_mul_i64(t2
, t2
, t3
);
18970 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18971 tcg_gen_sub_i64(t2
, t3
, t2
);
18972 tcg_temp_free_i64(t3
);
18973 gen_move_low32(cpu_LO
[acc
], t2
);
18974 gen_move_high32(cpu_HI
[acc
], t2
);
18975 tcg_temp_free_i64(t2
);
18978 case NM_EXTRV_RS_W
:
18980 tcg_gen_movi_tl(t0
, rd
>> 3);
18981 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
18982 gen_store_gpr(t0
, ret
);
18986 case NM_POOL32AXF_2_24_31
:
18987 switch (extract32(ctx
->opcode
, 9, 3)) {
18988 case NM_DPAU_H_QBR
:
18989 case NM_DPAQX_SA_W_PH
:
18990 case NM_DPSU_H_QBR
:
18991 case NM_DPSQX_SA_W_PH
:
18992 case NM_MULSAQ_S_W_PH
:
18993 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18997 tcg_gen_movi_tl(t0
, rd
>> 3);
18998 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
18999 gen_store_gpr(t0
, ret
);
19004 int acc
= extract32(ctx
->opcode
, 14, 2);
19005 TCGv_i64 t2
= tcg_temp_new_i64();
19006 TCGv_i64 t3
= tcg_temp_new_i64();
19008 gen_load_gpr(t0
, rs
);
19009 gen_load_gpr(t1
, rt
);
19010 tcg_gen_ext32u_tl(t0
, t0
);
19011 tcg_gen_ext32u_tl(t1
, t1
);
19012 tcg_gen_extu_tl_i64(t2
, t0
);
19013 tcg_gen_extu_tl_i64(t3
, t1
);
19014 tcg_gen_mul_i64(t2
, t2
, t3
);
19015 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19016 tcg_gen_sub_i64(t2
, t3
, t2
);
19017 tcg_temp_free_i64(t3
);
19018 gen_move_low32(cpu_LO
[acc
], t2
);
19019 gen_move_high32(cpu_HI
[acc
], t2
);
19020 tcg_temp_free_i64(t2
);
19025 tcg_gen_movi_tl(t0
, rd
>> 3);
19026 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19027 gen_store_gpr(t0
, ret
);
19032 generate_exception_end(ctx
, EXCP_RI
);
19039 tcg_temp_free(v0_t
);
19040 tcg_temp_free(v1_t
);
19043 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19047 TCGv t0
= tcg_temp_new();
19048 TCGv v0_t
= tcg_temp_new();
19050 gen_load_gpr(v0_t
, rs
);
19055 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19056 gen_store_gpr(v0_t
, ret
);
19060 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19061 gen_store_gpr(v0_t
, ret
);
19065 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19066 gen_store_gpr(v0_t
, ret
);
19068 case NM_PRECEQ_W_PHL
:
19070 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19071 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19072 gen_store_gpr(v0_t
, ret
);
19074 case NM_PRECEQ_W_PHR
:
19076 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19077 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19078 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19079 gen_store_gpr(v0_t
, ret
);
19081 case NM_PRECEQU_PH_QBL
:
19083 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19084 gen_store_gpr(v0_t
, ret
);
19086 case NM_PRECEQU_PH_QBR
:
19088 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19089 gen_store_gpr(v0_t
, ret
);
19091 case NM_PRECEQU_PH_QBLA
:
19093 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19094 gen_store_gpr(v0_t
, ret
);
19096 case NM_PRECEQU_PH_QBRA
:
19098 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19099 gen_store_gpr(v0_t
, ret
);
19101 case NM_PRECEU_PH_QBL
:
19103 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19104 gen_store_gpr(v0_t
, ret
);
19106 case NM_PRECEU_PH_QBR
:
19108 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19109 gen_store_gpr(v0_t
, ret
);
19111 case NM_PRECEU_PH_QBLA
:
19113 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19114 gen_store_gpr(v0_t
, ret
);
19116 case NM_PRECEU_PH_QBRA
:
19118 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19119 gen_store_gpr(v0_t
, ret
);
19123 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19124 tcg_gen_shli_tl(t0
, v0_t
, 16);
19125 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19126 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19127 gen_store_gpr(v0_t
, ret
);
19131 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19132 tcg_gen_shli_tl(t0
, v0_t
, 8);
19133 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19134 tcg_gen_shli_tl(t0
, v0_t
, 16);
19135 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19136 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19137 gen_store_gpr(v0_t
, ret
);
19141 gen_helper_bitrev(v0_t
, v0_t
);
19142 gen_store_gpr(v0_t
, ret
);
19147 TCGv tv0
= tcg_temp_new();
19149 gen_load_gpr(tv0
, rt
);
19150 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19151 gen_store_gpr(v0_t
, ret
);
19152 tcg_temp_free(tv0
);
19155 case NM_RADDU_W_QB
:
19157 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19158 gen_store_gpr(v0_t
, ret
);
19161 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19165 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19169 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19172 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19175 generate_exception_end(ctx
, EXCP_RI
);
19179 tcg_temp_free(v0_t
);
19183 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19184 int rt
, int rs
, int rd
)
19186 TCGv t0
= tcg_temp_new();
19187 TCGv rs_t
= tcg_temp_new();
19189 gen_load_gpr(rs_t
, rs
);
19194 tcg_gen_movi_tl(t0
, rd
>> 2);
19195 switch (extract32(ctx
->opcode
, 12, 1)) {
19198 gen_helper_shra_qb(t0
, t0
, rs_t
);
19199 gen_store_gpr(t0
, rt
);
19203 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19204 gen_store_gpr(t0
, rt
);
19210 tcg_gen_movi_tl(t0
, rd
>> 1);
19211 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19212 gen_store_gpr(t0
, rt
);
19218 target_long result
;
19219 imm
= extract32(ctx
->opcode
, 13, 8);
19220 result
= (uint32_t)imm
<< 24 |
19221 (uint32_t)imm
<< 16 |
19222 (uint32_t)imm
<< 8 |
19224 result
= (int32_t)result
;
19225 tcg_gen_movi_tl(t0
, result
);
19226 gen_store_gpr(t0
, rt
);
19230 generate_exception_end(ctx
, EXCP_RI
);
19234 tcg_temp_free(rs_t
);
19238 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19240 int rt
= extract32(ctx
->opcode
, 21, 5);
19241 int rs
= extract32(ctx
->opcode
, 16, 5);
19242 int rd
= extract32(ctx
->opcode
, 11, 5);
19244 switch (extract32(ctx
->opcode
, 6, 3)) {
19245 case NM_POOL32AXF_1
:
19247 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19248 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19251 case NM_POOL32AXF_2
:
19253 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19254 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19257 case NM_POOL32AXF_4
:
19259 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19260 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19263 case NM_POOL32AXF_5
:
19264 switch (extract32(ctx
->opcode
, 9, 7)) {
19265 #ifndef CONFIG_USER_ONLY
19267 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19270 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19273 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19276 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19279 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19282 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19285 check_cp0_enabled(ctx
);
19287 TCGv t0
= tcg_temp_new();
19289 save_cpu_state(ctx
, 1);
19290 gen_helper_di(t0
, cpu_env
);
19291 gen_store_gpr(t0
, rt
);
19292 /* Stop translation as we may have switched the execution mode */
19293 ctx
->base
.is_jmp
= DISAS_STOP
;
19298 check_cp0_enabled(ctx
);
19300 TCGv t0
= tcg_temp_new();
19302 save_cpu_state(ctx
, 1);
19303 gen_helper_ei(t0
, cpu_env
);
19304 gen_store_gpr(t0
, rt
);
19305 /* Stop translation as we may have switched the execution mode */
19306 ctx
->base
.is_jmp
= DISAS_STOP
;
19311 gen_load_srsgpr(rs
, rt
);
19314 gen_store_srsgpr(rs
, rt
);
19317 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19320 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19323 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19327 generate_exception_end(ctx
, EXCP_RI
);
19331 case NM_POOL32AXF_7
:
19333 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19334 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19338 generate_exception_end(ctx
, EXCP_RI
);
19343 /* Immediate Value Compact Branches */
19344 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19345 int rt
, int32_t imm
, int32_t offset
)
19348 int bcond_compute
= 0;
19349 TCGv t0
= tcg_temp_new();
19350 TCGv t1
= tcg_temp_new();
19352 gen_load_gpr(t0
, rt
);
19353 tcg_gen_movi_tl(t1
, imm
);
19354 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19356 /* Load needed operands and calculate btarget */
19359 if (rt
== 0 && imm
== 0) {
19360 /* Unconditional branch */
19361 } else if (rt
== 0 && imm
!= 0) {
19366 cond
= TCG_COND_EQ
;
19372 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19373 generate_exception_end(ctx
, EXCP_RI
);
19375 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19376 /* Unconditional branch */
19377 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19381 tcg_gen_shri_tl(t0
, t0
, imm
);
19382 tcg_gen_andi_tl(t0
, t0
, 1);
19383 tcg_gen_movi_tl(t1
, 0);
19385 if (opc
== NM_BBEQZC
) {
19386 cond
= TCG_COND_EQ
;
19388 cond
= TCG_COND_NE
;
19393 if (rt
== 0 && imm
== 0) {
19396 } else if (rt
== 0 && imm
!= 0) {
19397 /* Unconditional branch */
19400 cond
= TCG_COND_NE
;
19404 if (rt
== 0 && imm
== 0) {
19405 /* Unconditional branch */
19408 cond
= TCG_COND_GE
;
19413 cond
= TCG_COND_LT
;
19416 if (rt
== 0 && imm
== 0) {
19417 /* Unconditional branch */
19420 cond
= TCG_COND_GEU
;
19425 cond
= TCG_COND_LTU
;
19428 MIPS_INVAL("Immediate Value Compact branch");
19429 generate_exception_end(ctx
, EXCP_RI
);
19433 if (bcond_compute
== 0) {
19434 /* Uncoditional compact branch */
19435 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19437 /* Conditional compact branch */
19438 TCGLabel
*fs
= gen_new_label();
19440 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19442 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19445 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19453 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19454 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19457 TCGv t0
= tcg_temp_new();
19458 TCGv t1
= tcg_temp_new();
19461 gen_load_gpr(t0
, rs
);
19465 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19468 /* calculate btarget */
19469 tcg_gen_shli_tl(t0
, t0
, 1);
19470 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19471 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19473 /* unconditional branch to register */
19474 tcg_gen_mov_tl(cpu_PC
, btarget
);
19475 tcg_gen_lookup_and_goto_ptr();
19481 /* nanoMIPS Branches */
19482 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19483 int rs
, int rt
, int32_t offset
)
19485 int bcond_compute
= 0;
19486 TCGv t0
= tcg_temp_new();
19487 TCGv t1
= tcg_temp_new();
19489 /* Load needed operands and calculate btarget */
19491 /* compact branch */
19494 gen_load_gpr(t0
, rs
);
19495 gen_load_gpr(t1
, rt
);
19497 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19501 if (rs
== 0 || rs
== rt
) {
19502 /* OPC_BLEZALC, OPC_BGEZALC */
19503 /* OPC_BGTZALC, OPC_BLTZALC */
19504 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19506 gen_load_gpr(t0
, rs
);
19507 gen_load_gpr(t1
, rt
);
19509 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19512 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19516 /* OPC_BEQZC, OPC_BNEZC */
19517 gen_load_gpr(t0
, rs
);
19519 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19521 /* OPC_JIC, OPC_JIALC */
19522 TCGv tbase
= tcg_temp_new();
19523 TCGv toffset
= tcg_temp_new();
19525 gen_load_gpr(tbase
, rt
);
19526 tcg_gen_movi_tl(toffset
, offset
);
19527 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19528 tcg_temp_free(tbase
);
19529 tcg_temp_free(toffset
);
19533 MIPS_INVAL("Compact branch/jump");
19534 generate_exception_end(ctx
, EXCP_RI
);
19538 if (bcond_compute
== 0) {
19539 /* Uncoditional compact branch */
19542 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19545 MIPS_INVAL("Compact branch/jump");
19546 generate_exception_end(ctx
, EXCP_RI
);
19550 /* Conditional compact branch */
19551 TCGLabel
*fs
= gen_new_label();
19555 if (rs
== 0 && rt
!= 0) {
19557 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19558 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19560 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19563 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19567 if (rs
== 0 && rt
!= 0) {
19569 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19570 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19572 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19575 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19579 if (rs
== 0 && rt
!= 0) {
19581 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19582 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19584 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19587 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19591 if (rs
== 0 && rt
!= 0) {
19593 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19594 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19596 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19599 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19603 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19606 MIPS_INVAL("Compact conditional branch/jump");
19607 generate_exception_end(ctx
, EXCP_RI
);
19611 /* Generating branch here as compact branches don't have delay slot */
19612 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19615 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19624 /* nanoMIPS CP1 Branches */
19625 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19626 int32_t ft
, int32_t offset
)
19628 target_ulong btarget
;
19629 TCGv_i64 t0
= tcg_temp_new_i64();
19631 gen_load_fpr64(ctx
, t0
, ft
);
19632 tcg_gen_andi_i64(t0
, t0
, 1);
19634 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19638 tcg_gen_xori_i64(t0
, t0
, 1);
19639 ctx
->hflags
|= MIPS_HFLAG_BC
;
19642 /* t0 already set */
19643 ctx
->hflags
|= MIPS_HFLAG_BC
;
19646 MIPS_INVAL("cp1 cond branch");
19647 generate_exception_end(ctx
, EXCP_RI
);
19651 tcg_gen_trunc_i64_tl(bcond
, t0
);
19653 ctx
->btarget
= btarget
;
19656 tcg_temp_free_i64(t0
);
19660 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19663 t0
= tcg_temp_new();
19664 t1
= tcg_temp_new();
19666 gen_load_gpr(t0
, rs
);
19667 gen_load_gpr(t1
, rt
);
19669 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19670 /* PP.LSXS instructions require shifting */
19671 switch (extract32(ctx
->opcode
, 7, 4)) {
19676 tcg_gen_shli_tl(t0
, t0
, 1);
19683 tcg_gen_shli_tl(t0
, t0
, 2);
19687 tcg_gen_shli_tl(t0
, t0
, 3);
19691 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19693 switch (extract32(ctx
->opcode
, 7, 4)) {
19695 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19697 gen_store_gpr(t0
, rd
);
19701 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19703 gen_store_gpr(t0
, rd
);
19707 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19709 gen_store_gpr(t0
, rd
);
19712 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19714 gen_store_gpr(t0
, rd
);
19718 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19720 gen_store_gpr(t0
, rd
);
19724 gen_load_gpr(t1
, rd
);
19725 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19731 gen_load_gpr(t1
, rd
);
19732 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19738 gen_load_gpr(t1
, rd
);
19739 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19743 /*case NM_LWC1XS:*/
19745 /*case NM_LDC1XS:*/
19747 /*case NM_SWC1XS:*/
19749 /*case NM_SDC1XS:*/
19750 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19751 check_cp1_enabled(ctx
);
19752 switch (extract32(ctx
->opcode
, 7, 4)) {
19754 /*case NM_LWC1XS:*/
19755 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19758 /*case NM_LDC1XS:*/
19759 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19762 /*case NM_SWC1XS:*/
19763 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19766 /*case NM_SDC1XS:*/
19767 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19771 generate_exception_err(ctx
, EXCP_CpU
, 1);
19775 generate_exception_end(ctx
, EXCP_RI
);
19783 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19787 rt
= extract32(ctx
->opcode
, 21, 5);
19788 rs
= extract32(ctx
->opcode
, 16, 5);
19789 rd
= extract32(ctx
->opcode
, 11, 5);
19791 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19792 generate_exception_end(ctx
, EXCP_RI
);
19795 check_cp1_enabled(ctx
);
19796 switch (extract32(ctx
->opcode
, 0, 3)) {
19798 switch (extract32(ctx
->opcode
, 3, 7)) {
19800 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19803 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19806 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19809 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19812 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19815 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19818 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19821 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19824 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19827 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19830 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19833 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19836 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19839 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19842 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19845 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19848 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19851 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19854 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19857 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19860 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19863 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19866 generate_exception_end(ctx
, EXCP_RI
);
19871 switch (extract32(ctx
->opcode
, 3, 3)) {
19873 switch (extract32(ctx
->opcode
, 9, 1)) {
19875 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19878 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19883 switch (extract32(ctx
->opcode
, 9, 1)) {
19885 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19888 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19893 switch (extract32(ctx
->opcode
, 9, 1)) {
19895 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19898 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19903 switch (extract32(ctx
->opcode
, 9, 1)) {
19905 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19908 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19913 switch (extract32(ctx
->opcode
, 6, 8)) {
19915 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19918 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19921 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19924 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19927 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19930 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19933 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19936 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19939 switch (extract32(ctx
->opcode
, 6, 9)) {
19941 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19944 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19947 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19950 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19953 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19956 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19959 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19962 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19965 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19968 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19971 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19974 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19977 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
19980 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
19983 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
19986 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
19989 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
19992 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
19995 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
19998 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20001 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20004 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20007 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20010 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20013 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20016 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20019 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20022 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20025 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20028 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20031 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20034 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20037 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20040 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20043 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20046 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20049 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20052 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20055 generate_exception_end(ctx
, EXCP_RI
);
20064 switch (extract32(ctx
->opcode
, 3, 3)) {
20065 case NM_CMP_CONDN_S
:
20066 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20068 case NM_CMP_CONDN_D
:
20069 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20072 generate_exception_end(ctx
, EXCP_RI
);
20077 generate_exception_end(ctx
, EXCP_RI
);
20082 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20083 int rd
, int rs
, int rt
)
20086 TCGv t0
= tcg_temp_new();
20087 TCGv v1_t
= tcg_temp_new();
20088 TCGv v2_t
= tcg_temp_new();
20090 gen_load_gpr(v1_t
, rs
);
20091 gen_load_gpr(v2_t
, rt
);
20096 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20100 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20104 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20106 case NM_CMPU_EQ_QB
:
20108 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20110 case NM_CMPU_LT_QB
:
20112 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20114 case NM_CMPU_LE_QB
:
20116 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20118 case NM_CMPGU_EQ_QB
:
20120 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20121 gen_store_gpr(v1_t
, ret
);
20123 case NM_CMPGU_LT_QB
:
20125 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20126 gen_store_gpr(v1_t
, ret
);
20128 case NM_CMPGU_LE_QB
:
20130 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20131 gen_store_gpr(v1_t
, ret
);
20133 case NM_CMPGDU_EQ_QB
:
20135 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20136 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20137 gen_store_gpr(v1_t
, ret
);
20139 case NM_CMPGDU_LT_QB
:
20141 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20142 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20143 gen_store_gpr(v1_t
, ret
);
20145 case NM_CMPGDU_LE_QB
:
20147 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20148 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20149 gen_store_gpr(v1_t
, ret
);
20153 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20154 gen_store_gpr(v1_t
, ret
);
20158 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20159 gen_store_gpr(v1_t
, ret
);
20163 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20164 gen_store_gpr(v1_t
, ret
);
20168 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20169 gen_store_gpr(v1_t
, ret
);
20173 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20174 gen_store_gpr(v1_t
, ret
);
20178 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20179 gen_store_gpr(v1_t
, ret
);
20183 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20184 gen_store_gpr(v1_t
, ret
);
20188 switch (extract32(ctx
->opcode
, 10, 1)) {
20191 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20192 gen_store_gpr(v1_t
, ret
);
20196 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20197 gen_store_gpr(v1_t
, ret
);
20201 case NM_ADDQH_R_PH
:
20203 switch (extract32(ctx
->opcode
, 10, 1)) {
20206 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20207 gen_store_gpr(v1_t
, ret
);
20211 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20212 gen_store_gpr(v1_t
, ret
);
20218 switch (extract32(ctx
->opcode
, 10, 1)) {
20221 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20222 gen_store_gpr(v1_t
, ret
);
20226 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20227 gen_store_gpr(v1_t
, ret
);
20233 switch (extract32(ctx
->opcode
, 10, 1)) {
20236 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20237 gen_store_gpr(v1_t
, ret
);
20241 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20242 gen_store_gpr(v1_t
, ret
);
20248 switch (extract32(ctx
->opcode
, 10, 1)) {
20251 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20252 gen_store_gpr(v1_t
, ret
);
20256 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20257 gen_store_gpr(v1_t
, ret
);
20261 case NM_ADDUH_R_QB
:
20263 switch (extract32(ctx
->opcode
, 10, 1)) {
20266 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20267 gen_store_gpr(v1_t
, ret
);
20271 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20272 gen_store_gpr(v1_t
, ret
);
20276 case NM_SHRAV_R_PH
:
20278 switch (extract32(ctx
->opcode
, 10, 1)) {
20281 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20282 gen_store_gpr(v1_t
, ret
);
20286 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20287 gen_store_gpr(v1_t
, ret
);
20291 case NM_SHRAV_R_QB
:
20293 switch (extract32(ctx
->opcode
, 10, 1)) {
20296 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20297 gen_store_gpr(v1_t
, ret
);
20301 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20302 gen_store_gpr(v1_t
, ret
);
20308 switch (extract32(ctx
->opcode
, 10, 1)) {
20311 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20312 gen_store_gpr(v1_t
, ret
);
20316 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20317 gen_store_gpr(v1_t
, ret
);
20321 case NM_SUBQH_R_PH
:
20323 switch (extract32(ctx
->opcode
, 10, 1)) {
20326 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20327 gen_store_gpr(v1_t
, ret
);
20331 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20332 gen_store_gpr(v1_t
, ret
);
20338 switch (extract32(ctx
->opcode
, 10, 1)) {
20341 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20342 gen_store_gpr(v1_t
, ret
);
20346 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20347 gen_store_gpr(v1_t
, ret
);
20353 switch (extract32(ctx
->opcode
, 10, 1)) {
20356 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20357 gen_store_gpr(v1_t
, ret
);
20361 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20362 gen_store_gpr(v1_t
, ret
);
20368 switch (extract32(ctx
->opcode
, 10, 1)) {
20371 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20372 gen_store_gpr(v1_t
, ret
);
20376 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20377 gen_store_gpr(v1_t
, ret
);
20381 case NM_SUBUH_R_QB
:
20383 switch (extract32(ctx
->opcode
, 10, 1)) {
20386 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20387 gen_store_gpr(v1_t
, ret
);
20391 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20392 gen_store_gpr(v1_t
, ret
);
20396 case NM_SHLLV_S_PH
:
20398 switch (extract32(ctx
->opcode
, 10, 1)) {
20401 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20402 gen_store_gpr(v1_t
, ret
);
20406 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20407 gen_store_gpr(v1_t
, ret
);
20411 case NM_PRECR_SRA_R_PH_W
:
20413 switch (extract32(ctx
->opcode
, 10, 1)) {
20415 /* PRECR_SRA_PH_W */
20417 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20418 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20420 gen_store_gpr(v1_t
, rt
);
20421 tcg_temp_free_i32(sa_t
);
20425 /* PRECR_SRA_R_PH_W */
20427 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20428 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20430 gen_store_gpr(v1_t
, rt
);
20431 tcg_temp_free_i32(sa_t
);
20436 case NM_MULEU_S_PH_QBL
:
20438 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20439 gen_store_gpr(v1_t
, ret
);
20441 case NM_MULEU_S_PH_QBR
:
20443 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20444 gen_store_gpr(v1_t
, ret
);
20446 case NM_MULQ_RS_PH
:
20448 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20449 gen_store_gpr(v1_t
, ret
);
20453 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20454 gen_store_gpr(v1_t
, ret
);
20458 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20459 gen_store_gpr(v1_t
, ret
);
20463 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20464 gen_store_gpr(v1_t
, ret
);
20468 gen_load_gpr(t0
, rs
);
20470 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20472 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20476 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20477 gen_store_gpr(v1_t
, ret
);
20481 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20482 gen_store_gpr(v1_t
, ret
);
20486 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20487 gen_store_gpr(v1_t
, ret
);
20491 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20492 gen_store_gpr(v1_t
, ret
);
20496 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20497 gen_store_gpr(v1_t
, ret
);
20501 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20502 gen_store_gpr(v1_t
, ret
);
20507 TCGv tv0
= tcg_temp_new();
20508 TCGv tv1
= tcg_temp_new();
20509 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20511 tcg_gen_movi_tl(tv0
, rd
>> 3);
20512 tcg_gen_movi_tl(tv1
, imm
);
20513 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20516 case NM_MULEQ_S_W_PHL
:
20518 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20519 gen_store_gpr(v1_t
, ret
);
20521 case NM_MULEQ_S_W_PHR
:
20523 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20524 gen_store_gpr(v1_t
, ret
);
20528 switch (extract32(ctx
->opcode
, 10, 1)) {
20531 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20532 gen_store_gpr(v1_t
, ret
);
20536 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20537 gen_store_gpr(v1_t
, ret
);
20541 case NM_PRECR_QB_PH
:
20543 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20544 gen_store_gpr(v1_t
, ret
);
20546 case NM_PRECRQ_QB_PH
:
20548 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20549 gen_store_gpr(v1_t
, ret
);
20551 case NM_PRECRQ_PH_W
:
20553 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20554 gen_store_gpr(v1_t
, ret
);
20556 case NM_PRECRQ_RS_PH_W
:
20558 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20559 gen_store_gpr(v1_t
, ret
);
20561 case NM_PRECRQU_S_QB_PH
:
20563 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20564 gen_store_gpr(v1_t
, ret
);
20568 tcg_gen_movi_tl(t0
, rd
);
20569 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20570 gen_store_gpr(v1_t
, rt
);
20574 tcg_gen_movi_tl(t0
, rd
>> 1);
20575 switch (extract32(ctx
->opcode
, 10, 1)) {
20578 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20579 gen_store_gpr(v1_t
, rt
);
20583 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20584 gen_store_gpr(v1_t
, rt
);
20590 tcg_gen_movi_tl(t0
, rd
>> 1);
20591 switch (extract32(ctx
->opcode
, 10, 2)) {
20594 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20595 gen_store_gpr(v1_t
, rt
);
20599 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20600 gen_store_gpr(v1_t
, rt
);
20603 generate_exception_end(ctx
, EXCP_RI
);
20609 tcg_gen_movi_tl(t0
, rd
);
20610 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20611 gen_store_gpr(v1_t
, rt
);
20617 imm
= sextract32(ctx
->opcode
, 11, 11);
20618 imm
= (int16_t)(imm
<< 6) >> 6;
20620 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20625 generate_exception_end(ctx
, EXCP_RI
);
20630 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20638 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20639 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20641 rt
= extract32(ctx
->opcode
, 21, 5);
20642 rs
= extract32(ctx
->opcode
, 16, 5);
20643 rd
= extract32(ctx
->opcode
, 11, 5);
20645 op
= extract32(ctx
->opcode
, 26, 6);
20650 switch (extract32(ctx
->opcode
, 19, 2)) {
20653 generate_exception_end(ctx
, EXCP_RI
);
20656 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20657 generate_exception_end(ctx
, EXCP_SYSCALL
);
20659 generate_exception_end(ctx
, EXCP_RI
);
20663 generate_exception_end(ctx
, EXCP_BREAK
);
20666 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20667 gen_helper_do_semihosting(cpu_env
);
20669 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20670 generate_exception_end(ctx
, EXCP_RI
);
20672 generate_exception_end(ctx
, EXCP_DBp
);
20679 imm
= extract32(ctx
->opcode
, 0, 16);
20681 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20683 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20685 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20690 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20691 extract32(ctx
->opcode
, 1, 20) << 1;
20692 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20693 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20697 switch (ctx
->opcode
& 0x07) {
20699 gen_pool32a0_nanomips_insn(env
, ctx
);
20703 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20704 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20708 switch (extract32(ctx
->opcode
, 3, 3)) {
20710 gen_p_lsx(ctx
, rd
, rs
, rt
);
20713 /* In nanoMIPS, the shift field directly encodes the shift
20714 * amount, meaning that the supported shift values are in
20715 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20716 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20717 extract32(ctx
->opcode
, 9, 2) - 1);
20720 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20723 gen_pool32axf_nanomips_insn(env
, ctx
);
20726 generate_exception_end(ctx
, EXCP_RI
);
20731 generate_exception_end(ctx
, EXCP_RI
);
20736 switch (ctx
->opcode
& 0x03) {
20739 offset
= extract32(ctx
->opcode
, 0, 21);
20740 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20744 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20747 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20750 generate_exception_end(ctx
, EXCP_RI
);
20756 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20757 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20758 switch (extract32(ctx
->opcode
, 16, 5)) {
20762 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20768 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20769 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20775 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20781 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20784 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20791 t0
= tcg_temp_new();
20793 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20796 tcg_gen_movi_tl(t0
, addr
);
20797 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20805 t0
= tcg_temp_new();
20806 t1
= tcg_temp_new();
20808 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20811 tcg_gen_movi_tl(t0
, addr
);
20812 gen_load_gpr(t1
, rt
);
20814 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20821 generate_exception_end(ctx
, EXCP_RI
);
20827 switch (extract32(ctx
->opcode
, 12, 4)) {
20829 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20832 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20835 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20838 switch (extract32(ctx
->opcode
, 20, 1)) {
20840 switch (ctx
->opcode
& 3) {
20842 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20843 extract32(ctx
->opcode
, 2, 1),
20844 extract32(ctx
->opcode
, 3, 9) << 3);
20847 case NM_RESTORE_JRC
:
20848 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20849 extract32(ctx
->opcode
, 2, 1),
20850 extract32(ctx
->opcode
, 3, 9) << 3);
20851 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20852 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20856 generate_exception_end(ctx
, EXCP_RI
);
20861 generate_exception_end(ctx
, EXCP_RI
);
20866 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20869 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20873 TCGv t0
= tcg_temp_new();
20875 imm
= extract32(ctx
->opcode
, 0, 12);
20876 gen_load_gpr(t0
, rs
);
20877 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20878 gen_store_gpr(t0
, rt
);
20884 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20885 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20889 int shift
= extract32(ctx
->opcode
, 0, 5);
20890 switch (extract32(ctx
->opcode
, 5, 4)) {
20892 if (rt
== 0 && shift
== 0) {
20894 } else if (rt
== 0 && shift
== 3) {
20895 /* EHB - treat as NOP */
20896 } else if (rt
== 0 && shift
== 5) {
20897 /* PAUSE - treat as NOP */
20898 } else if (rt
== 0 && shift
== 6) {
20900 gen_sync(extract32(ctx
->opcode
, 16, 5));
20903 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20904 extract32(ctx
->opcode
, 0, 5));
20908 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20909 extract32(ctx
->opcode
, 0, 5));
20912 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20913 extract32(ctx
->opcode
, 0, 5));
20916 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20917 extract32(ctx
->opcode
, 0, 5));
20925 TCGv t0
= tcg_temp_new();
20926 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20927 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20929 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20931 gen_load_gpr(t0
, rs
);
20932 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20935 tcg_temp_free_i32(shift
);
20936 tcg_temp_free_i32(shiftx
);
20937 tcg_temp_free_i32(stripe
);
20941 switch (((ctx
->opcode
>> 10) & 2) |
20942 (extract32(ctx
->opcode
, 5, 1))) {
20945 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20946 extract32(ctx
->opcode
, 6, 5));
20949 generate_exception_end(ctx
, EXCP_RI
);
20954 switch (((ctx
->opcode
>> 10) & 2) |
20955 (extract32(ctx
->opcode
, 5, 1))) {
20958 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20959 extract32(ctx
->opcode
, 6, 5));
20962 generate_exception_end(ctx
, EXCP_RI
);
20967 generate_exception_end(ctx
, EXCP_RI
);
20972 gen_pool32f_nanomips_insn(ctx
);
20977 switch (extract32(ctx
->opcode
, 1, 1)) {
20980 tcg_gen_movi_tl(cpu_gpr
[rt
],
20981 sextract32(ctx
->opcode
, 0, 1) << 31 |
20982 extract32(ctx
->opcode
, 2, 10) << 21 |
20983 extract32(ctx
->opcode
, 12, 9) << 12);
20988 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
20989 extract32(ctx
->opcode
, 2, 10) << 21 |
20990 extract32(ctx
->opcode
, 12, 9) << 12;
20992 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20993 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21000 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21002 switch (extract32(ctx
->opcode
, 18, 3)) {
21004 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21007 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21010 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21014 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21019 switch (ctx
->opcode
& 1) {
21021 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21024 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21030 switch (ctx
->opcode
& 1) {
21032 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21035 generate_exception_end(ctx
, EXCP_RI
);
21041 switch (ctx
->opcode
& 0x3) {
21043 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21046 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21049 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21052 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21057 generate_exception_end(ctx
, EXCP_RI
);
21064 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21066 switch (extract32(ctx
->opcode
, 12, 4)) {
21070 /* Break the TB to be able to sync copied instructions
21072 ctx
->base
.is_jmp
= DISAS_STOP
;
21075 /* Treat as NOP. */
21079 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21082 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21085 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21088 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21091 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21094 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21097 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21100 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21103 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21106 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21109 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21112 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21115 generate_exception_end(ctx
, EXCP_RI
);
21122 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21123 extract32(ctx
->opcode
, 0, 8);
21125 switch (extract32(ctx
->opcode
, 8, 3)) {
21127 switch (extract32(ctx
->opcode
, 11, 4)) {
21129 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21132 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21135 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21138 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21141 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21144 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21147 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21150 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21153 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21156 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21159 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21162 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21167 /* Break the TB to be able to sync copied instructions
21169 ctx
->base
.is_jmp
= DISAS_STOP
;
21172 /* Treat as NOP. */
21176 generate_exception_end(ctx
, EXCP_RI
);
21181 switch (extract32(ctx
->opcode
, 11, 4)) {
21186 TCGv t0
= tcg_temp_new();
21187 TCGv t1
= tcg_temp_new();
21189 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21191 switch (extract32(ctx
->opcode
, 11, 4)) {
21193 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21195 gen_store_gpr(t0
, rt
);
21198 gen_load_gpr(t1
, rt
);
21199 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21208 switch (ctx
->opcode
& 0x03) {
21210 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21214 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21219 switch (ctx
->opcode
& 0x03) {
21221 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21225 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21230 check_cp0_enabled(ctx
);
21231 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21232 gen_cache_operation(ctx
, rt
, rs
, s
);
21238 switch (extract32(ctx
->opcode
, 11, 4)) {
21241 check_cp0_enabled(ctx
);
21242 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21246 check_cp0_enabled(ctx
);
21247 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21251 check_cp0_enabled(ctx
);
21252 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21256 /* case NM_SYNCIE */
21258 check_cp0_enabled(ctx
);
21259 /* Break the TB to be able to sync copied instructions
21261 ctx
->base
.is_jmp
= DISAS_STOP
;
21263 /* case NM_PREFE */
21265 check_cp0_enabled(ctx
);
21266 /* Treat as NOP. */
21271 check_cp0_enabled(ctx
);
21272 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21276 check_cp0_enabled(ctx
);
21277 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21281 check_cp0_enabled(ctx
);
21282 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21285 check_nms_dl_il_sl_tl_l2c(ctx
);
21286 gen_cache_operation(ctx
, rt
, rs
, s
);
21290 check_cp0_enabled(ctx
);
21291 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21295 check_cp0_enabled(ctx
);
21296 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21299 switch (extract32(ctx
->opcode
, 2, 2)) {
21303 check_cp0_enabled(ctx
);
21304 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21309 check_cp0_enabled(ctx
);
21310 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21313 generate_exception_end(ctx
, EXCP_RI
);
21318 switch (extract32(ctx
->opcode
, 2, 2)) {
21322 check_cp0_enabled(ctx
);
21323 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21328 check_cp0_enabled(ctx
);
21329 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21332 generate_exception_end(ctx
, EXCP_RI
);
21342 int count
= extract32(ctx
->opcode
, 12, 3);
21345 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21346 extract32(ctx
->opcode
, 0, 8);
21347 TCGv va
= tcg_temp_new();
21348 TCGv t1
= tcg_temp_new();
21349 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21350 NM_P_LS_UAWM
? MO_UNALN
: 0;
21352 count
= (count
== 0) ? 8 : count
;
21353 while (counter
!= count
) {
21354 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21355 int this_offset
= offset
+ (counter
<< 2);
21357 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21359 switch (extract32(ctx
->opcode
, 11, 1)) {
21361 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21363 gen_store_gpr(t1
, this_rt
);
21364 if ((this_rt
== rs
) &&
21365 (counter
!= (count
- 1))) {
21366 /* UNPREDICTABLE */
21370 this_rt
= (rt
== 0) ? 0 : this_rt
;
21371 gen_load_gpr(t1
, this_rt
);
21372 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21383 generate_exception_end(ctx
, EXCP_RI
);
21391 TCGv t0
= tcg_temp_new();
21392 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21393 extract32(ctx
->opcode
, 1, 20) << 1;
21394 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21395 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21396 extract32(ctx
->opcode
, 21, 3));
21397 gen_load_gpr(t0
, rt
);
21398 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21399 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21405 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21406 extract32(ctx
->opcode
, 1, 24) << 1;
21408 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21410 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21413 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21418 switch (extract32(ctx
->opcode
, 12, 4)) {
21421 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21424 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21427 generate_exception_end(ctx
, EXCP_RI
);
21433 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21434 extract32(ctx
->opcode
, 1, 13) << 1;
21435 switch (extract32(ctx
->opcode
, 14, 2)) {
21438 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21441 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21442 extract32(ctx
->opcode
, 1, 13) << 1;
21443 check_cp1_enabled(ctx
);
21444 switch (extract32(ctx
->opcode
, 16, 5)) {
21446 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21449 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21454 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21455 extract32(ctx
->opcode
, 0, 1) << 13;
21457 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21462 generate_exception_end(ctx
, EXCP_RI
);
21468 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21470 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21474 if (rs
== rt
|| rt
== 0) {
21475 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21476 } else if (rs
== 0) {
21477 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21479 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21487 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21488 extract32(ctx
->opcode
, 1, 13) << 1;
21489 switch (extract32(ctx
->opcode
, 14, 2)) {
21492 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21495 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21497 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21499 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21503 if (rs
== 0 || rs
== rt
) {
21505 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21507 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21511 generate_exception_end(ctx
, EXCP_RI
);
21518 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21519 extract32(ctx
->opcode
, 1, 10) << 1;
21520 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21522 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21527 generate_exception_end(ctx
, EXCP_RI
);
21533 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21536 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21537 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21538 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21542 /* make sure instructions are on a halfword boundary */
21543 if (ctx
->base
.pc_next
& 0x1) {
21544 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21545 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21546 tcg_temp_free(tmp
);
21547 generate_exception_end(ctx
, EXCP_AdEL
);
21551 op
= extract32(ctx
->opcode
, 10, 6);
21554 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21557 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21558 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21561 switch (extract32(ctx
->opcode
, 3, 2)) {
21562 case NM_P16_SYSCALL
:
21563 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21564 generate_exception_end(ctx
, EXCP_SYSCALL
);
21566 generate_exception_end(ctx
, EXCP_RI
);
21570 generate_exception_end(ctx
, EXCP_BREAK
);
21573 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21574 gen_helper_do_semihosting(cpu_env
);
21576 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21577 generate_exception_end(ctx
, EXCP_RI
);
21579 generate_exception_end(ctx
, EXCP_DBp
);
21584 generate_exception_end(ctx
, EXCP_RI
);
21591 int shift
= extract32(ctx
->opcode
, 0, 3);
21593 shift
= (shift
== 0) ? 8 : shift
;
21595 switch (extract32(ctx
->opcode
, 3, 1)) {
21603 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21607 switch (ctx
->opcode
& 1) {
21609 gen_pool16c_nanomips_insn(ctx
);
21612 gen_ldxs(ctx
, rt
, rs
, rd
);
21617 switch (extract32(ctx
->opcode
, 6, 1)) {
21619 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21620 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21623 generate_exception_end(ctx
, EXCP_RI
);
21628 switch (extract32(ctx
->opcode
, 3, 1)) {
21630 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21631 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21633 case NM_P_ADDIURS5
:
21634 rt
= extract32(ctx
->opcode
, 5, 5);
21636 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21637 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21638 (extract32(ctx
->opcode
, 0, 3));
21639 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21645 switch (ctx
->opcode
& 0x1) {
21647 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21650 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21655 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21656 extract32(ctx
->opcode
, 5, 3);
21657 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21658 extract32(ctx
->opcode
, 0, 3);
21659 rt
= decode_gpr_gpr4(rt
);
21660 rs
= decode_gpr_gpr4(rs
);
21661 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21662 (extract32(ctx
->opcode
, 3, 1))) {
21665 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21669 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21672 generate_exception_end(ctx
, EXCP_RI
);
21678 int imm
= extract32(ctx
->opcode
, 0, 7);
21679 imm
= (imm
== 0x7f ? -1 : imm
);
21681 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21687 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21688 u
= (u
== 12) ? 0xff :
21689 (u
== 13) ? 0xffff : u
;
21690 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21694 offset
= extract32(ctx
->opcode
, 0, 2);
21695 switch (extract32(ctx
->opcode
, 2, 2)) {
21697 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21700 rt
= decode_gpr_gpr3_src_store(
21701 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21702 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21705 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21708 generate_exception_end(ctx
, EXCP_RI
);
21713 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21714 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21716 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21719 rt
= decode_gpr_gpr3_src_store(
21720 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21721 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21724 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21727 generate_exception_end(ctx
, EXCP_RI
);
21732 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21733 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21736 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21737 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21738 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21742 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21743 extract32(ctx
->opcode
, 5, 3);
21744 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21745 extract32(ctx
->opcode
, 0, 3);
21746 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21747 (extract32(ctx
->opcode
, 8, 1) << 2);
21748 rt
= decode_gpr_gpr4(rt
);
21749 rs
= decode_gpr_gpr4(rs
);
21750 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21754 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21755 extract32(ctx
->opcode
, 5, 3);
21756 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21757 extract32(ctx
->opcode
, 0, 3);
21758 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21759 (extract32(ctx
->opcode
, 8, 1) << 2);
21760 rt
= decode_gpr_gpr4_zero(rt
);
21761 rs
= decode_gpr_gpr4(rs
);
21762 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21765 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21766 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21769 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21770 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21771 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21774 rt
= decode_gpr_gpr3_src_store(
21775 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21776 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21777 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21778 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21781 rt
= decode_gpr_gpr3_src_store(
21782 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21783 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21784 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21787 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21788 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21789 (extract32(ctx
->opcode
, 1, 9) << 1));
21792 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21793 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21794 (extract32(ctx
->opcode
, 1, 9) << 1));
21797 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21798 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21799 (extract32(ctx
->opcode
, 1, 6) << 1));
21802 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21803 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21804 (extract32(ctx
->opcode
, 1, 6) << 1));
21807 switch (ctx
->opcode
& 0xf) {
21810 switch (extract32(ctx
->opcode
, 4, 1)) {
21812 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21813 extract32(ctx
->opcode
, 5, 5), 0, 0);
21816 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21817 extract32(ctx
->opcode
, 5, 5), 31, 0);
21824 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21825 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21826 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21827 extract32(ctx
->opcode
, 0, 4) << 1);
21834 int count
= extract32(ctx
->opcode
, 0, 4);
21835 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21837 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21838 switch (extract32(ctx
->opcode
, 8, 1)) {
21840 gen_save(ctx
, rt
, count
, 0, u
);
21842 case NM_RESTORE_JRC16
:
21843 gen_restore(ctx
, rt
, count
, 0, u
);
21844 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21853 static const int gpr2reg1
[] = {4, 5, 6, 7};
21854 static const int gpr2reg2
[] = {5, 6, 7, 8};
21856 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21857 extract32(ctx
->opcode
, 8, 1);
21858 int r1
= gpr2reg1
[rd2
];
21859 int r2
= gpr2reg2
[rd2
];
21860 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21861 extract32(ctx
->opcode
, 0, 3);
21862 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21863 extract32(ctx
->opcode
, 5, 3);
21864 TCGv t0
= tcg_temp_new();
21865 TCGv t1
= tcg_temp_new();
21866 if (op
== NM_MOVEP
) {
21869 rs
= decode_gpr_gpr4_zero(r3
);
21870 rt
= decode_gpr_gpr4_zero(r4
);
21872 rd
= decode_gpr_gpr4(r3
);
21873 re
= decode_gpr_gpr4(r4
);
21877 gen_load_gpr(t0
, rs
);
21878 gen_load_gpr(t1
, rt
);
21879 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21880 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21886 return decode_nanomips_32_48_opc(env
, ctx
);
21893 /* SmartMIPS extension to MIPS32 */
21895 #if defined(TARGET_MIPS64)
21897 /* MDMX extension to MIPS64 */
21901 /* MIPSDSP functions. */
21902 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21903 int rd
, int base
, int offset
)
21908 t0
= tcg_temp_new();
21911 gen_load_gpr(t0
, offset
);
21912 } else if (offset
== 0) {
21913 gen_load_gpr(t0
, base
);
21915 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21920 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21921 gen_store_gpr(t0
, rd
);
21924 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21925 gen_store_gpr(t0
, rd
);
21928 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21929 gen_store_gpr(t0
, rd
);
21931 #if defined(TARGET_MIPS64)
21933 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21934 gen_store_gpr(t0
, rd
);
21941 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21942 int ret
, int v1
, int v2
)
21948 /* Treat as NOP. */
21952 v1_t
= tcg_temp_new();
21953 v2_t
= tcg_temp_new();
21955 gen_load_gpr(v1_t
, v1
);
21956 gen_load_gpr(v2_t
, v2
);
21959 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21960 case OPC_MULT_G_2E
:
21964 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21966 case OPC_ADDUH_R_QB
:
21967 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21970 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21972 case OPC_ADDQH_R_PH
:
21973 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21976 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21978 case OPC_ADDQH_R_W
:
21979 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21982 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21984 case OPC_SUBUH_R_QB
:
21985 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21988 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21990 case OPC_SUBQH_R_PH
:
21991 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21994 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21996 case OPC_SUBQH_R_W
:
21997 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22001 case OPC_ABSQ_S_PH_DSP
:
22003 case OPC_ABSQ_S_QB
:
22005 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22007 case OPC_ABSQ_S_PH
:
22009 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22013 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22015 case OPC_PRECEQ_W_PHL
:
22017 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22018 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22020 case OPC_PRECEQ_W_PHR
:
22022 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22023 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22024 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22026 case OPC_PRECEQU_PH_QBL
:
22028 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22030 case OPC_PRECEQU_PH_QBR
:
22032 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22034 case OPC_PRECEQU_PH_QBLA
:
22036 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22038 case OPC_PRECEQU_PH_QBRA
:
22040 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22042 case OPC_PRECEU_PH_QBL
:
22044 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22046 case OPC_PRECEU_PH_QBR
:
22048 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22050 case OPC_PRECEU_PH_QBLA
:
22052 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22054 case OPC_PRECEU_PH_QBRA
:
22056 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22060 case OPC_ADDU_QB_DSP
:
22064 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22066 case OPC_ADDQ_S_PH
:
22068 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22072 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22076 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22078 case OPC_ADDU_S_QB
:
22080 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22084 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22086 case OPC_ADDU_S_PH
:
22088 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22092 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22094 case OPC_SUBQ_S_PH
:
22096 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22100 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22104 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22106 case OPC_SUBU_S_QB
:
22108 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22112 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22114 case OPC_SUBU_S_PH
:
22116 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22120 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22124 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22128 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22130 case OPC_RADDU_W_QB
:
22132 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22136 case OPC_CMPU_EQ_QB_DSP
:
22138 case OPC_PRECR_QB_PH
:
22140 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22142 case OPC_PRECRQ_QB_PH
:
22144 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22146 case OPC_PRECR_SRA_PH_W
:
22149 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22150 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22152 tcg_temp_free_i32(sa_t
);
22155 case OPC_PRECR_SRA_R_PH_W
:
22158 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22159 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22161 tcg_temp_free_i32(sa_t
);
22164 case OPC_PRECRQ_PH_W
:
22166 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22168 case OPC_PRECRQ_RS_PH_W
:
22170 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22172 case OPC_PRECRQU_S_QB_PH
:
22174 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22178 #ifdef TARGET_MIPS64
22179 case OPC_ABSQ_S_QH_DSP
:
22181 case OPC_PRECEQ_L_PWL
:
22183 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22185 case OPC_PRECEQ_L_PWR
:
22187 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22189 case OPC_PRECEQ_PW_QHL
:
22191 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22193 case OPC_PRECEQ_PW_QHR
:
22195 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22197 case OPC_PRECEQ_PW_QHLA
:
22199 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22201 case OPC_PRECEQ_PW_QHRA
:
22203 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22205 case OPC_PRECEQU_QH_OBL
:
22207 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22209 case OPC_PRECEQU_QH_OBR
:
22211 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22213 case OPC_PRECEQU_QH_OBLA
:
22215 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22217 case OPC_PRECEQU_QH_OBRA
:
22219 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22221 case OPC_PRECEU_QH_OBL
:
22223 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22225 case OPC_PRECEU_QH_OBR
:
22227 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22229 case OPC_PRECEU_QH_OBLA
:
22231 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22233 case OPC_PRECEU_QH_OBRA
:
22235 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22237 case OPC_ABSQ_S_OB
:
22239 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22241 case OPC_ABSQ_S_PW
:
22243 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22245 case OPC_ABSQ_S_QH
:
22247 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22251 case OPC_ADDU_OB_DSP
:
22253 case OPC_RADDU_L_OB
:
22255 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22259 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22261 case OPC_SUBQ_S_PW
:
22263 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22267 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22269 case OPC_SUBQ_S_QH
:
22271 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22275 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22277 case OPC_SUBU_S_OB
:
22279 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22283 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22285 case OPC_SUBU_S_QH
:
22287 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22291 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22293 case OPC_SUBUH_R_OB
:
22295 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22299 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22301 case OPC_ADDQ_S_PW
:
22303 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22307 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22309 case OPC_ADDQ_S_QH
:
22311 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22315 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22317 case OPC_ADDU_S_OB
:
22319 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22323 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22325 case OPC_ADDU_S_QH
:
22327 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22331 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22333 case OPC_ADDUH_R_OB
:
22335 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22339 case OPC_CMPU_EQ_OB_DSP
:
22341 case OPC_PRECR_OB_QH
:
22343 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22345 case OPC_PRECR_SRA_QH_PW
:
22348 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22349 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22350 tcg_temp_free_i32(ret_t
);
22353 case OPC_PRECR_SRA_R_QH_PW
:
22356 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22357 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22358 tcg_temp_free_i32(sa_v
);
22361 case OPC_PRECRQ_OB_QH
:
22363 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22365 case OPC_PRECRQ_PW_L
:
22367 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22369 case OPC_PRECRQ_QH_PW
:
22371 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22373 case OPC_PRECRQ_RS_QH_PW
:
22375 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22377 case OPC_PRECRQU_S_OB_QH
:
22379 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22386 tcg_temp_free(v1_t
);
22387 tcg_temp_free(v2_t
);
22390 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22391 int ret
, int v1
, int v2
)
22399 /* Treat as NOP. */
22403 t0
= tcg_temp_new();
22404 v1_t
= tcg_temp_new();
22405 v2_t
= tcg_temp_new();
22407 tcg_gen_movi_tl(t0
, v1
);
22408 gen_load_gpr(v1_t
, v1
);
22409 gen_load_gpr(v2_t
, v2
);
22412 case OPC_SHLL_QB_DSP
:
22414 op2
= MASK_SHLL_QB(ctx
->opcode
);
22418 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22422 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22426 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22430 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22432 case OPC_SHLL_S_PH
:
22434 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22436 case OPC_SHLLV_S_PH
:
22438 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22442 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22444 case OPC_SHLLV_S_W
:
22446 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22450 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22454 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22458 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22462 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22466 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22468 case OPC_SHRA_R_QB
:
22470 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22474 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22476 case OPC_SHRAV_R_QB
:
22478 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22482 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22484 case OPC_SHRA_R_PH
:
22486 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22490 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22492 case OPC_SHRAV_R_PH
:
22494 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22498 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22500 case OPC_SHRAV_R_W
:
22502 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22504 default: /* Invalid */
22505 MIPS_INVAL("MASK SHLL.QB");
22506 generate_exception_end(ctx
, EXCP_RI
);
22511 #ifdef TARGET_MIPS64
22512 case OPC_SHLL_OB_DSP
:
22513 op2
= MASK_SHLL_OB(ctx
->opcode
);
22517 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22521 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22523 case OPC_SHLL_S_PW
:
22525 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22527 case OPC_SHLLV_S_PW
:
22529 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22533 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22537 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22541 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22545 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22547 case OPC_SHLL_S_QH
:
22549 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22551 case OPC_SHLLV_S_QH
:
22553 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22557 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22561 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22563 case OPC_SHRA_R_OB
:
22565 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22567 case OPC_SHRAV_R_OB
:
22569 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22573 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22577 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22579 case OPC_SHRA_R_PW
:
22581 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22583 case OPC_SHRAV_R_PW
:
22585 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22589 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22593 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22595 case OPC_SHRA_R_QH
:
22597 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22599 case OPC_SHRAV_R_QH
:
22601 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22605 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22609 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22613 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22617 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22619 default: /* Invalid */
22620 MIPS_INVAL("MASK SHLL.OB");
22621 generate_exception_end(ctx
, EXCP_RI
);
22629 tcg_temp_free(v1_t
);
22630 tcg_temp_free(v2_t
);
22633 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22634 int ret
, int v1
, int v2
, int check_ret
)
22640 if ((ret
== 0) && (check_ret
== 1)) {
22641 /* Treat as NOP. */
22645 t0
= tcg_temp_new_i32();
22646 v1_t
= tcg_temp_new();
22647 v2_t
= tcg_temp_new();
22649 tcg_gen_movi_i32(t0
, ret
);
22650 gen_load_gpr(v1_t
, v1
);
22651 gen_load_gpr(v2_t
, v2
);
22654 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22655 * the same mask and op1. */
22656 case OPC_MULT_G_2E
:
22660 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22663 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22666 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22668 case OPC_MULQ_RS_W
:
22669 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22673 case OPC_DPA_W_PH_DSP
:
22675 case OPC_DPAU_H_QBL
:
22677 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22679 case OPC_DPAU_H_QBR
:
22681 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22683 case OPC_DPSU_H_QBL
:
22685 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22687 case OPC_DPSU_H_QBR
:
22689 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22693 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22695 case OPC_DPAX_W_PH
:
22697 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22699 case OPC_DPAQ_S_W_PH
:
22701 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22703 case OPC_DPAQX_S_W_PH
:
22705 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22707 case OPC_DPAQX_SA_W_PH
:
22709 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22713 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22715 case OPC_DPSX_W_PH
:
22717 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22719 case OPC_DPSQ_S_W_PH
:
22721 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22723 case OPC_DPSQX_S_W_PH
:
22725 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22727 case OPC_DPSQX_SA_W_PH
:
22729 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22731 case OPC_MULSAQ_S_W_PH
:
22733 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22735 case OPC_DPAQ_SA_L_W
:
22737 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22739 case OPC_DPSQ_SA_L_W
:
22741 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22743 case OPC_MAQ_S_W_PHL
:
22745 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22747 case OPC_MAQ_S_W_PHR
:
22749 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22751 case OPC_MAQ_SA_W_PHL
:
22753 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22755 case OPC_MAQ_SA_W_PHR
:
22757 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22759 case OPC_MULSA_W_PH
:
22761 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22765 #ifdef TARGET_MIPS64
22766 case OPC_DPAQ_W_QH_DSP
:
22768 int ac
= ret
& 0x03;
22769 tcg_gen_movi_i32(t0
, ac
);
22774 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22778 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22782 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22786 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22790 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22792 case OPC_DPAQ_S_W_QH
:
22794 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22796 case OPC_DPAQ_SA_L_PW
:
22798 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22800 case OPC_DPAU_H_OBL
:
22802 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22804 case OPC_DPAU_H_OBR
:
22806 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22810 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22812 case OPC_DPSQ_S_W_QH
:
22814 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22816 case OPC_DPSQ_SA_L_PW
:
22818 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22820 case OPC_DPSU_H_OBL
:
22822 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22824 case OPC_DPSU_H_OBR
:
22826 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22828 case OPC_MAQ_S_L_PWL
:
22830 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22832 case OPC_MAQ_S_L_PWR
:
22834 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22836 case OPC_MAQ_S_W_QHLL
:
22838 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22840 case OPC_MAQ_SA_W_QHLL
:
22842 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22844 case OPC_MAQ_S_W_QHLR
:
22846 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22848 case OPC_MAQ_SA_W_QHLR
:
22850 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22852 case OPC_MAQ_S_W_QHRL
:
22854 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22856 case OPC_MAQ_SA_W_QHRL
:
22858 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22860 case OPC_MAQ_S_W_QHRR
:
22862 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22864 case OPC_MAQ_SA_W_QHRR
:
22866 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22868 case OPC_MULSAQ_S_L_PW
:
22870 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22872 case OPC_MULSAQ_S_W_QH
:
22874 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22880 case OPC_ADDU_QB_DSP
:
22882 case OPC_MULEU_S_PH_QBL
:
22884 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22886 case OPC_MULEU_S_PH_QBR
:
22888 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22890 case OPC_MULQ_RS_PH
:
22892 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22894 case OPC_MULEQ_S_W_PHL
:
22896 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22898 case OPC_MULEQ_S_W_PHR
:
22900 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22902 case OPC_MULQ_S_PH
:
22904 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22908 #ifdef TARGET_MIPS64
22909 case OPC_ADDU_OB_DSP
:
22911 case OPC_MULEQ_S_PW_QHL
:
22913 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22915 case OPC_MULEQ_S_PW_QHR
:
22917 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22919 case OPC_MULEU_S_QH_OBL
:
22921 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22923 case OPC_MULEU_S_QH_OBR
:
22925 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22927 case OPC_MULQ_RS_QH
:
22929 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22936 tcg_temp_free_i32(t0
);
22937 tcg_temp_free(v1_t
);
22938 tcg_temp_free(v2_t
);
22941 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22949 /* Treat as NOP. */
22953 t0
= tcg_temp_new();
22954 val_t
= tcg_temp_new();
22955 gen_load_gpr(val_t
, val
);
22958 case OPC_ABSQ_S_PH_DSP
:
22962 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22967 target_long result
;
22968 imm
= (ctx
->opcode
>> 16) & 0xFF;
22969 result
= (uint32_t)imm
<< 24 |
22970 (uint32_t)imm
<< 16 |
22971 (uint32_t)imm
<< 8 |
22973 result
= (int32_t)result
;
22974 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
22979 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22980 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22981 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22982 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22983 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22984 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22989 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22990 imm
= (int16_t)(imm
<< 6) >> 6;
22991 tcg_gen_movi_tl(cpu_gpr
[ret
], \
22992 (target_long
)((int32_t)imm
<< 16 | \
22998 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22999 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23000 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23001 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23005 #ifdef TARGET_MIPS64
23006 case OPC_ABSQ_S_QH_DSP
:
23013 imm
= (ctx
->opcode
>> 16) & 0xFF;
23014 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23015 temp
= (temp
<< 16) | temp
;
23016 temp
= (temp
<< 32) | temp
;
23017 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23025 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23026 imm
= (int16_t)(imm
<< 6) >> 6;
23027 temp
= ((target_long
)imm
<< 32) \
23028 | ((target_long
)imm
& 0xFFFFFFFF);
23029 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23037 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23038 imm
= (int16_t)(imm
<< 6) >> 6;
23040 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23041 ((uint64_t)(uint16_t)imm
<< 32) |
23042 ((uint64_t)(uint16_t)imm
<< 16) |
23043 (uint64_t)(uint16_t)imm
;
23044 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23049 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23050 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23051 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23052 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23053 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23054 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23055 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23059 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23060 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23061 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23065 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23066 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23067 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23068 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23069 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23076 tcg_temp_free(val_t
);
23079 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23080 uint32_t op1
, uint32_t op2
,
23081 int ret
, int v1
, int v2
, int check_ret
)
23087 if ((ret
== 0) && (check_ret
== 1)) {
23088 /* Treat as NOP. */
23092 t1
= tcg_temp_new();
23093 v1_t
= tcg_temp_new();
23094 v2_t
= tcg_temp_new();
23096 gen_load_gpr(v1_t
, v1
);
23097 gen_load_gpr(v2_t
, v2
);
23100 case OPC_CMPU_EQ_QB_DSP
:
23102 case OPC_CMPU_EQ_QB
:
23104 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23106 case OPC_CMPU_LT_QB
:
23108 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23110 case OPC_CMPU_LE_QB
:
23112 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23114 case OPC_CMPGU_EQ_QB
:
23116 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23118 case OPC_CMPGU_LT_QB
:
23120 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23122 case OPC_CMPGU_LE_QB
:
23124 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23126 case OPC_CMPGDU_EQ_QB
:
23128 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23129 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23130 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23131 tcg_gen_shli_tl(t1
, t1
, 24);
23132 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23134 case OPC_CMPGDU_LT_QB
:
23136 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23137 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23138 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23139 tcg_gen_shli_tl(t1
, t1
, 24);
23140 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23142 case OPC_CMPGDU_LE_QB
:
23144 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23145 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23146 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23147 tcg_gen_shli_tl(t1
, t1
, 24);
23148 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23150 case OPC_CMP_EQ_PH
:
23152 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23154 case OPC_CMP_LT_PH
:
23156 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23158 case OPC_CMP_LE_PH
:
23160 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23164 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23168 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23170 case OPC_PACKRL_PH
:
23172 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23176 #ifdef TARGET_MIPS64
23177 case OPC_CMPU_EQ_OB_DSP
:
23179 case OPC_CMP_EQ_PW
:
23181 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23183 case OPC_CMP_LT_PW
:
23185 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23187 case OPC_CMP_LE_PW
:
23189 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23191 case OPC_CMP_EQ_QH
:
23193 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23195 case OPC_CMP_LT_QH
:
23197 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23199 case OPC_CMP_LE_QH
:
23201 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23203 case OPC_CMPGDU_EQ_OB
:
23205 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23207 case OPC_CMPGDU_LT_OB
:
23209 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23211 case OPC_CMPGDU_LE_OB
:
23213 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23215 case OPC_CMPGU_EQ_OB
:
23217 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23219 case OPC_CMPGU_LT_OB
:
23221 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23223 case OPC_CMPGU_LE_OB
:
23225 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23227 case OPC_CMPU_EQ_OB
:
23229 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23231 case OPC_CMPU_LT_OB
:
23233 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23235 case OPC_CMPU_LE_OB
:
23237 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23239 case OPC_PACKRL_PW
:
23241 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23245 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23249 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23253 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23261 tcg_temp_free(v1_t
);
23262 tcg_temp_free(v2_t
);
23265 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23266 uint32_t op1
, int rt
, int rs
, int sa
)
23273 /* Treat as NOP. */
23277 t0
= tcg_temp_new();
23278 gen_load_gpr(t0
, rs
);
23281 case OPC_APPEND_DSP
:
23282 switch (MASK_APPEND(ctx
->opcode
)) {
23285 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23287 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23291 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23292 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23293 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23294 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23296 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23300 if (sa
!= 0 && sa
!= 2) {
23301 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23302 tcg_gen_ext32u_tl(t0
, t0
);
23303 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23304 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23306 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23308 default: /* Invalid */
23309 MIPS_INVAL("MASK APPEND");
23310 generate_exception_end(ctx
, EXCP_RI
);
23314 #ifdef TARGET_MIPS64
23315 case OPC_DAPPEND_DSP
:
23316 switch (MASK_DAPPEND(ctx
->opcode
)) {
23319 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23323 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23324 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23325 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23329 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23330 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23331 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23336 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23337 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23338 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23339 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23342 default: /* Invalid */
23343 MIPS_INVAL("MASK DAPPEND");
23344 generate_exception_end(ctx
, EXCP_RI
);
23353 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23354 int ret
, int v1
, int v2
, int check_ret
)
23363 if ((ret
== 0) && (check_ret
== 1)) {
23364 /* Treat as NOP. */
23368 t0
= tcg_temp_new();
23369 t1
= tcg_temp_new();
23370 v1_t
= tcg_temp_new();
23371 v2_t
= tcg_temp_new();
23373 gen_load_gpr(v1_t
, v1
);
23374 gen_load_gpr(v2_t
, v2
);
23377 case OPC_EXTR_W_DSP
:
23381 tcg_gen_movi_tl(t0
, v2
);
23382 tcg_gen_movi_tl(t1
, v1
);
23383 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23386 tcg_gen_movi_tl(t0
, v2
);
23387 tcg_gen_movi_tl(t1
, v1
);
23388 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23390 case OPC_EXTR_RS_W
:
23391 tcg_gen_movi_tl(t0
, v2
);
23392 tcg_gen_movi_tl(t1
, v1
);
23393 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23396 tcg_gen_movi_tl(t0
, v2
);
23397 tcg_gen_movi_tl(t1
, v1
);
23398 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23400 case OPC_EXTRV_S_H
:
23401 tcg_gen_movi_tl(t0
, v2
);
23402 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23405 tcg_gen_movi_tl(t0
, v2
);
23406 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23408 case OPC_EXTRV_R_W
:
23409 tcg_gen_movi_tl(t0
, v2
);
23410 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23412 case OPC_EXTRV_RS_W
:
23413 tcg_gen_movi_tl(t0
, v2
);
23414 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23417 tcg_gen_movi_tl(t0
, v2
);
23418 tcg_gen_movi_tl(t1
, v1
);
23419 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23422 tcg_gen_movi_tl(t0
, v2
);
23423 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23426 tcg_gen_movi_tl(t0
, v2
);
23427 tcg_gen_movi_tl(t1
, v1
);
23428 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23431 tcg_gen_movi_tl(t0
, v2
);
23432 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23435 imm
= (ctx
->opcode
>> 20) & 0x3F;
23436 tcg_gen_movi_tl(t0
, ret
);
23437 tcg_gen_movi_tl(t1
, imm
);
23438 gen_helper_shilo(t0
, t1
, cpu_env
);
23441 tcg_gen_movi_tl(t0
, ret
);
23442 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23445 tcg_gen_movi_tl(t0
, ret
);
23446 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23449 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23450 tcg_gen_movi_tl(t0
, imm
);
23451 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23454 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23455 tcg_gen_movi_tl(t0
, imm
);
23456 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23460 #ifdef TARGET_MIPS64
23461 case OPC_DEXTR_W_DSP
:
23465 tcg_gen_movi_tl(t0
, ret
);
23466 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23470 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23471 int ac
= (ctx
->opcode
>> 11) & 0x03;
23472 tcg_gen_movi_tl(t0
, shift
);
23473 tcg_gen_movi_tl(t1
, ac
);
23474 gen_helper_dshilo(t0
, t1
, cpu_env
);
23479 int ac
= (ctx
->opcode
>> 11) & 0x03;
23480 tcg_gen_movi_tl(t0
, ac
);
23481 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23485 tcg_gen_movi_tl(t0
, v2
);
23486 tcg_gen_movi_tl(t1
, v1
);
23488 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23491 tcg_gen_movi_tl(t0
, v2
);
23492 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23495 tcg_gen_movi_tl(t0
, v2
);
23496 tcg_gen_movi_tl(t1
, v1
);
23497 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23500 tcg_gen_movi_tl(t0
, v2
);
23501 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23504 tcg_gen_movi_tl(t0
, v2
);
23505 tcg_gen_movi_tl(t1
, v1
);
23506 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23508 case OPC_DEXTR_R_L
:
23509 tcg_gen_movi_tl(t0
, v2
);
23510 tcg_gen_movi_tl(t1
, v1
);
23511 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23513 case OPC_DEXTR_RS_L
:
23514 tcg_gen_movi_tl(t0
, v2
);
23515 tcg_gen_movi_tl(t1
, v1
);
23516 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23519 tcg_gen_movi_tl(t0
, v2
);
23520 tcg_gen_movi_tl(t1
, v1
);
23521 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23523 case OPC_DEXTR_R_W
:
23524 tcg_gen_movi_tl(t0
, v2
);
23525 tcg_gen_movi_tl(t1
, v1
);
23526 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23528 case OPC_DEXTR_RS_W
:
23529 tcg_gen_movi_tl(t0
, v2
);
23530 tcg_gen_movi_tl(t1
, v1
);
23531 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23533 case OPC_DEXTR_S_H
:
23534 tcg_gen_movi_tl(t0
, v2
);
23535 tcg_gen_movi_tl(t1
, v1
);
23536 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23538 case OPC_DEXTRV_S_H
:
23539 tcg_gen_movi_tl(t0
, v2
);
23540 tcg_gen_movi_tl(t1
, v1
);
23541 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23544 tcg_gen_movi_tl(t0
, v2
);
23545 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23547 case OPC_DEXTRV_R_L
:
23548 tcg_gen_movi_tl(t0
, v2
);
23549 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23551 case OPC_DEXTRV_RS_L
:
23552 tcg_gen_movi_tl(t0
, v2
);
23553 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23556 tcg_gen_movi_tl(t0
, v2
);
23557 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23559 case OPC_DEXTRV_R_W
:
23560 tcg_gen_movi_tl(t0
, v2
);
23561 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23563 case OPC_DEXTRV_RS_W
:
23564 tcg_gen_movi_tl(t0
, v2
);
23565 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23574 tcg_temp_free(v1_t
);
23575 tcg_temp_free(v2_t
);
23578 /* End MIPSDSP functions. */
23580 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23582 int rs
, rt
, rd
, sa
;
23585 rs
= (ctx
->opcode
>> 21) & 0x1f;
23586 rt
= (ctx
->opcode
>> 16) & 0x1f;
23587 rd
= (ctx
->opcode
>> 11) & 0x1f;
23588 sa
= (ctx
->opcode
>> 6) & 0x1f;
23590 op1
= MASK_SPECIAL(ctx
->opcode
);
23593 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23599 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23609 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23612 MIPS_INVAL("special_r6 muldiv");
23613 generate_exception_end(ctx
, EXCP_RI
);
23619 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23623 if (rt
== 0 && sa
== 1) {
23624 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23625 We need additionally to check other fields */
23626 gen_cl(ctx
, op1
, rd
, rs
);
23628 generate_exception_end(ctx
, EXCP_RI
);
23632 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23633 gen_helper_do_semihosting(cpu_env
);
23635 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23636 generate_exception_end(ctx
, EXCP_RI
);
23638 generate_exception_end(ctx
, EXCP_DBp
);
23642 #if defined(TARGET_MIPS64)
23644 check_mips_64(ctx
);
23645 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23649 if (rt
== 0 && sa
== 1) {
23650 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23651 We need additionally to check other fields */
23652 check_mips_64(ctx
);
23653 gen_cl(ctx
, op1
, rd
, rs
);
23655 generate_exception_end(ctx
, EXCP_RI
);
23663 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23673 check_mips_64(ctx
);
23674 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23677 MIPS_INVAL("special_r6 muldiv");
23678 generate_exception_end(ctx
, EXCP_RI
);
23683 default: /* Invalid */
23684 MIPS_INVAL("special_r6");
23685 generate_exception_end(ctx
, EXCP_RI
);
23690 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23692 int rs
, rt
, rd
, sa
;
23695 rs
= (ctx
->opcode
>> 21) & 0x1f;
23696 rt
= (ctx
->opcode
>> 16) & 0x1f;
23697 rd
= (ctx
->opcode
>> 11) & 0x1f;
23698 sa
= (ctx
->opcode
>> 6) & 0x1f;
23700 op1
= MASK_SPECIAL(ctx
->opcode
);
23702 case OPC_MOVN
: /* Conditional move */
23704 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23705 INSN_LOONGSON2E
| INSN_LOONGSON2F
| INSN_R5900
);
23706 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23708 case OPC_MFHI
: /* Move from HI/LO */
23710 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23713 case OPC_MTLO
: /* Move to HI/LO */
23714 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23717 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23718 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23719 check_cp1_enabled(ctx
);
23720 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23721 (ctx
->opcode
>> 16) & 1);
23723 generate_exception_err(ctx
, EXCP_CpU
, 1);
23729 check_insn(ctx
, INSN_VR54XX
);
23730 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23731 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23732 } else if (ctx
->insn_flags
& INSN_R5900
) {
23733 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23735 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23740 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23742 #if defined(TARGET_MIPS64)
23747 check_insn(ctx
, ISA_MIPS3
);
23748 check_insn_opc_user_only(ctx
, INSN_R5900
);
23749 check_mips_64(ctx
);
23750 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23754 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23757 #ifdef MIPS_STRICT_STANDARD
23758 MIPS_INVAL("SPIM");
23759 generate_exception_end(ctx
, EXCP_RI
);
23761 /* Implemented as RI exception for now. */
23762 MIPS_INVAL("spim (unofficial)");
23763 generate_exception_end(ctx
, EXCP_RI
);
23766 default: /* Invalid */
23767 MIPS_INVAL("special_legacy");
23768 generate_exception_end(ctx
, EXCP_RI
);
23773 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23775 int rs
, rt
, rd
, sa
;
23778 rs
= (ctx
->opcode
>> 21) & 0x1f;
23779 rt
= (ctx
->opcode
>> 16) & 0x1f;
23780 rd
= (ctx
->opcode
>> 11) & 0x1f;
23781 sa
= (ctx
->opcode
>> 6) & 0x1f;
23783 op1
= MASK_SPECIAL(ctx
->opcode
);
23785 case OPC_SLL
: /* Shift with immediate */
23786 if (sa
== 5 && rd
== 0 &&
23787 rs
== 0 && rt
== 0) { /* PAUSE */
23788 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23789 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23790 generate_exception_end(ctx
, EXCP_RI
);
23796 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23799 switch ((ctx
->opcode
>> 21) & 0x1f) {
23801 /* rotr is decoded as srl on non-R2 CPUs */
23802 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23807 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23810 generate_exception_end(ctx
, EXCP_RI
);
23818 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23820 case OPC_SLLV
: /* Shifts */
23822 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23825 switch ((ctx
->opcode
>> 6) & 0x1f) {
23827 /* rotrv is decoded as srlv on non-R2 CPUs */
23828 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23833 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23836 generate_exception_end(ctx
, EXCP_RI
);
23840 case OPC_SLT
: /* Set on less than */
23842 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23844 case OPC_AND
: /* Logic*/
23848 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23851 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23853 case OPC_TGE
: /* Traps */
23859 check_insn(ctx
, ISA_MIPS2
);
23860 gen_trap(ctx
, op1
, rs
, rt
, -1);
23862 case OPC_LSA
: /* OPC_PMON */
23863 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23864 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23865 decode_opc_special_r6(env
, ctx
);
23867 /* Pmon entry point, also R4010 selsl */
23868 #ifdef MIPS_STRICT_STANDARD
23869 MIPS_INVAL("PMON / selsl");
23870 generate_exception_end(ctx
, EXCP_RI
);
23872 gen_helper_0e0i(pmon
, sa
);
23877 generate_exception_end(ctx
, EXCP_SYSCALL
);
23880 generate_exception_end(ctx
, EXCP_BREAK
);
23883 check_insn(ctx
, ISA_MIPS2
);
23884 gen_sync(extract32(ctx
->opcode
, 6, 5));
23887 #if defined(TARGET_MIPS64)
23888 /* MIPS64 specific opcodes */
23893 check_insn(ctx
, ISA_MIPS3
);
23894 check_mips_64(ctx
);
23895 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23898 switch ((ctx
->opcode
>> 21) & 0x1f) {
23900 /* drotr is decoded as dsrl on non-R2 CPUs */
23901 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23906 check_insn(ctx
, ISA_MIPS3
);
23907 check_mips_64(ctx
);
23908 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23911 generate_exception_end(ctx
, EXCP_RI
);
23916 switch ((ctx
->opcode
>> 21) & 0x1f) {
23918 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23919 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23924 check_insn(ctx
, ISA_MIPS3
);
23925 check_mips_64(ctx
);
23926 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23929 generate_exception_end(ctx
, EXCP_RI
);
23937 check_insn(ctx
, ISA_MIPS3
);
23938 check_mips_64(ctx
);
23939 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23943 check_insn(ctx
, ISA_MIPS3
);
23944 check_mips_64(ctx
);
23945 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23948 switch ((ctx
->opcode
>> 6) & 0x1f) {
23950 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23951 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23956 check_insn(ctx
, ISA_MIPS3
);
23957 check_mips_64(ctx
);
23958 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23961 generate_exception_end(ctx
, EXCP_RI
);
23966 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23967 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23968 decode_opc_special_r6(env
, ctx
);
23973 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23974 decode_opc_special_r6(env
, ctx
);
23976 decode_opc_special_legacy(env
, ctx
);
23982 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
23983 #define MXU_APTN1_A 0
23984 #define MXU_APTN1_S 1
23986 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
23987 #define MXU_APTN2_AA 0
23988 #define MXU_APTN2_AS 1
23989 #define MXU_APTN2_SA 2
23990 #define MXU_APTN2_SS 3
23992 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
23993 #define MXU_EPTN2_AA 0
23994 #define MXU_EPTN2_AS 1
23995 #define MXU_EPTN2_SA 2
23996 #define MXU_EPTN2_SS 3
23998 /* MXU operand getting pattern 'optn2' */
23999 #define MXU_OPTN2_WW 0
24000 #define MXU_OPTN2_LW 1
24001 #define MXU_OPTN2_HW 2
24002 #define MXU_OPTN2_XW 3
24004 /* MXU operand getting pattern 'optn3' */
24005 #define MXU_OPTN3_PTN0 0
24006 #define MXU_OPTN3_PTN1 1
24007 #define MXU_OPTN3_PTN2 2
24008 #define MXU_OPTN3_PTN3 3
24009 #define MXU_OPTN3_PTN4 4
24010 #define MXU_OPTN3_PTN5 5
24011 #define MXU_OPTN3_PTN6 6
24012 #define MXU_OPTN3_PTN7 7
24017 * Decode MXU pool00
24019 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24020 * +-----------+---------+-----+-------+-------+-------+-----------+
24021 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
24022 * +-----------+---------+-----+-------+-------+-------+-----------+
24025 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
24027 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24030 case OPC_MXU_S32MAX
:
24031 /* TODO: Implement emulation of S32MAX instruction. */
24032 MIPS_INVAL("OPC_MXU_S32MAX");
24033 generate_exception_end(ctx
, EXCP_RI
);
24035 case OPC_MXU_S32MIN
:
24036 /* TODO: Implement emulation of S32MIN instruction. */
24037 MIPS_INVAL("OPC_MXU_S32MIN");
24038 generate_exception_end(ctx
, EXCP_RI
);
24040 case OPC_MXU_D16MAX
:
24041 /* TODO: Implement emulation of D16MAX instruction. */
24042 MIPS_INVAL("OPC_MXU_D16MAX");
24043 generate_exception_end(ctx
, EXCP_RI
);
24045 case OPC_MXU_D16MIN
:
24046 /* TODO: Implement emulation of D16MIN instruction. */
24047 MIPS_INVAL("OPC_MXU_D16MIN");
24048 generate_exception_end(ctx
, EXCP_RI
);
24050 case OPC_MXU_Q8MAX
:
24051 /* TODO: Implement emulation of Q8MAX instruction. */
24052 MIPS_INVAL("OPC_MXU_Q8MAX");
24053 generate_exception_end(ctx
, EXCP_RI
);
24055 case OPC_MXU_Q8MIN
:
24056 /* TODO: Implement emulation of Q8MIN instruction. */
24057 MIPS_INVAL("OPC_MXU_Q8MIN");
24058 generate_exception_end(ctx
, EXCP_RI
);
24060 case OPC_MXU_Q8SLT
:
24061 /* TODO: Implement emulation of Q8SLT instruction. */
24062 MIPS_INVAL("OPC_MXU_Q8SLT");
24063 generate_exception_end(ctx
, EXCP_RI
);
24065 case OPC_MXU_Q8SLTU
:
24066 /* TODO: Implement emulation of Q8SLTU instruction. */
24067 MIPS_INVAL("OPC_MXU_Q8SLTU");
24068 generate_exception_end(ctx
, EXCP_RI
);
24071 MIPS_INVAL("decode_opc_mxu");
24072 generate_exception_end(ctx
, EXCP_RI
);
24079 * Decode MXU pool01
24081 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
24082 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24083 * +-----------+---------+-----+-------+-------+-------+-----------+
24084 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
24085 * +-----------+---------+-----+-------+-------+-------+-----------+
24088 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24089 * +-----------+---+-----+-----+-------+-------+-------+-----------+
24090 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
24091 * +-----------+---+-----+-----+-------+-------+-------+-----------+
24094 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
24096 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24099 case OPC_MXU_S32SLT
:
24100 /* TODO: Implement emulation of S32SLT instruction. */
24101 MIPS_INVAL("OPC_MXU_S32SLT");
24102 generate_exception_end(ctx
, EXCP_RI
);
24104 case OPC_MXU_D16SLT
:
24105 /* TODO: Implement emulation of D16SLT instruction. */
24106 MIPS_INVAL("OPC_MXU_D16SLT");
24107 generate_exception_end(ctx
, EXCP_RI
);
24109 case OPC_MXU_D16AVG
:
24110 /* TODO: Implement emulation of D16AVG instruction. */
24111 MIPS_INVAL("OPC_MXU_D16AVG");
24112 generate_exception_end(ctx
, EXCP_RI
);
24114 case OPC_MXU_D16AVGR
:
24115 /* TODO: Implement emulation of D16AVGR instruction. */
24116 MIPS_INVAL("OPC_MXU_D16AVGR");
24117 generate_exception_end(ctx
, EXCP_RI
);
24119 case OPC_MXU_Q8AVG
:
24120 /* TODO: Implement emulation of Q8AVG instruction. */
24121 MIPS_INVAL("OPC_MXU_Q8AVG");
24122 generate_exception_end(ctx
, EXCP_RI
);
24124 case OPC_MXU_Q8AVGR
:
24125 /* TODO: Implement emulation of Q8AVGR instruction. */
24126 MIPS_INVAL("OPC_MXU_Q8AVGR");
24127 generate_exception_end(ctx
, EXCP_RI
);
24129 case OPC_MXU_Q8ADD
:
24130 /* TODO: Implement emulation of Q8ADD instruction. */
24131 MIPS_INVAL("OPC_MXU_Q8ADD");
24132 generate_exception_end(ctx
, EXCP_RI
);
24135 MIPS_INVAL("decode_opc_mxu");
24136 generate_exception_end(ctx
, EXCP_RI
);
24143 * Decode MXU pool02
24145 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24146 * +-----------+---------+-----+-------+-------+-------+-----------+
24147 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
24148 * +-----------+---------+-----+-------+-------+-------+-----------+
24151 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
24153 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24156 case OPC_MXU_S32CPS
:
24157 /* TODO: Implement emulation of S32CPS instruction. */
24158 MIPS_INVAL("OPC_MXU_S32CPS");
24159 generate_exception_end(ctx
, EXCP_RI
);
24161 case OPC_MXU_D16CPS
:
24162 /* TODO: Implement emulation of D16CPS instruction. */
24163 MIPS_INVAL("OPC_MXU_D16CPS");
24164 generate_exception_end(ctx
, EXCP_RI
);
24166 case OPC_MXU_Q8ABD
:
24167 /* TODO: Implement emulation of Q8ABD instruction. */
24168 MIPS_INVAL("OPC_MXU_Q8ABD");
24169 generate_exception_end(ctx
, EXCP_RI
);
24171 case OPC_MXU_Q16SAT
:
24172 /* TODO: Implement emulation of Q16SAT instruction. */
24173 MIPS_INVAL("OPC_MXU_Q16SAT");
24174 generate_exception_end(ctx
, EXCP_RI
);
24177 MIPS_INVAL("decode_opc_mxu");
24178 generate_exception_end(ctx
, EXCP_RI
);
24185 * Decode MXU pool03
24188 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24189 * +-----------+---+---+-------+-------+-------+-------+-----------+
24190 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
24191 * +-----------+---+---+-------+-------+-------+-------+-----------+
24194 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24195 * +-----------+---+---+-------+-------+-------+-------+-----------+
24196 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
24197 * +-----------+---+---+-------+-------+-------+-------+-----------+
24200 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
24202 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
24205 case OPC_MXU_D16MULF
:
24206 /* TODO: Implement emulation of D16MULF instruction. */
24207 MIPS_INVAL("OPC_MXU_D16MULF");
24208 generate_exception_end(ctx
, EXCP_RI
);
24210 case OPC_MXU_D16MULE
:
24211 /* TODO: Implement emulation of D16MULE instruction. */
24212 MIPS_INVAL("OPC_MXU_D16MULE");
24213 generate_exception_end(ctx
, EXCP_RI
);
24216 MIPS_INVAL("decode_opc_mxu");
24217 generate_exception_end(ctx
, EXCP_RI
);
24224 * Decode MXU pool04
24226 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24227 * +-----------+---------+-+-------------------+-------+-----------+
24228 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
24229 * +-----------+---------+-+-------------------+-------+-----------+
24232 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
24234 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
24237 case OPC_MXU_S32LDD
:
24238 /* TODO: Implement emulation of S32LDD instruction. */
24239 MIPS_INVAL("OPC_MXU_S32LDD");
24240 generate_exception_end(ctx
, EXCP_RI
);
24242 case OPC_MXU_S32LDDR
:
24243 /* TODO: Implement emulation of S32LDDR instruction. */
24244 MIPS_INVAL("OPC_MXU_S32LDDR");
24245 generate_exception_end(ctx
, EXCP_RI
);
24248 MIPS_INVAL("decode_opc_mxu");
24249 generate_exception_end(ctx
, EXCP_RI
);
24256 * Decode MXU pool05
24258 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24259 * +-----------+---------+-+-------------------+-------+-----------+
24260 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
24261 * +-----------+---------+-+-------------------+-------+-----------+
24264 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
24266 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
24269 case OPC_MXU_S32STD
:
24270 /* TODO: Implement emulation of S32STD instruction. */
24271 MIPS_INVAL("OPC_MXU_S32STD");
24272 generate_exception_end(ctx
, EXCP_RI
);
24274 case OPC_MXU_S32STDR
:
24275 /* TODO: Implement emulation of S32STDR instruction. */
24276 MIPS_INVAL("OPC_MXU_S32STDR");
24277 generate_exception_end(ctx
, EXCP_RI
);
24280 MIPS_INVAL("decode_opc_mxu");
24281 generate_exception_end(ctx
, EXCP_RI
);
24288 * Decode MXU pool06
24290 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24291 * +-----------+---------+---------+---+-------+-------+-----------+
24292 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
24293 * +-----------+---------+---------+---+-------+-------+-----------+
24296 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
24298 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
24301 case OPC_MXU_S32LDDV
:
24302 /* TODO: Implement emulation of S32LDDV instruction. */
24303 MIPS_INVAL("OPC_MXU_S32LDDV");
24304 generate_exception_end(ctx
, EXCP_RI
);
24306 case OPC_MXU_S32LDDVR
:
24307 /* TODO: Implement emulation of S32LDDVR instruction. */
24308 MIPS_INVAL("OPC_MXU_S32LDDVR");
24309 generate_exception_end(ctx
, EXCP_RI
);
24312 MIPS_INVAL("decode_opc_mxu");
24313 generate_exception_end(ctx
, EXCP_RI
);
24320 * Decode MXU pool07
24322 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24323 * +-----------+---------+---------+---+-------+-------+-----------+
24324 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
24325 * +-----------+---------+---------+---+-------+-------+-----------+
24328 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
24330 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
24333 case OPC_MXU_S32STDV
:
24334 /* TODO: Implement emulation of S32TDV instruction. */
24335 MIPS_INVAL("OPC_MXU_S32TDV");
24336 generate_exception_end(ctx
, EXCP_RI
);
24338 case OPC_MXU_S32STDVR
:
24339 /* TODO: Implement emulation of S32TDVR instruction. */
24340 MIPS_INVAL("OPC_MXU_S32TDVR");
24341 generate_exception_end(ctx
, EXCP_RI
);
24344 MIPS_INVAL("decode_opc_mxu");
24345 generate_exception_end(ctx
, EXCP_RI
);
24352 * Decode MXU pool08
24354 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24355 * +-----------+---------+-+-------------------+-------+-----------+
24356 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
24357 * +-----------+---------+-+-------------------+-------+-----------+
24360 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
24362 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
24365 case OPC_MXU_S32LDI
:
24366 /* TODO: Implement emulation of S32LDI instruction. */
24367 MIPS_INVAL("OPC_MXU_S32LDI");
24368 generate_exception_end(ctx
, EXCP_RI
);
24370 case OPC_MXU_S32LDIR
:
24371 /* TODO: Implement emulation of S32LDIR instruction. */
24372 MIPS_INVAL("OPC_MXU_S32LDIR");
24373 generate_exception_end(ctx
, EXCP_RI
);
24376 MIPS_INVAL("decode_opc_mxu");
24377 generate_exception_end(ctx
, EXCP_RI
);
24384 * Decode MXU pool09
24386 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24387 * +-----------+---------+-+-------------------+-------+-----------+
24388 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
24389 * +-----------+---------+-+-------------------+-------+-----------+
24392 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
24394 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
24397 case OPC_MXU_S32SDI
:
24398 /* TODO: Implement emulation of S32SDI instruction. */
24399 MIPS_INVAL("OPC_MXU_S32SDI");
24400 generate_exception_end(ctx
, EXCP_RI
);
24402 case OPC_MXU_S32SDIR
:
24403 /* TODO: Implement emulation of S32SDIR instruction. */
24404 MIPS_INVAL("OPC_MXU_S32SDIR");
24405 generate_exception_end(ctx
, EXCP_RI
);
24408 MIPS_INVAL("decode_opc_mxu");
24409 generate_exception_end(ctx
, EXCP_RI
);
24416 * Decode MXU pool10
24418 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24419 * +-----------+---------+---------+---+-------+-------+-----------+
24420 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
24421 * +-----------+---------+---------+---+-------+-------+-----------+
24424 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
24426 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
24429 case OPC_MXU_S32LDIV
:
24430 /* TODO: Implement emulation of S32LDIV instruction. */
24431 MIPS_INVAL("OPC_MXU_S32LDIV");
24432 generate_exception_end(ctx
, EXCP_RI
);
24434 case OPC_MXU_S32LDIVR
:
24435 /* TODO: Implement emulation of S32LDIVR instruction. */
24436 MIPS_INVAL("OPC_MXU_S32LDIVR");
24437 generate_exception_end(ctx
, EXCP_RI
);
24440 MIPS_INVAL("decode_opc_mxu");
24441 generate_exception_end(ctx
, EXCP_RI
);
24448 * Decode MXU pool11
24450 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24451 * +-----------+---------+---------+---+-------+-------+-----------+
24452 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
24453 * +-----------+---------+---------+---+-------+-------+-----------+
24456 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
24458 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
24461 case OPC_MXU_S32SDIV
:
24462 /* TODO: Implement emulation of S32SDIV instruction. */
24463 MIPS_INVAL("OPC_MXU_S32SDIV");
24464 generate_exception_end(ctx
, EXCP_RI
);
24466 case OPC_MXU_S32SDIVR
:
24467 /* TODO: Implement emulation of S32SDIVR instruction. */
24468 MIPS_INVAL("OPC_MXU_S32SDIVR");
24469 generate_exception_end(ctx
, EXCP_RI
);
24472 MIPS_INVAL("decode_opc_mxu");
24473 generate_exception_end(ctx
, EXCP_RI
);
24480 * Decode MXU pool12
24482 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24483 * +-----------+---+---+-------+-------+-------+-------+-----------+
24484 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
24485 * +-----------+---+---+-------+-------+-------+-------+-----------+
24488 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
24490 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
24493 case OPC_MXU_D32ACC
:
24494 /* TODO: Implement emulation of D32ACC instruction. */
24495 MIPS_INVAL("OPC_MXU_D32ACC");
24496 generate_exception_end(ctx
, EXCP_RI
);
24498 case OPC_MXU_D32ACCM
:
24499 /* TODO: Implement emulation of D32ACCM instruction. */
24500 MIPS_INVAL("OPC_MXU_D32ACCM");
24501 generate_exception_end(ctx
, EXCP_RI
);
24503 case OPC_MXU_D32ASUM
:
24504 /* TODO: Implement emulation of D32ASUM instruction. */
24505 MIPS_INVAL("OPC_MXU_D32ASUM");
24506 generate_exception_end(ctx
, EXCP_RI
);
24509 MIPS_INVAL("decode_opc_mxu");
24510 generate_exception_end(ctx
, EXCP_RI
);
24517 * Decode MXU pool13
24519 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24520 * +-----------+---+---+-------+-------+-------+-------+-----------+
24521 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
24522 * +-----------+---+---+-------+-------+-------+-------+-----------+
24525 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
24527 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
24530 case OPC_MXU_Q16ACC
:
24531 /* TODO: Implement emulation of Q16ACC instruction. */
24532 MIPS_INVAL("OPC_MXU_Q16ACC");
24533 generate_exception_end(ctx
, EXCP_RI
);
24535 case OPC_MXU_Q16ACCM
:
24536 /* TODO: Implement emulation of Q16ACCM instruction. */
24537 MIPS_INVAL("OPC_MXU_Q16ACCM");
24538 generate_exception_end(ctx
, EXCP_RI
);
24540 case OPC_MXU_Q16ASUM
:
24541 /* TODO: Implement emulation of Q16ASUM instruction. */
24542 MIPS_INVAL("OPC_MXU_Q16ASUM");
24543 generate_exception_end(ctx
, EXCP_RI
);
24546 MIPS_INVAL("decode_opc_mxu");
24547 generate_exception_end(ctx
, EXCP_RI
);
24554 * Decode MXU pool14
24557 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24558 * +-----------+---+---+-------+-------+-------+-------+-----------+
24559 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
24560 * +-----------+---+---+-------+-------+-------+-------+-----------+
24563 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24564 * +-----------+---+---+-------+-------+-------+-------+-----------+
24565 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
24566 * +-----------+---+---+-------+-------+-------+-------+-----------+
24569 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
24571 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
24574 case OPC_MXU_Q8ADDE
:
24575 /* TODO: Implement emulation of Q8ADDE instruction. */
24576 MIPS_INVAL("OPC_MXU_Q8ADDE");
24577 generate_exception_end(ctx
, EXCP_RI
);
24579 case OPC_MXU_D8SUM
:
24580 /* TODO: Implement emulation of D8SUM instruction. */
24581 MIPS_INVAL("OPC_MXU_D8SUM");
24582 generate_exception_end(ctx
, EXCP_RI
);
24584 case OPC_MXU_D8SUMC
:
24585 /* TODO: Implement emulation of D8SUMC instruction. */
24586 MIPS_INVAL("OPC_MXU_D8SUMC");
24587 generate_exception_end(ctx
, EXCP_RI
);
24590 MIPS_INVAL("decode_opc_mxu");
24591 generate_exception_end(ctx
, EXCP_RI
);
24598 * Decode MXU pool15
24600 * S32MUL, S32MULU, S32EXTRV:
24601 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24602 * +-----------+---------+---------+---+-------+-------+-----------+
24603 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
24604 * +-----------+---------+---------+---+-------+-------+-----------+
24607 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24608 * +-----------+---------+---------+---+-------+-------+-----------+
24609 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
24610 * +-----------+---------+---------+---+-------+-------+-----------+
24613 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
24615 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
24618 case OPC_MXU_S32MUL
:
24619 /* TODO: Implement emulation of S32MUL instruction. */
24620 MIPS_INVAL("OPC_MXU_S32MUL");
24621 generate_exception_end(ctx
, EXCP_RI
);
24623 case OPC_MXU_S32MULU
:
24624 /* TODO: Implement emulation of S32MULU instruction. */
24625 MIPS_INVAL("OPC_MXU_S32MULU");
24626 generate_exception_end(ctx
, EXCP_RI
);
24628 case OPC_MXU_S32EXTR
:
24629 /* TODO: Implement emulation of S32EXTR instruction. */
24630 MIPS_INVAL("OPC_MXU_S32EXTR");
24631 generate_exception_end(ctx
, EXCP_RI
);
24633 case OPC_MXU_S32EXTRV
:
24634 /* TODO: Implement emulation of S32EXTRV instruction. */
24635 MIPS_INVAL("OPC_MXU_S32EXTRV");
24636 generate_exception_end(ctx
, EXCP_RI
);
24639 MIPS_INVAL("decode_opc_mxu");
24640 generate_exception_end(ctx
, EXCP_RI
);
24647 * Decode MXU pool16
24650 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24651 * +-----------+---------+-----+-------+-------+-------+-----------+
24652 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
24653 * +-----------+---------+-----+-------+-------+-------+-----------+
24656 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24657 * +-----------+---------+-----+-------+-------+-------+-----------+
24658 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
24659 * +-----------+---------+-----+-------+-------+-------+-----------+
24662 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24663 * +-----------+-----+---+-----+-------+-------+-------+-----------+
24664 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
24665 * +-----------+-----+---+-----+-------+-------+-------+-----------+
24667 * S32NOR, S32AND, S32OR, S32XOR:
24668 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24669 * +-----------+---------+-----+-------+-------+-------+-----------+
24670 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
24671 * +-----------+---------+-----+-------+-------+-------+-----------+
24674 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24675 * +-----------+-----+---+-----+-------+---------------+-----------+
24676 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
24677 * +-----------+-----+---+-----+-------+---------------+-----------+
24680 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
24682 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24685 case OPC_MXU_D32SARW
:
24686 /* TODO: Implement emulation of D32SARW instruction. */
24687 MIPS_INVAL("OPC_MXU_D32SARW");
24688 generate_exception_end(ctx
, EXCP_RI
);
24690 case OPC_MXU_S32ALN
:
24691 /* TODO: Implement emulation of S32ALN instruction. */
24692 MIPS_INVAL("OPC_MXU_S32ALN");
24693 generate_exception_end(ctx
, EXCP_RI
);
24695 case OPC_MXU_S32ALNI
:
24696 /* TODO: Implement emulation of S32ALNI instruction. */
24697 MIPS_INVAL("OPC_MXU_S32ALNI");
24698 generate_exception_end(ctx
, EXCP_RI
);
24700 case OPC_MXU_S32NOR
:
24701 /* TODO: Implement emulation of S32NOR instruction. */
24702 MIPS_INVAL("OPC_MXU_S32NOR");
24703 generate_exception_end(ctx
, EXCP_RI
);
24705 case OPC_MXU_S32AND
:
24706 /* TODO: Implement emulation of S32AND instruction. */
24707 MIPS_INVAL("OPC_MXU_S32AND");
24708 generate_exception_end(ctx
, EXCP_RI
);
24710 case OPC_MXU_S32OR
:
24711 /* TODO: Implement emulation of S32OR instruction. */
24712 MIPS_INVAL("OPC_MXU_S32OR");
24713 generate_exception_end(ctx
, EXCP_RI
);
24715 case OPC_MXU_S32XOR
:
24716 /* TODO: Implement emulation of S32XOR instruction. */
24717 MIPS_INVAL("OPC_MXU_S32XOR");
24718 generate_exception_end(ctx
, EXCP_RI
);
24720 case OPC_MXU_S32LUI
:
24721 /* TODO: Implement emulation of S32LUI instruction. */
24722 MIPS_INVAL("OPC_MXU_S32LUI");
24723 generate_exception_end(ctx
, EXCP_RI
);
24726 MIPS_INVAL("decode_opc_mxu");
24727 generate_exception_end(ctx
, EXCP_RI
);
24734 * Decode MXU pool17
24736 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24737 * +-----------+---------+-----+-------+-------+-------+-----------+
24738 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL17|
24739 * +-----------+---------+-----+-------+-------+-------+-----------+
24742 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
24744 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24747 case OPC_MXU_D32SLLV
:
24748 /* TODO: Implement emulation of D32SLLV instruction. */
24749 MIPS_INVAL("OPC_MXU_D32SLLV");
24750 generate_exception_end(ctx
, EXCP_RI
);
24752 case OPC_MXU_D32SLRV
:
24753 /* TODO: Implement emulation of D32SLRV instruction. */
24754 MIPS_INVAL("OPC_MXU_D32SLRV");
24755 generate_exception_end(ctx
, EXCP_RI
);
24757 case OPC_MXU_D32SARV
:
24758 /* TODO: Implement emulation of D32SARV instruction. */
24759 MIPS_INVAL("OPC_MXU_D32SARV");
24760 generate_exception_end(ctx
, EXCP_RI
);
24762 case OPC_MXU_Q16SLLV
:
24763 /* TODO: Implement emulation of Q16SLLV instruction. */
24764 MIPS_INVAL("OPC_MXU_Q16SLLV");
24765 generate_exception_end(ctx
, EXCP_RI
);
24767 case OPC_MXU_Q16SLRV
:
24768 /* TODO: Implement emulation of Q16SLRV instruction. */
24769 MIPS_INVAL("OPC_MXU_Q16SLRV");
24770 generate_exception_end(ctx
, EXCP_RI
);
24772 case OPC_MXU_Q16SARV
:
24773 /* TODO: Implement emulation of Q16SARV instruction. */
24774 MIPS_INVAL("OPC_MXU_Q16SARV");
24775 generate_exception_end(ctx
, EXCP_RI
);
24778 MIPS_INVAL("decode_opc_mxu");
24779 generate_exception_end(ctx
, EXCP_RI
);
24786 * Decode MXU pool18
24788 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24789 * +-----------+---+---+-------+-------+-------+-------+-----------+
24790 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL18|
24791 * +-----------+---+---+-------+-------+-------+-------+-----------+
24794 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
24796 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
24799 case OPC_MXU_Q8MUL
:
24800 /* TODO: Implement emulation of Q8MUL instruction. */
24801 MIPS_INVAL("OPC_MXU_Q8MUL");
24802 generate_exception_end(ctx
, EXCP_RI
);
24804 case OPC_MXU_Q8MULSU
:
24805 /* TODO: Implement emulation of Q8MULSU instruction. */
24806 MIPS_INVAL("OPC_MXU_Q8MULSU");
24807 generate_exception_end(ctx
, EXCP_RI
);
24810 MIPS_INVAL("decode_opc_mxu");
24811 generate_exception_end(ctx
, EXCP_RI
);
24818 * Decode MXU pool19
24820 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24821 * +-----------+---------+-----+-------+-------+-------+-----------+
24822 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL19|
24823 * +-----------+---------+-----+-------+-------+-------+-----------+
24826 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
24828 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24831 case OPC_MXU_Q8MOVZ
:
24832 /* TODO: Implement emulation of Q8MOVZ instruction. */
24833 MIPS_INVAL("OPC_MXU_Q8MOVZ");
24834 generate_exception_end(ctx
, EXCP_RI
);
24836 case OPC_MXU_Q8MOVN
:
24837 /* TODO: Implement emulation of Q8MOVN instruction. */
24838 MIPS_INVAL("OPC_MXU_Q8MOVN");
24839 generate_exception_end(ctx
, EXCP_RI
);
24841 case OPC_MXU_D16MOVZ
:
24842 /* TODO: Implement emulation of D16MOVZ instruction. */
24843 MIPS_INVAL("OPC_MXU_D16MOVZ");
24844 generate_exception_end(ctx
, EXCP_RI
);
24846 case OPC_MXU_D16MOVN
:
24847 /* TODO: Implement emulation of D16MOVN instruction. */
24848 MIPS_INVAL("OPC_MXU_D16MOVN");
24849 generate_exception_end(ctx
, EXCP_RI
);
24851 case OPC_MXU_S32MOVZ
:
24852 /* TODO: Implement emulation of S32MOVZ instruction. */
24853 MIPS_INVAL("OPC_MXU_S32MOVZ");
24854 generate_exception_end(ctx
, EXCP_RI
);
24856 case OPC_MXU_S32MOVN
:
24857 /* TODO: Implement emulation of S32MOVN instruction. */
24858 MIPS_INVAL("OPC_MXU_S32MOVN");
24859 generate_exception_end(ctx
, EXCP_RI
);
24862 MIPS_INVAL("decode_opc_mxu");
24863 generate_exception_end(ctx
, EXCP_RI
);
24870 * Decode MXU pool20
24872 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24873 * +-----------+---+---+-------+-------+-------+-------+-----------+
24874 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL20|
24875 * +-----------+---+---+-------+-------+-------+-------+-----------+
24878 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
24880 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
24883 case OPC_MXU_Q8MAC
:
24884 /* TODO: Implement emulation of Q8MAC instruction. */
24885 MIPS_INVAL("OPC_MXU_Q8MAC");
24886 generate_exception_end(ctx
, EXCP_RI
);
24888 case OPC_MXU_Q8MACSU
:
24889 /* TODO: Implement emulation of Q8MACSU instruction. */
24890 MIPS_INVAL("OPC_MXU_Q8MACSU");
24891 generate_exception_end(ctx
, EXCP_RI
);
24894 MIPS_INVAL("decode_opc_mxu");
24895 generate_exception_end(ctx
, EXCP_RI
);
24902 * Main MXU decoding function
24904 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24905 * +-----------+---------------------------------------+-----------+
24906 * | SPECIAL2 | |x x x x x x|
24907 * +-----------+---------------------------------------+-----------+
24910 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
24912 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
24915 case OPC_MXU_S32MADD
:
24916 /* TODO: Implement emulation of S32MADD instruction. */
24917 MIPS_INVAL("OPC_MXU_S32MADD");
24918 generate_exception_end(ctx
, EXCP_RI
);
24920 case OPC_MXU_S32MADDU
:
24921 /* TODO: Implement emulation of S32MADDU instruction. */
24922 MIPS_INVAL("OPC_MXU_S32MADDU");
24923 generate_exception_end(ctx
, EXCP_RI
);
24925 case OPC_MXU__POOL00
:
24926 decode_opc_mxu__pool00(env
, ctx
);
24928 case OPC_MXU_S32MSUB
:
24929 /* TODO: Implement emulation of S32MSUB instruction. */
24930 MIPS_INVAL("OPC_MXU_S32MSUB");
24931 generate_exception_end(ctx
, EXCP_RI
);
24933 case OPC_MXU_S32MSUBU
:
24934 /* TODO: Implement emulation of S32MSUBU instruction. */
24935 MIPS_INVAL("OPC_MXU_S32MSUBU");
24936 generate_exception_end(ctx
, EXCP_RI
);
24938 case OPC_MXU__POOL01
:
24939 decode_opc_mxu__pool01(env
, ctx
);
24941 case OPC_MXU__POOL02
:
24942 decode_opc_mxu__pool02(env
, ctx
);
24944 case OPC_MXU_D16MUL
:
24945 /* TODO: Implement emulation of D16MUL instruction. */
24946 MIPS_INVAL("OPC_MXU_D16MUL");
24947 generate_exception_end(ctx
, EXCP_RI
);
24949 case OPC_MXU__POOL03
:
24950 decode_opc_mxu__pool03(env
, ctx
);
24952 case OPC_MXU_D16MAC
:
24953 /* TODO: Implement emulation of D16MAC instruction. */
24954 MIPS_INVAL("OPC_MXU_D16MAC");
24955 generate_exception_end(ctx
, EXCP_RI
);
24957 case OPC_MXU_D16MACF
:
24958 /* TODO: Implement emulation of D16MACF instruction. */
24959 MIPS_INVAL("OPC_MXU_D16MACF");
24960 generate_exception_end(ctx
, EXCP_RI
);
24962 case OPC_MXU_D16MADL
:
24963 /* TODO: Implement emulation of D16MADL instruction. */
24964 MIPS_INVAL("OPC_MXU_D16MADL");
24965 generate_exception_end(ctx
, EXCP_RI
);
24967 case OPC_MXU_S16MAD
:
24968 /* TODO: Implement emulation of S16MAD instruction. */
24969 MIPS_INVAL("OPC_MXU_S16MAD");
24970 generate_exception_end(ctx
, EXCP_RI
);
24972 case OPC_MXU_Q16ADD
:
24973 /* TODO: Implement emulation of Q16ADD instruction. */
24974 MIPS_INVAL("OPC_MXU_Q16ADD");
24975 generate_exception_end(ctx
, EXCP_RI
);
24977 case OPC_MXU_D16MACE
:
24978 /* TODO: Implement emulation of D16MACE instruction. */
24979 MIPS_INVAL("OPC_MXU_D16MACE");
24980 generate_exception_end(ctx
, EXCP_RI
);
24982 case OPC_MXU__POOL04
:
24983 decode_opc_mxu__pool04(env
, ctx
);
24985 case OPC_MXU__POOL05
:
24986 decode_opc_mxu__pool05(env
, ctx
);
24988 case OPC_MXU__POOL06
:
24989 decode_opc_mxu__pool06(env
, ctx
);
24991 case OPC_MXU__POOL07
:
24992 decode_opc_mxu__pool07(env
, ctx
);
24994 case OPC_MXU__POOL08
:
24995 decode_opc_mxu__pool08(env
, ctx
);
24997 case OPC_MXU__POOL09
:
24998 decode_opc_mxu__pool09(env
, ctx
);
25000 case OPC_MXU__POOL10
:
25001 decode_opc_mxu__pool10(env
, ctx
);
25003 case OPC_MXU__POOL11
:
25004 decode_opc_mxu__pool11(env
, ctx
);
25006 case OPC_MXU_D32ADD
:
25007 /* TODO: Implement emulation of D32ADD instruction. */
25008 MIPS_INVAL("OPC_MXU_D32ADD");
25009 generate_exception_end(ctx
, EXCP_RI
);
25011 case OPC_MXU__POOL12
:
25012 decode_opc_mxu__pool12(env
, ctx
);
25014 case OPC_MXU__POOL13
:
25015 decode_opc_mxu__pool13(env
, ctx
);
25017 case OPC_MXU__POOL14
:
25018 decode_opc_mxu__pool14(env
, ctx
);
25020 case OPC_MXU_Q8ACCE
:
25021 /* TODO: Implement emulation of Q8ACCE instruction. */
25022 MIPS_INVAL("OPC_MXU_Q8ACCE");
25023 generate_exception_end(ctx
, EXCP_RI
);
25025 case OPC_MXU_S8LDD
:
25026 /* TODO: Implement emulation of S8LDD instruction. */
25027 MIPS_INVAL("OPC_MXU_S8LDD");
25028 generate_exception_end(ctx
, EXCP_RI
);
25030 case OPC_MXU_S8STD
:
25031 /* TODO: Implement emulation of S8STD instruction. */
25032 MIPS_INVAL("OPC_MXU_S8STD");
25033 generate_exception_end(ctx
, EXCP_RI
);
25035 case OPC_MXU_S8LDI
:
25036 /* TODO: Implement emulation of S8LDI instruction. */
25037 MIPS_INVAL("OPC_MXU_S8LDI");
25038 generate_exception_end(ctx
, EXCP_RI
);
25040 case OPC_MXU_S8SDI
:
25041 /* TODO: Implement emulation of S8SDI instruction. */
25042 MIPS_INVAL("OPC_MXU_S8SDI");
25043 generate_exception_end(ctx
, EXCP_RI
);
25045 case OPC_MXU__POOL15
:
25046 decode_opc_mxu__pool15(env
, ctx
);
25048 case OPC_MXU__POOL16
:
25049 decode_opc_mxu__pool16(env
, ctx
);
25052 /* TODO: Implement emulation of LXB instruction. */
25053 MIPS_INVAL("OPC_MXU_LXB");
25054 generate_exception_end(ctx
, EXCP_RI
);
25056 case OPC_MXU_S16LDD
:
25057 /* TODO: Implement emulation of S16LDD instruction. */
25058 MIPS_INVAL("OPC_MXU_S16LDD");
25059 generate_exception_end(ctx
, EXCP_RI
);
25061 case OPC_MXU_S16STD
:
25062 /* TODO: Implement emulation of S16STD instruction. */
25063 MIPS_INVAL("OPC_MXU_S16STD");
25064 generate_exception_end(ctx
, EXCP_RI
);
25066 case OPC_MXU_S16LDI
:
25067 /* TODO: Implement emulation of S16LDI instruction. */
25068 MIPS_INVAL("OPC_MXU_S16LDI");
25069 generate_exception_end(ctx
, EXCP_RI
);
25071 case OPC_MXU_S16SDI
:
25072 /* TODO: Implement emulation of S16SDI instruction. */
25073 MIPS_INVAL("OPC_MXU_S16SDI");
25074 generate_exception_end(ctx
, EXCP_RI
);
25076 case OPC_MXU_S32M2I
:
25077 /* TODO: Implement emulation of S32M2I instruction. */
25078 MIPS_INVAL("OPC_MXU_S32M2I");
25079 generate_exception_end(ctx
, EXCP_RI
);
25081 case OPC_MXU_S32I2M
:
25082 /* TODO: Implement emulation of S32I2M instruction. */
25083 MIPS_INVAL("OPC_MXU_S32I2M");
25084 generate_exception_end(ctx
, EXCP_RI
);
25086 case OPC_MXU_D32SLL
:
25087 /* TODO: Implement emulation of D32SLL instruction. */
25088 MIPS_INVAL("OPC_MXU_D32SLL");
25089 generate_exception_end(ctx
, EXCP_RI
);
25091 case OPC_MXU_D32SLR
:
25092 /* TODO: Implement emulation of D32SLR instruction. */
25093 MIPS_INVAL("OPC_MXU_D32SLR");
25094 generate_exception_end(ctx
, EXCP_RI
);
25096 case OPC_MXU_D32SARL
:
25097 /* TODO: Implement emulation of D32SARL instruction. */
25098 MIPS_INVAL("OPC_MXU_D32SARL");
25099 generate_exception_end(ctx
, EXCP_RI
);
25101 case OPC_MXU_D32SAR
:
25102 /* TODO: Implement emulation of D32SAR instruction. */
25103 MIPS_INVAL("OPC_MXU_D32SAR");
25104 generate_exception_end(ctx
, EXCP_RI
);
25106 case OPC_MXU_Q16SLL
:
25107 /* TODO: Implement emulation of Q16SLL instruction. */
25108 MIPS_INVAL("OPC_MXU_Q16SLL");
25109 generate_exception_end(ctx
, EXCP_RI
);
25111 case OPC_MXU_Q16SLR
:
25112 /* TODO: Implement emulation of Q16SLR instruction. */
25113 MIPS_INVAL("OPC_MXU_Q16SLR");
25114 generate_exception_end(ctx
, EXCP_RI
);
25116 case OPC_MXU__POOL17
:
25117 decode_opc_mxu__pool17(env
, ctx
);
25119 case OPC_MXU_Q16SAR
:
25120 /* TODO: Implement emulation of Q16SAR instruction. */
25121 MIPS_INVAL("OPC_MXU_Q16SAR");
25122 generate_exception_end(ctx
, EXCP_RI
);
25124 case OPC_MXU__POOL18
:
25125 decode_opc_mxu__pool18(env
, ctx
);
25127 case OPC_MXU__POOL19
:
25128 decode_opc_mxu__pool19(env
, ctx
);
25130 case OPC_MXU__POOL20
:
25131 decode_opc_mxu__pool20(env
, ctx
);
25133 case OPC_MXU_Q16SCOP
:
25134 /* TODO: Implement emulation of Q16SCOP instruction. */
25135 MIPS_INVAL("OPC_MXU_Q16SCOP");
25136 generate_exception_end(ctx
, EXCP_RI
);
25138 case OPC_MXU_Q8MADL
:
25139 /* TODO: Implement emulation of Q8MADL instruction. */
25140 MIPS_INVAL("OPC_MXU_Q8MADL");
25141 generate_exception_end(ctx
, EXCP_RI
);
25143 case OPC_MXU_S32SFL
:
25144 /* TODO: Implement emulation of S32SFL instruction. */
25145 MIPS_INVAL("OPC_MXU_S32SFL");
25146 generate_exception_end(ctx
, EXCP_RI
);
25148 case OPC_MXU_Q8SAD
:
25149 /* TODO: Implement emulation of Q8SAD instruction. */
25150 MIPS_INVAL("OPC_MXU_Q8SAD");
25151 generate_exception_end(ctx
, EXCP_RI
);
25154 MIPS_INVAL("decode_opc_mxu");
25155 generate_exception_end(ctx
, EXCP_RI
);
25160 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
25165 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25167 rs
= (ctx
->opcode
>> 21) & 0x1f;
25168 rt
= (ctx
->opcode
>> 16) & 0x1f;
25169 rd
= (ctx
->opcode
>> 11) & 0x1f;
25171 op1
= MASK_SPECIAL2(ctx
->opcode
);
25173 case OPC_MADD
: /* Multiply and add/sub */
25177 check_insn(ctx
, ISA_MIPS32
);
25178 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
25181 gen_arith(ctx
, op1
, rd
, rs
, rt
);
25184 case OPC_DIVU_G_2F
:
25185 case OPC_MULT_G_2F
:
25186 case OPC_MULTU_G_2F
:
25188 case OPC_MODU_G_2F
:
25189 check_insn(ctx
, INSN_LOONGSON2F
);
25190 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25194 check_insn(ctx
, ISA_MIPS32
);
25195 gen_cl(ctx
, op1
, rd
, rs
);
25198 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
25199 gen_helper_do_semihosting(cpu_env
);
25201 /* XXX: not clear which exception should be raised
25202 * when in debug mode...
25204 check_insn(ctx
, ISA_MIPS32
);
25205 generate_exception_end(ctx
, EXCP_DBp
);
25208 #if defined(TARGET_MIPS64)
25211 check_insn(ctx
, ISA_MIPS64
);
25212 check_mips_64(ctx
);
25213 gen_cl(ctx
, op1
, rd
, rs
);
25215 case OPC_DMULT_G_2F
:
25216 case OPC_DMULTU_G_2F
:
25217 case OPC_DDIV_G_2F
:
25218 case OPC_DDIVU_G_2F
:
25219 case OPC_DMOD_G_2F
:
25220 case OPC_DMODU_G_2F
:
25221 check_insn(ctx
, INSN_LOONGSON2F
);
25222 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25225 default: /* Invalid */
25226 MIPS_INVAL("special2_legacy");
25227 generate_exception_end(ctx
, EXCP_RI
);
25232 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
25234 int rs
, rt
, rd
, sa
;
25238 rs
= (ctx
->opcode
>> 21) & 0x1f;
25239 rt
= (ctx
->opcode
>> 16) & 0x1f;
25240 rd
= (ctx
->opcode
>> 11) & 0x1f;
25241 sa
= (ctx
->opcode
>> 6) & 0x1f;
25242 imm
= (int16_t)ctx
->opcode
>> 7;
25244 op1
= MASK_SPECIAL3(ctx
->opcode
);
25248 /* hint codes 24-31 are reserved and signal RI */
25249 generate_exception_end(ctx
, EXCP_RI
);
25251 /* Treat as NOP. */
25254 check_cp0_enabled(ctx
);
25255 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
25256 gen_cache_operation(ctx
, rt
, rs
, imm
);
25260 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
25263 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25268 /* Treat as NOP. */
25271 op2
= MASK_BSHFL(ctx
->opcode
);
25277 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
25280 gen_bitswap(ctx
, op2
, rd
, rt
);
25285 #if defined(TARGET_MIPS64)
25287 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
25290 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25293 check_mips_64(ctx
);
25296 /* Treat as NOP. */
25299 op2
= MASK_DBSHFL(ctx
->opcode
);
25309 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
25312 gen_bitswap(ctx
, op2
, rd
, rt
);
25319 default: /* Invalid */
25320 MIPS_INVAL("special3_r6");
25321 generate_exception_end(ctx
, EXCP_RI
);
25326 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
25331 rs
= (ctx
->opcode
>> 21) & 0x1f;
25332 rt
= (ctx
->opcode
>> 16) & 0x1f;
25333 rd
= (ctx
->opcode
>> 11) & 0x1f;
25335 op1
= MASK_SPECIAL3(ctx
->opcode
);
25338 case OPC_DIVU_G_2E
:
25340 case OPC_MODU_G_2E
:
25341 case OPC_MULT_G_2E
:
25342 case OPC_MULTU_G_2E
:
25343 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
25344 * the same mask and op1. */
25345 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
25346 op2
= MASK_ADDUH_QB(ctx
->opcode
);
25349 case OPC_ADDUH_R_QB
:
25351 case OPC_ADDQH_R_PH
:
25353 case OPC_ADDQH_R_W
:
25355 case OPC_SUBUH_R_QB
:
25357 case OPC_SUBQH_R_PH
:
25359 case OPC_SUBQH_R_W
:
25360 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25365 case OPC_MULQ_RS_W
:
25366 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25369 MIPS_INVAL("MASK ADDUH.QB");
25370 generate_exception_end(ctx
, EXCP_RI
);
25373 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
25374 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25376 generate_exception_end(ctx
, EXCP_RI
);
25380 op2
= MASK_LX(ctx
->opcode
);
25382 #if defined(TARGET_MIPS64)
25388 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
25390 default: /* Invalid */
25391 MIPS_INVAL("MASK LX");
25392 generate_exception_end(ctx
, EXCP_RI
);
25396 case OPC_ABSQ_S_PH_DSP
:
25397 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
25399 case OPC_ABSQ_S_QB
:
25400 case OPC_ABSQ_S_PH
:
25402 case OPC_PRECEQ_W_PHL
:
25403 case OPC_PRECEQ_W_PHR
:
25404 case OPC_PRECEQU_PH_QBL
:
25405 case OPC_PRECEQU_PH_QBR
:
25406 case OPC_PRECEQU_PH_QBLA
:
25407 case OPC_PRECEQU_PH_QBRA
:
25408 case OPC_PRECEU_PH_QBL
:
25409 case OPC_PRECEU_PH_QBR
:
25410 case OPC_PRECEU_PH_QBLA
:
25411 case OPC_PRECEU_PH_QBRA
:
25412 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25419 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
25422 MIPS_INVAL("MASK ABSQ_S.PH");
25423 generate_exception_end(ctx
, EXCP_RI
);
25427 case OPC_ADDU_QB_DSP
:
25428 op2
= MASK_ADDU_QB(ctx
->opcode
);
25431 case OPC_ADDQ_S_PH
:
25434 case OPC_ADDU_S_QB
:
25436 case OPC_ADDU_S_PH
:
25438 case OPC_SUBQ_S_PH
:
25441 case OPC_SUBU_S_QB
:
25443 case OPC_SUBU_S_PH
:
25447 case OPC_RADDU_W_QB
:
25448 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25450 case OPC_MULEU_S_PH_QBL
:
25451 case OPC_MULEU_S_PH_QBR
:
25452 case OPC_MULQ_RS_PH
:
25453 case OPC_MULEQ_S_W_PHL
:
25454 case OPC_MULEQ_S_W_PHR
:
25455 case OPC_MULQ_S_PH
:
25456 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25458 default: /* Invalid */
25459 MIPS_INVAL("MASK ADDU.QB");
25460 generate_exception_end(ctx
, EXCP_RI
);
25465 case OPC_CMPU_EQ_QB_DSP
:
25466 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
25468 case OPC_PRECR_SRA_PH_W
:
25469 case OPC_PRECR_SRA_R_PH_W
:
25470 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
25472 case OPC_PRECR_QB_PH
:
25473 case OPC_PRECRQ_QB_PH
:
25474 case OPC_PRECRQ_PH_W
:
25475 case OPC_PRECRQ_RS_PH_W
:
25476 case OPC_PRECRQU_S_QB_PH
:
25477 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25479 case OPC_CMPU_EQ_QB
:
25480 case OPC_CMPU_LT_QB
:
25481 case OPC_CMPU_LE_QB
:
25482 case OPC_CMP_EQ_PH
:
25483 case OPC_CMP_LT_PH
:
25484 case OPC_CMP_LE_PH
:
25485 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25487 case OPC_CMPGU_EQ_QB
:
25488 case OPC_CMPGU_LT_QB
:
25489 case OPC_CMPGU_LE_QB
:
25490 case OPC_CMPGDU_EQ_QB
:
25491 case OPC_CMPGDU_LT_QB
:
25492 case OPC_CMPGDU_LE_QB
:
25495 case OPC_PACKRL_PH
:
25496 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25498 default: /* Invalid */
25499 MIPS_INVAL("MASK CMPU.EQ.QB");
25500 generate_exception_end(ctx
, EXCP_RI
);
25504 case OPC_SHLL_QB_DSP
:
25505 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
25507 case OPC_DPA_W_PH_DSP
:
25508 op2
= MASK_DPA_W_PH(ctx
->opcode
);
25510 case OPC_DPAU_H_QBL
:
25511 case OPC_DPAU_H_QBR
:
25512 case OPC_DPSU_H_QBL
:
25513 case OPC_DPSU_H_QBR
:
25515 case OPC_DPAX_W_PH
:
25516 case OPC_DPAQ_S_W_PH
:
25517 case OPC_DPAQX_S_W_PH
:
25518 case OPC_DPAQX_SA_W_PH
:
25520 case OPC_DPSX_W_PH
:
25521 case OPC_DPSQ_S_W_PH
:
25522 case OPC_DPSQX_S_W_PH
:
25523 case OPC_DPSQX_SA_W_PH
:
25524 case OPC_MULSAQ_S_W_PH
:
25525 case OPC_DPAQ_SA_L_W
:
25526 case OPC_DPSQ_SA_L_W
:
25527 case OPC_MAQ_S_W_PHL
:
25528 case OPC_MAQ_S_W_PHR
:
25529 case OPC_MAQ_SA_W_PHL
:
25530 case OPC_MAQ_SA_W_PHR
:
25531 case OPC_MULSA_W_PH
:
25532 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25534 default: /* Invalid */
25535 MIPS_INVAL("MASK DPAW.PH");
25536 generate_exception_end(ctx
, EXCP_RI
);
25541 op2
= MASK_INSV(ctx
->opcode
);
25552 t0
= tcg_temp_new();
25553 t1
= tcg_temp_new();
25555 gen_load_gpr(t0
, rt
);
25556 gen_load_gpr(t1
, rs
);
25558 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
25564 default: /* Invalid */
25565 MIPS_INVAL("MASK INSV");
25566 generate_exception_end(ctx
, EXCP_RI
);
25570 case OPC_APPEND_DSP
:
25571 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
25573 case OPC_EXTR_W_DSP
:
25574 op2
= MASK_EXTR_W(ctx
->opcode
);
25578 case OPC_EXTR_RS_W
:
25580 case OPC_EXTRV_S_H
:
25582 case OPC_EXTRV_R_W
:
25583 case OPC_EXTRV_RS_W
:
25588 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
25591 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25597 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25599 default: /* Invalid */
25600 MIPS_INVAL("MASK EXTR.W");
25601 generate_exception_end(ctx
, EXCP_RI
);
25605 #if defined(TARGET_MIPS64)
25606 case OPC_DDIV_G_2E
:
25607 case OPC_DDIVU_G_2E
:
25608 case OPC_DMULT_G_2E
:
25609 case OPC_DMULTU_G_2E
:
25610 case OPC_DMOD_G_2E
:
25611 case OPC_DMODU_G_2E
:
25612 check_insn(ctx
, INSN_LOONGSON2E
);
25613 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25615 case OPC_ABSQ_S_QH_DSP
:
25616 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
25618 case OPC_PRECEQ_L_PWL
:
25619 case OPC_PRECEQ_L_PWR
:
25620 case OPC_PRECEQ_PW_QHL
:
25621 case OPC_PRECEQ_PW_QHR
:
25622 case OPC_PRECEQ_PW_QHLA
:
25623 case OPC_PRECEQ_PW_QHRA
:
25624 case OPC_PRECEQU_QH_OBL
:
25625 case OPC_PRECEQU_QH_OBR
:
25626 case OPC_PRECEQU_QH_OBLA
:
25627 case OPC_PRECEQU_QH_OBRA
:
25628 case OPC_PRECEU_QH_OBL
:
25629 case OPC_PRECEU_QH_OBR
:
25630 case OPC_PRECEU_QH_OBLA
:
25631 case OPC_PRECEU_QH_OBRA
:
25632 case OPC_ABSQ_S_OB
:
25633 case OPC_ABSQ_S_PW
:
25634 case OPC_ABSQ_S_QH
:
25635 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25643 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
25645 default: /* Invalid */
25646 MIPS_INVAL("MASK ABSQ_S.QH");
25647 generate_exception_end(ctx
, EXCP_RI
);
25651 case OPC_ADDU_OB_DSP
:
25652 op2
= MASK_ADDU_OB(ctx
->opcode
);
25654 case OPC_RADDU_L_OB
:
25656 case OPC_SUBQ_S_PW
:
25658 case OPC_SUBQ_S_QH
:
25660 case OPC_SUBU_S_OB
:
25662 case OPC_SUBU_S_QH
:
25664 case OPC_SUBUH_R_OB
:
25666 case OPC_ADDQ_S_PW
:
25668 case OPC_ADDQ_S_QH
:
25670 case OPC_ADDU_S_OB
:
25672 case OPC_ADDU_S_QH
:
25674 case OPC_ADDUH_R_OB
:
25675 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25677 case OPC_MULEQ_S_PW_QHL
:
25678 case OPC_MULEQ_S_PW_QHR
:
25679 case OPC_MULEU_S_QH_OBL
:
25680 case OPC_MULEU_S_QH_OBR
:
25681 case OPC_MULQ_RS_QH
:
25682 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25684 default: /* Invalid */
25685 MIPS_INVAL("MASK ADDU.OB");
25686 generate_exception_end(ctx
, EXCP_RI
);
25690 case OPC_CMPU_EQ_OB_DSP
:
25691 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
25693 case OPC_PRECR_SRA_QH_PW
:
25694 case OPC_PRECR_SRA_R_QH_PW
:
25695 /* Return value is rt. */
25696 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
25698 case OPC_PRECR_OB_QH
:
25699 case OPC_PRECRQ_OB_QH
:
25700 case OPC_PRECRQ_PW_L
:
25701 case OPC_PRECRQ_QH_PW
:
25702 case OPC_PRECRQ_RS_QH_PW
:
25703 case OPC_PRECRQU_S_OB_QH
:
25704 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
25706 case OPC_CMPU_EQ_OB
:
25707 case OPC_CMPU_LT_OB
:
25708 case OPC_CMPU_LE_OB
:
25709 case OPC_CMP_EQ_QH
:
25710 case OPC_CMP_LT_QH
:
25711 case OPC_CMP_LE_QH
:
25712 case OPC_CMP_EQ_PW
:
25713 case OPC_CMP_LT_PW
:
25714 case OPC_CMP_LE_PW
:
25715 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25717 case OPC_CMPGDU_EQ_OB
:
25718 case OPC_CMPGDU_LT_OB
:
25719 case OPC_CMPGDU_LE_OB
:
25720 case OPC_CMPGU_EQ_OB
:
25721 case OPC_CMPGU_LT_OB
:
25722 case OPC_CMPGU_LE_OB
:
25723 case OPC_PACKRL_PW
:
25727 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
25729 default: /* Invalid */
25730 MIPS_INVAL("MASK CMPU_EQ.OB");
25731 generate_exception_end(ctx
, EXCP_RI
);
25735 case OPC_DAPPEND_DSP
:
25736 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
25738 case OPC_DEXTR_W_DSP
:
25739 op2
= MASK_DEXTR_W(ctx
->opcode
);
25746 case OPC_DEXTR_R_L
:
25747 case OPC_DEXTR_RS_L
:
25749 case OPC_DEXTR_R_W
:
25750 case OPC_DEXTR_RS_W
:
25751 case OPC_DEXTR_S_H
:
25753 case OPC_DEXTRV_R_L
:
25754 case OPC_DEXTRV_RS_L
:
25755 case OPC_DEXTRV_S_H
:
25757 case OPC_DEXTRV_R_W
:
25758 case OPC_DEXTRV_RS_W
:
25759 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
25764 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25766 default: /* Invalid */
25767 MIPS_INVAL("MASK EXTR.W");
25768 generate_exception_end(ctx
, EXCP_RI
);
25772 case OPC_DPAQ_W_QH_DSP
:
25773 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
25775 case OPC_DPAU_H_OBL
:
25776 case OPC_DPAU_H_OBR
:
25777 case OPC_DPSU_H_OBL
:
25778 case OPC_DPSU_H_OBR
:
25780 case OPC_DPAQ_S_W_QH
:
25782 case OPC_DPSQ_S_W_QH
:
25783 case OPC_MULSAQ_S_W_QH
:
25784 case OPC_DPAQ_SA_L_PW
:
25785 case OPC_DPSQ_SA_L_PW
:
25786 case OPC_MULSAQ_S_L_PW
:
25787 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25789 case OPC_MAQ_S_W_QHLL
:
25790 case OPC_MAQ_S_W_QHLR
:
25791 case OPC_MAQ_S_W_QHRL
:
25792 case OPC_MAQ_S_W_QHRR
:
25793 case OPC_MAQ_SA_W_QHLL
:
25794 case OPC_MAQ_SA_W_QHLR
:
25795 case OPC_MAQ_SA_W_QHRL
:
25796 case OPC_MAQ_SA_W_QHRR
:
25797 case OPC_MAQ_S_L_PWL
:
25798 case OPC_MAQ_S_L_PWR
:
25803 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
25805 default: /* Invalid */
25806 MIPS_INVAL("MASK DPAQ.W.QH");
25807 generate_exception_end(ctx
, EXCP_RI
);
25811 case OPC_DINSV_DSP
:
25812 op2
= MASK_INSV(ctx
->opcode
);
25823 t0
= tcg_temp_new();
25824 t1
= tcg_temp_new();
25826 gen_load_gpr(t0
, rt
);
25827 gen_load_gpr(t1
, rs
);
25829 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
25835 default: /* Invalid */
25836 MIPS_INVAL("MASK DINSV");
25837 generate_exception_end(ctx
, EXCP_RI
);
25841 case OPC_SHLL_OB_DSP
:
25842 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
25845 default: /* Invalid */
25846 MIPS_INVAL("special3_legacy");
25847 generate_exception_end(ctx
, EXCP_RI
);
25852 static void decode_tx79_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
25854 uint32_t opc
= MASK_TX79_MMI0(ctx
->opcode
);
25857 case TX79_MMI0_PADDW
: /* TODO: TX79_MMI0_PADDW */
25858 case TX79_MMI0_PSUBW
: /* TODO: TX79_MMI0_PSUBW */
25859 case TX79_MMI0_PCGTW
: /* TODO: TX79_MMI0_PCGTW */
25860 case TX79_MMI0_PMAXW
: /* TODO: TX79_MMI0_PMAXW */
25861 case TX79_MMI0_PADDH
: /* TODO: TX79_MMI0_PADDH */
25862 case TX79_MMI0_PSUBH
: /* TODO: TX79_MMI0_PSUBH */
25863 case TX79_MMI0_PCGTH
: /* TODO: TX79_MMI0_PCGTH */
25864 case TX79_MMI0_PMAXH
: /* TODO: TX79_MMI0_PMAXH */
25865 case TX79_MMI0_PADDB
: /* TODO: TX79_MMI0_PADDB */
25866 case TX79_MMI0_PSUBB
: /* TODO: TX79_MMI0_PSUBB */
25867 case TX79_MMI0_PCGTB
: /* TODO: TX79_MMI0_PCGTB */
25868 case TX79_MMI0_PADDSW
: /* TODO: TX79_MMI0_PADDSW */
25869 case TX79_MMI0_PSUBSW
: /* TODO: TX79_MMI0_PSUBSW */
25870 case TX79_MMI0_PEXTLW
: /* TODO: TX79_MMI0_PEXTLW */
25871 case TX79_MMI0_PPACW
: /* TODO: TX79_MMI0_PPACW */
25872 case TX79_MMI0_PADDSH
: /* TODO: TX79_MMI0_PADDSH */
25873 case TX79_MMI0_PSUBSH
: /* TODO: TX79_MMI0_PSUBSH */
25874 case TX79_MMI0_PEXTLH
: /* TODO: TX79_MMI0_PEXTLH */
25875 case TX79_MMI0_PPACH
: /* TODO: TX79_MMI0_PPACH */
25876 case TX79_MMI0_PADDSB
: /* TODO: TX79_MMI0_PADDSB */
25877 case TX79_MMI0_PSUBSB
: /* TODO: TX79_MMI0_PSUBSB */
25878 case TX79_MMI0_PEXTLB
: /* TODO: TX79_MMI0_PEXTLB */
25879 case TX79_MMI0_PPACB
: /* TODO: TX79_MMI0_PPACB */
25880 case TX79_MMI0_PEXT5
: /* TODO: TX79_MMI0_PEXT5 */
25881 case TX79_MMI0_PPAC5
: /* TODO: TX79_MMI0_PPAC5 */
25882 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI0 */
25885 MIPS_INVAL("TX79 MMI class MMI0");
25886 generate_exception_end(ctx
, EXCP_RI
);
25891 static void decode_tx79_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
25893 uint32_t opc
= MASK_TX79_MMI1(ctx
->opcode
);
25896 case TX79_MMI1_PABSW
: /* TODO: TX79_MMI1_PABSW */
25897 case TX79_MMI1_PCEQW
: /* TODO: TX79_MMI1_PCEQW */
25898 case TX79_MMI1_PMINW
: /* TODO: TX79_MMI1_PMINW */
25899 case TX79_MMI1_PADSBH
: /* TODO: TX79_MMI1_PADSBH */
25900 case TX79_MMI1_PABSH
: /* TODO: TX79_MMI1_PABSH */
25901 case TX79_MMI1_PCEQH
: /* TODO: TX79_MMI1_PCEQH */
25902 case TX79_MMI1_PMINH
: /* TODO: TX79_MMI1_PMINH */
25903 case TX79_MMI1_PCEQB
: /* TODO: TX79_MMI1_PCEQB */
25904 case TX79_MMI1_PADDUW
: /* TODO: TX79_MMI1_PADDUW */
25905 case TX79_MMI1_PSUBUW
: /* TODO: TX79_MMI1_PSUBUW */
25906 case TX79_MMI1_PEXTUW
: /* TODO: TX79_MMI1_PEXTUW */
25907 case TX79_MMI1_PADDUH
: /* TODO: TX79_MMI1_PADDUH */
25908 case TX79_MMI1_PSUBUH
: /* TODO: TX79_MMI1_PSUBUH */
25909 case TX79_MMI1_PEXTUH
: /* TODO: TX79_MMI1_PEXTUH */
25910 case TX79_MMI1_PADDUB
: /* TODO: TX79_MMI1_PADDUB */
25911 case TX79_MMI1_PSUBUB
: /* TODO: TX79_MMI1_PSUBUB */
25912 case TX79_MMI1_PEXTUB
: /* TODO: TX79_MMI1_PEXTUB */
25913 case TX79_MMI1_QFSRV
: /* TODO: TX79_MMI1_QFSRV */
25914 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI1 */
25917 MIPS_INVAL("TX79 MMI class MMI1");
25918 generate_exception_end(ctx
, EXCP_RI
);
25923 static void decode_tx79_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
25925 uint32_t opc
= MASK_TX79_MMI2(ctx
->opcode
);
25928 case TX79_MMI2_PMADDW
: /* TODO: TX79_MMI2_PMADDW */
25929 case TX79_MMI2_PSLLVW
: /* TODO: TX79_MMI2_PSLLVW */
25930 case TX79_MMI2_PSRLVW
: /* TODO: TX79_MMI2_PSRLVW */
25931 case TX79_MMI2_PMSUBW
: /* TODO: TX79_MMI2_PMSUBW */
25932 case TX79_MMI2_PMFHI
: /* TODO: TX79_MMI2_PMFHI */
25933 case TX79_MMI2_PMFLO
: /* TODO: TX79_MMI2_PMFLO */
25934 case TX79_MMI2_PINTH
: /* TODO: TX79_MMI2_PINTH */
25935 case TX79_MMI2_PMULTW
: /* TODO: TX79_MMI2_PMULTW */
25936 case TX79_MMI2_PDIVW
: /* TODO: TX79_MMI2_PDIVW */
25937 case TX79_MMI2_PCPYLD
: /* TODO: TX79_MMI2_PCPYLD */
25938 case TX79_MMI2_PMADDH
: /* TODO: TX79_MMI2_PMADDH */
25939 case TX79_MMI2_PHMADH
: /* TODO: TX79_MMI2_PHMADH */
25940 case TX79_MMI2_PAND
: /* TODO: TX79_MMI2_PAND */
25941 case TX79_MMI2_PXOR
: /* TODO: TX79_MMI2_PXOR */
25942 case TX79_MMI2_PMSUBH
: /* TODO: TX79_MMI2_PMSUBH */
25943 case TX79_MMI2_PHMSBH
: /* TODO: TX79_MMI2_PHMSBH */
25944 case TX79_MMI2_PEXEH
: /* TODO: TX79_MMI2_PEXEH */
25945 case TX79_MMI2_PREVH
: /* TODO: TX79_MMI2_PREVH */
25946 case TX79_MMI2_PMULTH
: /* TODO: TX79_MMI2_PMULTH */
25947 case TX79_MMI2_PDIVBW
: /* TODO: TX79_MMI2_PDIVBW */
25948 case TX79_MMI2_PEXEW
: /* TODO: TX79_MMI2_PEXEW */
25949 case TX79_MMI2_PROT3W
: /* TODO: TX79_MMI2_PROT3W */
25950 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI2 */
25953 MIPS_INVAL("TX79 MMI class MMI2");
25954 generate_exception_end(ctx
, EXCP_RI
);
25959 static void decode_tx79_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
25961 uint32_t opc
= MASK_TX79_MMI3(ctx
->opcode
);
25964 case TX79_MMI3_PMADDUW
: /* TODO: TX79_MMI3_PMADDUW */
25965 case TX79_MMI3_PSRAVW
: /* TODO: TX79_MMI3_PSRAVW */
25966 case TX79_MMI3_PMTHI
: /* TODO: TX79_MMI3_PMTHI */
25967 case TX79_MMI3_PMTLO
: /* TODO: TX79_MMI3_PMTLO */
25968 case TX79_MMI3_PINTEH
: /* TODO: TX79_MMI3_PINTEH */
25969 case TX79_MMI3_PMULTUW
: /* TODO: TX79_MMI3_PMULTUW */
25970 case TX79_MMI3_PDIVUW
: /* TODO: TX79_MMI3_PDIVUW */
25971 case TX79_MMI3_PCPYUD
: /* TODO: TX79_MMI3_PCPYUD */
25972 case TX79_MMI3_POR
: /* TODO: TX79_MMI3_POR */
25973 case TX79_MMI3_PNOR
: /* TODO: TX79_MMI3_PNOR */
25974 case TX79_MMI3_PEXCH
: /* TODO: TX79_MMI3_PEXCH */
25975 case TX79_MMI3_PCPYH
: /* TODO: TX79_MMI3_PCPYH */
25976 case TX79_MMI3_PEXCW
: /* TODO: TX79_MMI3_PEXCW */
25977 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI3 */
25980 MIPS_INVAL("TX79 MMI class MMI3");
25981 generate_exception_end(ctx
, EXCP_RI
);
25986 static void decode_tx79_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
25988 uint32_t opc
= MASK_TX79_MMI(ctx
->opcode
);
25989 int rs
= extract32(ctx
->opcode
, 21, 5);
25990 int rt
= extract32(ctx
->opcode
, 16, 5);
25991 int rd
= extract32(ctx
->opcode
, 11, 5);
25994 case TX79_MMI_CLASS_MMI0
:
25995 decode_tx79_mmi0(env
, ctx
);
25997 case TX79_MMI_CLASS_MMI1
:
25998 decode_tx79_mmi1(env
, ctx
);
26000 case TX79_MMI_CLASS_MMI2
:
26001 decode_tx79_mmi2(env
, ctx
);
26003 case TX79_MMI_CLASS_MMI3
:
26004 decode_tx79_mmi3(env
, ctx
);
26006 case TX79_MMI_MULT1
:
26007 case TX79_MMI_MULTU1
:
26008 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
26010 case TX79_MMI_DIV1
:
26011 case TX79_MMI_DIVU1
:
26012 gen_muldiv(ctx
, opc
, 1, rs
, rt
);
26014 case TX79_MMI_MTLO1
:
26015 case TX79_MMI_MTHI1
:
26016 gen_HILO(ctx
, opc
, 1, rs
);
26018 case TX79_MMI_MFLO1
:
26019 case TX79_MMI_MFHI1
:
26020 gen_HILO(ctx
, opc
, 1, rd
);
26022 case TX79_MMI_MADD
: /* TODO: TX79_MMI_MADD */
26023 case TX79_MMI_MADDU
: /* TODO: TX79_MMI_MADDU */
26024 case TX79_MMI_PLZCW
: /* TODO: TX79_MMI_PLZCW */
26025 case TX79_MMI_MADD1
: /* TODO: TX79_MMI_MADD1 */
26026 case TX79_MMI_MADDU1
: /* TODO: TX79_MMI_MADDU1 */
26027 case TX79_MMI_PMFHL
: /* TODO: TX79_MMI_PMFHL */
26028 case TX79_MMI_PMTHL
: /* TODO: TX79_MMI_PMTHL */
26029 case TX79_MMI_PSLLH
: /* TODO: TX79_MMI_PSLLH */
26030 case TX79_MMI_PSRLH
: /* TODO: TX79_MMI_PSRLH */
26031 case TX79_MMI_PSRAH
: /* TODO: TX79_MMI_PSRAH */
26032 case TX79_MMI_PSLLW
: /* TODO: TX79_MMI_PSLLW */
26033 case TX79_MMI_PSRLW
: /* TODO: TX79_MMI_PSRLW */
26034 case TX79_MMI_PSRAW
: /* TODO: TX79_MMI_PSRAW */
26035 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_CLASS_MMI */
26038 MIPS_INVAL("TX79 MMI class");
26039 generate_exception_end(ctx
, EXCP_RI
);
26044 static void decode_tx79_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
26046 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_LQ */
26049 static void gen_tx79_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
26051 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_SQ */
26055 * The TX79-specific instruction Store Quadword
26057 * +--------+-------+-------+------------------------+
26058 * | 011111 | base | rt | offset | SQ
26059 * +--------+-------+-------+------------------------+
26062 * has the same opcode as the Read Hardware Register instruction
26064 * +--------+-------+-------+-------+-------+--------+
26065 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
26066 * +--------+-------+-------+-------+-------+--------+
26069 * that is required, trapped and emulated by the Linux kernel. However, all
26070 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
26071 * offset is odd. Therefore all valid SQ instructions can execute normally.
26072 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
26073 * between SQ and RDHWR, as the Linux kernel does.
26075 static void decode_tx79_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
26077 int base
= extract32(ctx
->opcode
, 21, 5);
26078 int rt
= extract32(ctx
->opcode
, 16, 5);
26079 int offset
= extract32(ctx
->opcode
, 0, 16);
26081 #ifdef CONFIG_USER_ONLY
26082 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
26083 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
26085 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
26086 int rd
= extract32(ctx
->opcode
, 11, 5);
26088 gen_rdhwr(ctx
, rt
, rd
, 0);
26093 gen_tx79_sq(ctx
, base
, rt
, offset
);
26096 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
26098 int rs
, rt
, rd
, sa
;
26102 rs
= (ctx
->opcode
>> 21) & 0x1f;
26103 rt
= (ctx
->opcode
>> 16) & 0x1f;
26104 rd
= (ctx
->opcode
>> 11) & 0x1f;
26105 sa
= (ctx
->opcode
>> 6) & 0x1f;
26106 imm
= sextract32(ctx
->opcode
, 7, 9);
26108 op1
= MASK_SPECIAL3(ctx
->opcode
);
26111 * EVA loads and stores overlap Loongson 2E instructions decoded by
26112 * decode_opc_special3_legacy(), so be careful to allow their decoding when
26119 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26127 check_cp0_enabled(ctx
);
26128 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26132 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26137 check_cp0_enabled(ctx
);
26138 gen_st(ctx
, op1
, rt
, rs
, imm
);
26141 check_cp0_enabled(ctx
);
26142 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26145 check_cp0_enabled(ctx
);
26146 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26147 gen_cache_operation(ctx
, rt
, rs
, imm
);
26149 /* Treat as NOP. */
26152 check_cp0_enabled(ctx
);
26153 /* Treat as NOP. */
26161 check_insn(ctx
, ISA_MIPS32R2
);
26162 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26165 op2
= MASK_BSHFL(ctx
->opcode
);
26172 check_insn(ctx
, ISA_MIPS32R6
);
26173 decode_opc_special3_r6(env
, ctx
);
26176 check_insn(ctx
, ISA_MIPS32R2
);
26177 gen_bshfl(ctx
, op2
, rt
, rd
);
26181 #if defined(TARGET_MIPS64)
26188 check_insn(ctx
, ISA_MIPS64R2
);
26189 check_mips_64(ctx
);
26190 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26193 op2
= MASK_DBSHFL(ctx
->opcode
);
26204 check_insn(ctx
, ISA_MIPS32R6
);
26205 decode_opc_special3_r6(env
, ctx
);
26208 check_insn(ctx
, ISA_MIPS64R2
);
26209 check_mips_64(ctx
);
26210 op2
= MASK_DBSHFL(ctx
->opcode
);
26211 gen_bshfl(ctx
, op2
, rt
, rd
);
26217 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
26222 TCGv t0
= tcg_temp_new();
26223 TCGv t1
= tcg_temp_new();
26225 gen_load_gpr(t0
, rt
);
26226 gen_load_gpr(t1
, rs
);
26227 gen_helper_fork(t0
, t1
);
26235 TCGv t0
= tcg_temp_new();
26237 gen_load_gpr(t0
, rs
);
26238 gen_helper_yield(t0
, cpu_env
, t0
);
26239 gen_store_gpr(t0
, rd
);
26244 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26245 decode_opc_special3_r6(env
, ctx
);
26247 decode_opc_special3_legacy(env
, ctx
);
26252 /* MIPS SIMD Architecture (MSA) */
26253 static inline int check_msa_access(DisasContext
*ctx
)
26255 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
26256 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
26257 generate_exception_end(ctx
, EXCP_RI
);
26261 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
26262 if (ctx
->insn_flags
& ASE_MSA
) {
26263 generate_exception_end(ctx
, EXCP_MSADIS
);
26266 generate_exception_end(ctx
, EXCP_RI
);
26273 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
26275 /* generates tcg ops to check if any element is 0 */
26276 /* Note this function only works with MSA_WRLEN = 128 */
26277 uint64_t eval_zero_or_big
= 0;
26278 uint64_t eval_big
= 0;
26279 TCGv_i64 t0
= tcg_temp_new_i64();
26280 TCGv_i64 t1
= tcg_temp_new_i64();
26283 eval_zero_or_big
= 0x0101010101010101ULL
;
26284 eval_big
= 0x8080808080808080ULL
;
26287 eval_zero_or_big
= 0x0001000100010001ULL
;
26288 eval_big
= 0x8000800080008000ULL
;
26291 eval_zero_or_big
= 0x0000000100000001ULL
;
26292 eval_big
= 0x8000000080000000ULL
;
26295 eval_zero_or_big
= 0x0000000000000001ULL
;
26296 eval_big
= 0x8000000000000000ULL
;
26299 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
26300 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
26301 tcg_gen_andi_i64(t0
, t0
, eval_big
);
26302 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
26303 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
26304 tcg_gen_andi_i64(t1
, t1
, eval_big
);
26305 tcg_gen_or_i64(t0
, t0
, t1
);
26306 /* if all bits are zero then all elements are not zero */
26307 /* if some bit is non-zero then some element is zero */
26308 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
26309 tcg_gen_trunc_i64_tl(tresult
, t0
);
26310 tcg_temp_free_i64(t0
);
26311 tcg_temp_free_i64(t1
);
26314 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
26316 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
26317 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
26318 int64_t s16
= (int16_t)ctx
->opcode
;
26320 check_msa_access(ctx
);
26322 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
26323 generate_exception_end(ctx
, EXCP_RI
);
26330 TCGv_i64 t0
= tcg_temp_new_i64();
26331 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
26332 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
26333 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
26334 tcg_gen_trunc_i64_tl(bcond
, t0
);
26335 tcg_temp_free_i64(t0
);
26342 gen_check_zero_element(bcond
, df
, wt
);
26348 gen_check_zero_element(bcond
, df
, wt
);
26349 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
26353 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
26355 ctx
->hflags
|= MIPS_HFLAG_BC
;
26356 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
26359 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
26361 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
26362 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
26363 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26364 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26366 TCGv_i32 twd
= tcg_const_i32(wd
);
26367 TCGv_i32 tws
= tcg_const_i32(ws
);
26368 TCGv_i32 ti8
= tcg_const_i32(i8
);
26370 switch (MASK_MSA_I8(ctx
->opcode
)) {
26372 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
26375 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
26378 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
26381 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
26384 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
26387 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
26390 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
26396 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
26397 if (df
== DF_DOUBLE
) {
26398 generate_exception_end(ctx
, EXCP_RI
);
26400 TCGv_i32 tdf
= tcg_const_i32(df
);
26401 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
26402 tcg_temp_free_i32(tdf
);
26407 MIPS_INVAL("MSA instruction");
26408 generate_exception_end(ctx
, EXCP_RI
);
26412 tcg_temp_free_i32(twd
);
26413 tcg_temp_free_i32(tws
);
26414 tcg_temp_free_i32(ti8
);
26417 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
26419 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
26420 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
26421 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
26422 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
26423 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26424 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26426 TCGv_i32 tdf
= tcg_const_i32(df
);
26427 TCGv_i32 twd
= tcg_const_i32(wd
);
26428 TCGv_i32 tws
= tcg_const_i32(ws
);
26429 TCGv_i32 timm
= tcg_temp_new_i32();
26430 tcg_gen_movi_i32(timm
, u5
);
26432 switch (MASK_MSA_I5(ctx
->opcode
)) {
26434 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
26437 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
26439 case OPC_MAXI_S_df
:
26440 tcg_gen_movi_i32(timm
, s5
);
26441 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
26443 case OPC_MAXI_U_df
:
26444 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
26446 case OPC_MINI_S_df
:
26447 tcg_gen_movi_i32(timm
, s5
);
26448 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
26450 case OPC_MINI_U_df
:
26451 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
26454 tcg_gen_movi_i32(timm
, s5
);
26455 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
26457 case OPC_CLTI_S_df
:
26458 tcg_gen_movi_i32(timm
, s5
);
26459 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
26461 case OPC_CLTI_U_df
:
26462 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
26464 case OPC_CLEI_S_df
:
26465 tcg_gen_movi_i32(timm
, s5
);
26466 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
26468 case OPC_CLEI_U_df
:
26469 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
26473 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
26474 tcg_gen_movi_i32(timm
, s10
);
26475 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
26479 MIPS_INVAL("MSA instruction");
26480 generate_exception_end(ctx
, EXCP_RI
);
26484 tcg_temp_free_i32(tdf
);
26485 tcg_temp_free_i32(twd
);
26486 tcg_temp_free_i32(tws
);
26487 tcg_temp_free_i32(timm
);
26490 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
26492 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
26493 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
26494 uint32_t df
= 0, m
= 0;
26495 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26496 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26503 if ((dfm
& 0x40) == 0x00) {
26506 } else if ((dfm
& 0x60) == 0x40) {
26509 } else if ((dfm
& 0x70) == 0x60) {
26512 } else if ((dfm
& 0x78) == 0x70) {
26516 generate_exception_end(ctx
, EXCP_RI
);
26520 tdf
= tcg_const_i32(df
);
26521 tm
= tcg_const_i32(m
);
26522 twd
= tcg_const_i32(wd
);
26523 tws
= tcg_const_i32(ws
);
26525 switch (MASK_MSA_BIT(ctx
->opcode
)) {
26527 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
26530 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
26533 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
26536 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
26539 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
26542 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
26544 case OPC_BINSLI_df
:
26545 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
26547 case OPC_BINSRI_df
:
26548 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
26551 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
26554 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
26557 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
26560 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
26563 MIPS_INVAL("MSA instruction");
26564 generate_exception_end(ctx
, EXCP_RI
);
26568 tcg_temp_free_i32(tdf
);
26569 tcg_temp_free_i32(tm
);
26570 tcg_temp_free_i32(twd
);
26571 tcg_temp_free_i32(tws
);
26574 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
26576 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
26577 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
26578 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
26579 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26580 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26582 TCGv_i32 tdf
= tcg_const_i32(df
);
26583 TCGv_i32 twd
= tcg_const_i32(wd
);
26584 TCGv_i32 tws
= tcg_const_i32(ws
);
26585 TCGv_i32 twt
= tcg_const_i32(wt
);
26587 switch (MASK_MSA_3R(ctx
->opcode
)) {
26589 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
26592 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26595 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
26598 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
26600 case OPC_SUBS_S_df
:
26601 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26604 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26607 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
26610 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
26613 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
26616 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26618 case OPC_ADDS_A_df
:
26619 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
26621 case OPC_SUBS_U_df
:
26622 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26625 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26628 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
26631 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
26634 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
26637 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26640 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26642 case OPC_ADDS_S_df
:
26643 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26645 case OPC_SUBSUS_U_df
:
26646 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26649 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26652 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
26655 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
26658 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
26661 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26664 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26666 case OPC_ADDS_U_df
:
26667 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26669 case OPC_SUBSUU_S_df
:
26670 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26673 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
26676 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
26679 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26682 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26685 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26687 case OPC_ASUB_S_df
:
26688 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26691 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26694 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
26697 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
26700 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26703 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26706 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26708 case OPC_ASUB_U_df
:
26709 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26712 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26715 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
26718 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
26721 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
26723 case OPC_AVER_S_df
:
26724 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26727 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26730 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
26733 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
26736 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
26738 case OPC_AVER_U_df
:
26739 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26742 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26745 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
26748 case OPC_DOTP_S_df
:
26749 case OPC_DOTP_U_df
:
26750 case OPC_DPADD_S_df
:
26751 case OPC_DPADD_U_df
:
26752 case OPC_DPSUB_S_df
:
26753 case OPC_HADD_S_df
:
26754 case OPC_DPSUB_U_df
:
26755 case OPC_HADD_U_df
:
26756 case OPC_HSUB_S_df
:
26757 case OPC_HSUB_U_df
:
26758 if (df
== DF_BYTE
) {
26759 generate_exception_end(ctx
, EXCP_RI
);
26762 switch (MASK_MSA_3R(ctx
->opcode
)) {
26763 case OPC_DOTP_S_df
:
26764 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26766 case OPC_DOTP_U_df
:
26767 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26769 case OPC_DPADD_S_df
:
26770 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26772 case OPC_DPADD_U_df
:
26773 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26775 case OPC_DPSUB_S_df
:
26776 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26778 case OPC_HADD_S_df
:
26779 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26781 case OPC_DPSUB_U_df
:
26782 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26784 case OPC_HADD_U_df
:
26785 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26787 case OPC_HSUB_S_df
:
26788 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
26790 case OPC_HSUB_U_df
:
26791 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
26796 MIPS_INVAL("MSA instruction");
26797 generate_exception_end(ctx
, EXCP_RI
);
26800 tcg_temp_free_i32(twd
);
26801 tcg_temp_free_i32(tws
);
26802 tcg_temp_free_i32(twt
);
26803 tcg_temp_free_i32(tdf
);
26806 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
26808 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
26809 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
26810 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
26811 TCGv telm
= tcg_temp_new();
26812 TCGv_i32 tsr
= tcg_const_i32(source
);
26813 TCGv_i32 tdt
= tcg_const_i32(dest
);
26815 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
26817 gen_load_gpr(telm
, source
);
26818 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
26821 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
26822 gen_store_gpr(telm
, dest
);
26825 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
26828 MIPS_INVAL("MSA instruction");
26829 generate_exception_end(ctx
, EXCP_RI
);
26833 tcg_temp_free(telm
);
26834 tcg_temp_free_i32(tdt
);
26835 tcg_temp_free_i32(tsr
);
26838 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
26841 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
26842 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26843 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26845 TCGv_i32 tws
= tcg_const_i32(ws
);
26846 TCGv_i32 twd
= tcg_const_i32(wd
);
26847 TCGv_i32 tn
= tcg_const_i32(n
);
26848 TCGv_i32 tdf
= tcg_const_i32(df
);
26850 switch (MASK_MSA_ELM(ctx
->opcode
)) {
26852 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
26854 case OPC_SPLATI_df
:
26855 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
26858 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
26860 case OPC_COPY_S_df
:
26861 case OPC_COPY_U_df
:
26862 case OPC_INSERT_df
:
26863 #if !defined(TARGET_MIPS64)
26864 /* Double format valid only for MIPS64 */
26865 if (df
== DF_DOUBLE
) {
26866 generate_exception_end(ctx
, EXCP_RI
);
26870 switch (MASK_MSA_ELM(ctx
->opcode
)) {
26871 case OPC_COPY_S_df
:
26872 if (likely(wd
!= 0)) {
26873 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
26876 case OPC_COPY_U_df
:
26877 if (likely(wd
!= 0)) {
26878 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
26881 case OPC_INSERT_df
:
26882 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
26887 MIPS_INVAL("MSA instruction");
26888 generate_exception_end(ctx
, EXCP_RI
);
26890 tcg_temp_free_i32(twd
);
26891 tcg_temp_free_i32(tws
);
26892 tcg_temp_free_i32(tn
);
26893 tcg_temp_free_i32(tdf
);
26896 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
26898 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
26899 uint32_t df
= 0, n
= 0;
26901 if ((dfn
& 0x30) == 0x00) {
26904 } else if ((dfn
& 0x38) == 0x20) {
26907 } else if ((dfn
& 0x3c) == 0x30) {
26910 } else if ((dfn
& 0x3e) == 0x38) {
26913 } else if (dfn
== 0x3E) {
26914 /* CTCMSA, CFCMSA, MOVE.V */
26915 gen_msa_elm_3e(env
, ctx
);
26918 generate_exception_end(ctx
, EXCP_RI
);
26922 gen_msa_elm_df(env
, ctx
, df
, n
);
26925 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
26927 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
26928 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
26929 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
26930 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26931 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26933 TCGv_i32 twd
= tcg_const_i32(wd
);
26934 TCGv_i32 tws
= tcg_const_i32(ws
);
26935 TCGv_i32 twt
= tcg_const_i32(wt
);
26936 TCGv_i32 tdf
= tcg_temp_new_i32();
26938 /* adjust df value for floating-point instruction */
26939 tcg_gen_movi_i32(tdf
, df
+ 2);
26941 switch (MASK_MSA_3RF(ctx
->opcode
)) {
26943 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
26946 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
26949 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
26952 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
26955 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
26958 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
26961 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
26964 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
26967 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
26970 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
26973 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
26976 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
26979 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
26982 tcg_gen_movi_i32(tdf
, df
+ 1);
26983 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
26986 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
26989 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
26991 case OPC_MADD_Q_df
:
26992 tcg_gen_movi_i32(tdf
, df
+ 1);
26993 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
26996 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
26998 case OPC_MSUB_Q_df
:
26999 tcg_gen_movi_i32(tdf
, df
+ 1);
27000 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27003 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
27006 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
27009 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27012 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
27015 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
27018 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
27021 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27024 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27027 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
27030 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27033 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
27036 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
27039 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
27041 case OPC_MULR_Q_df
:
27042 tcg_gen_movi_i32(tdf
, df
+ 1);
27043 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27046 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
27048 case OPC_FMIN_A_df
:
27049 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27051 case OPC_MADDR_Q_df
:
27052 tcg_gen_movi_i32(tdf
, df
+ 1);
27053 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27056 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
27059 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
27061 case OPC_MSUBR_Q_df
:
27062 tcg_gen_movi_i32(tdf
, df
+ 1);
27063 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27066 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
27068 case OPC_FMAX_A_df
:
27069 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27072 MIPS_INVAL("MSA instruction");
27073 generate_exception_end(ctx
, EXCP_RI
);
27077 tcg_temp_free_i32(twd
);
27078 tcg_temp_free_i32(tws
);
27079 tcg_temp_free_i32(twt
);
27080 tcg_temp_free_i32(tdf
);
27083 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
27085 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
27086 (op & (0x7 << 18)))
27087 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27088 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27089 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27090 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
27091 TCGv_i32 twd
= tcg_const_i32(wd
);
27092 TCGv_i32 tws
= tcg_const_i32(ws
);
27093 TCGv_i32 twt
= tcg_const_i32(wt
);
27094 TCGv_i32 tdf
= tcg_const_i32(df
);
27096 switch (MASK_MSA_2R(ctx
->opcode
)) {
27098 #if !defined(TARGET_MIPS64)
27099 /* Double format valid only for MIPS64 */
27100 if (df
== DF_DOUBLE
) {
27101 generate_exception_end(ctx
, EXCP_RI
);
27105 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
27108 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
27111 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
27114 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
27117 MIPS_INVAL("MSA instruction");
27118 generate_exception_end(ctx
, EXCP_RI
);
27122 tcg_temp_free_i32(twd
);
27123 tcg_temp_free_i32(tws
);
27124 tcg_temp_free_i32(twt
);
27125 tcg_temp_free_i32(tdf
);
27128 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
27130 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
27131 (op & (0xf << 17)))
27132 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27133 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27134 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27135 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
27136 TCGv_i32 twd
= tcg_const_i32(wd
);
27137 TCGv_i32 tws
= tcg_const_i32(ws
);
27138 TCGv_i32 twt
= tcg_const_i32(wt
);
27139 /* adjust df value for floating-point instruction */
27140 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
27142 switch (MASK_MSA_2RF(ctx
->opcode
)) {
27143 case OPC_FCLASS_df
:
27144 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
27146 case OPC_FTRUNC_S_df
:
27147 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
27149 case OPC_FTRUNC_U_df
:
27150 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
27153 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
27155 case OPC_FRSQRT_df
:
27156 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
27159 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
27162 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
27165 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
27167 case OPC_FEXUPL_df
:
27168 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
27170 case OPC_FEXUPR_df
:
27171 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
27174 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
27177 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
27179 case OPC_FTINT_S_df
:
27180 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
27182 case OPC_FTINT_U_df
:
27183 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
27185 case OPC_FFINT_S_df
:
27186 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
27188 case OPC_FFINT_U_df
:
27189 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
27193 tcg_temp_free_i32(twd
);
27194 tcg_temp_free_i32(tws
);
27195 tcg_temp_free_i32(twt
);
27196 tcg_temp_free_i32(tdf
);
27199 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
27201 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
27202 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27203 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27204 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27205 TCGv_i32 twd
= tcg_const_i32(wd
);
27206 TCGv_i32 tws
= tcg_const_i32(ws
);
27207 TCGv_i32 twt
= tcg_const_i32(wt
);
27209 switch (MASK_MSA_VEC(ctx
->opcode
)) {
27211 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
27214 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
27217 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
27220 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
27223 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
27226 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
27229 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
27232 MIPS_INVAL("MSA instruction");
27233 generate_exception_end(ctx
, EXCP_RI
);
27237 tcg_temp_free_i32(twd
);
27238 tcg_temp_free_i32(tws
);
27239 tcg_temp_free_i32(twt
);
27242 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
27244 switch (MASK_MSA_VEC(ctx
->opcode
)) {
27252 gen_msa_vec_v(env
, ctx
);
27255 gen_msa_2r(env
, ctx
);
27258 gen_msa_2rf(env
, ctx
);
27261 MIPS_INVAL("MSA instruction");
27262 generate_exception_end(ctx
, EXCP_RI
);
27267 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
27269 uint32_t opcode
= ctx
->opcode
;
27270 check_insn(ctx
, ASE_MSA
);
27271 check_msa_access(ctx
);
27273 switch (MASK_MSA_MINOR(opcode
)) {
27274 case OPC_MSA_I8_00
:
27275 case OPC_MSA_I8_01
:
27276 case OPC_MSA_I8_02
:
27277 gen_msa_i8(env
, ctx
);
27279 case OPC_MSA_I5_06
:
27280 case OPC_MSA_I5_07
:
27281 gen_msa_i5(env
, ctx
);
27283 case OPC_MSA_BIT_09
:
27284 case OPC_MSA_BIT_0A
:
27285 gen_msa_bit(env
, ctx
);
27287 case OPC_MSA_3R_0D
:
27288 case OPC_MSA_3R_0E
:
27289 case OPC_MSA_3R_0F
:
27290 case OPC_MSA_3R_10
:
27291 case OPC_MSA_3R_11
:
27292 case OPC_MSA_3R_12
:
27293 case OPC_MSA_3R_13
:
27294 case OPC_MSA_3R_14
:
27295 case OPC_MSA_3R_15
:
27296 gen_msa_3r(env
, ctx
);
27299 gen_msa_elm(env
, ctx
);
27301 case OPC_MSA_3RF_1A
:
27302 case OPC_MSA_3RF_1B
:
27303 case OPC_MSA_3RF_1C
:
27304 gen_msa_3rf(env
, ctx
);
27307 gen_msa_vec(env
, ctx
);
27318 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
27319 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
27320 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27321 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
27323 TCGv_i32 twd
= tcg_const_i32(wd
);
27324 TCGv taddr
= tcg_temp_new();
27325 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
27327 switch (MASK_MSA_MINOR(opcode
)) {
27329 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
27332 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
27335 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
27338 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
27341 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
27344 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
27347 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
27350 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
27354 tcg_temp_free_i32(twd
);
27355 tcg_temp_free(taddr
);
27359 MIPS_INVAL("MSA instruction");
27360 generate_exception_end(ctx
, EXCP_RI
);
27366 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
27369 int rs
, rt
, rd
, sa
;
27373 /* make sure instructions are on a word boundary */
27374 if (ctx
->base
.pc_next
& 0x3) {
27375 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
27376 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
27380 /* Handle blikely not taken case */
27381 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
27382 TCGLabel
*l1
= gen_new_label();
27384 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
27385 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
27386 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
27390 op
= MASK_OP_MAJOR(ctx
->opcode
);
27391 rs
= (ctx
->opcode
>> 21) & 0x1f;
27392 rt
= (ctx
->opcode
>> 16) & 0x1f;
27393 rd
= (ctx
->opcode
>> 11) & 0x1f;
27394 sa
= (ctx
->opcode
>> 6) & 0x1f;
27395 imm
= (int16_t)ctx
->opcode
;
27398 decode_opc_special(env
, ctx
);
27401 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
27402 decode_tx79_mmi(env
, ctx
);
27403 } else if (ctx
->insn_flags
& ASE_MXU
) {
27404 decode_opc_mxu(env
, ctx
);
27406 decode_opc_special2_legacy(env
, ctx
);
27410 if (ctx
->insn_flags
& INSN_R5900
) {
27411 decode_tx79_sq(env
, ctx
); /* TX79_SQ */
27413 decode_opc_special3(env
, ctx
);
27417 op1
= MASK_REGIMM(ctx
->opcode
);
27419 case OPC_BLTZL
: /* REGIMM branches */
27423 check_insn(ctx
, ISA_MIPS2
);
27424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27428 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
27432 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27434 /* OPC_NAL, OPC_BAL */
27435 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
27437 generate_exception_end(ctx
, EXCP_RI
);
27440 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
27443 case OPC_TGEI
: /* REGIMM traps */
27450 check_insn(ctx
, ISA_MIPS2
);
27451 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27452 gen_trap(ctx
, op1
, rs
, -1, imm
);
27455 check_insn(ctx
, ISA_MIPS32R6
);
27456 generate_exception_end(ctx
, EXCP_RI
);
27459 check_insn(ctx
, ISA_MIPS32R2
);
27460 /* Break the TB to be able to sync copied instructions
27462 ctx
->base
.is_jmp
= DISAS_STOP
;
27464 case OPC_BPOSGE32
: /* MIPS DSP branch */
27465 #if defined(TARGET_MIPS64)
27469 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
27471 #if defined(TARGET_MIPS64)
27473 check_insn(ctx
, ISA_MIPS32R6
);
27474 check_mips_64(ctx
);
27476 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
27480 check_insn(ctx
, ISA_MIPS32R6
);
27481 check_mips_64(ctx
);
27483 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
27487 default: /* Invalid */
27488 MIPS_INVAL("regimm");
27489 generate_exception_end(ctx
, EXCP_RI
);
27494 check_cp0_enabled(ctx
);
27495 op1
= MASK_CP0(ctx
->opcode
);
27503 #if defined(TARGET_MIPS64)
27507 #ifndef CONFIG_USER_ONLY
27508 gen_cp0(env
, ctx
, op1
, rt
, rd
);
27509 #endif /* !CONFIG_USER_ONLY */
27527 #ifndef CONFIG_USER_ONLY
27528 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
27529 #endif /* !CONFIG_USER_ONLY */
27532 #ifndef CONFIG_USER_ONLY
27535 TCGv t0
= tcg_temp_new();
27537 op2
= MASK_MFMC0(ctx
->opcode
);
27541 gen_helper_dmt(t0
);
27542 gen_store_gpr(t0
, rt
);
27546 gen_helper_emt(t0
);
27547 gen_store_gpr(t0
, rt
);
27551 gen_helper_dvpe(t0
, cpu_env
);
27552 gen_store_gpr(t0
, rt
);
27556 gen_helper_evpe(t0
, cpu_env
);
27557 gen_store_gpr(t0
, rt
);
27560 check_insn(ctx
, ISA_MIPS32R6
);
27562 gen_helper_dvp(t0
, cpu_env
);
27563 gen_store_gpr(t0
, rt
);
27567 check_insn(ctx
, ISA_MIPS32R6
);
27569 gen_helper_evp(t0
, cpu_env
);
27570 gen_store_gpr(t0
, rt
);
27574 check_insn(ctx
, ISA_MIPS32R2
);
27575 save_cpu_state(ctx
, 1);
27576 gen_helper_di(t0
, cpu_env
);
27577 gen_store_gpr(t0
, rt
);
27578 /* Stop translation as we may have switched
27579 the execution mode. */
27580 ctx
->base
.is_jmp
= DISAS_STOP
;
27583 check_insn(ctx
, ISA_MIPS32R2
);
27584 save_cpu_state(ctx
, 1);
27585 gen_helper_ei(t0
, cpu_env
);
27586 gen_store_gpr(t0
, rt
);
27587 /* DISAS_STOP isn't sufficient, we need to ensure we break
27588 out of translated code to check for pending interrupts */
27589 gen_save_pc(ctx
->base
.pc_next
+ 4);
27590 ctx
->base
.is_jmp
= DISAS_EXIT
;
27592 default: /* Invalid */
27593 MIPS_INVAL("mfmc0");
27594 generate_exception_end(ctx
, EXCP_RI
);
27599 #endif /* !CONFIG_USER_ONLY */
27602 check_insn(ctx
, ISA_MIPS32R2
);
27603 gen_load_srsgpr(rt
, rd
);
27606 check_insn(ctx
, ISA_MIPS32R2
);
27607 gen_store_srsgpr(rt
, rd
);
27611 generate_exception_end(ctx
, EXCP_RI
);
27615 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
27616 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27617 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
27618 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27621 /* Arithmetic with immediate opcode */
27622 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27626 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27628 case OPC_SLTI
: /* Set on less than with immediate opcode */
27630 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
27632 case OPC_ANDI
: /* Arithmetic with immediate opcode */
27633 case OPC_LUI
: /* OPC_AUI */
27636 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
27638 case OPC_J
: /* Jump */
27640 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
27641 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
27644 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
27645 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27647 generate_exception_end(ctx
, EXCP_RI
);
27650 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
27651 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27654 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27657 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
27658 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27660 generate_exception_end(ctx
, EXCP_RI
);
27663 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
27664 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27667 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27670 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
27673 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27675 check_insn(ctx
, ISA_MIPS32R6
);
27676 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
27677 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27680 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
27683 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27685 check_insn(ctx
, ISA_MIPS32R6
);
27686 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
27687 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27692 check_insn(ctx
, ISA_MIPS2
);
27693 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27697 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27699 case OPC_LL
: /* Load and stores */
27700 check_insn(ctx
, ISA_MIPS2
);
27701 check_insn_opc_user_only(ctx
, INSN_R5900
);
27705 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27713 gen_ld(ctx
, op
, rt
, rs
, imm
);
27717 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27722 gen_st(ctx
, op
, rt
, rs
, imm
);
27725 check_insn(ctx
, ISA_MIPS2
);
27726 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27727 check_insn_opc_user_only(ctx
, INSN_R5900
);
27728 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
27731 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27732 check_cp0_enabled(ctx
);
27733 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
27734 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27735 gen_cache_operation(ctx
, rt
, rs
, imm
);
27737 /* Treat as NOP. */
27740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27741 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
27743 /* Treat as NOP. */
27746 /* Floating point (COP1). */
27751 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
27755 op1
= MASK_CP1(ctx
->opcode
);
27760 check_cp1_enabled(ctx
);
27761 check_insn(ctx
, ISA_MIPS32R2
);
27767 check_cp1_enabled(ctx
);
27768 gen_cp1(ctx
, op1
, rt
, rd
);
27770 #if defined(TARGET_MIPS64)
27773 check_cp1_enabled(ctx
);
27774 check_insn(ctx
, ISA_MIPS3
);
27775 check_mips_64(ctx
);
27776 gen_cp1(ctx
, op1
, rt
, rd
);
27779 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
27780 check_cp1_enabled(ctx
);
27781 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27783 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
27788 check_insn(ctx
, ASE_MIPS3D
);
27789 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
27790 (rt
>> 2) & 0x7, imm
<< 2);
27794 check_cp1_enabled(ctx
);
27795 check_insn(ctx
, ISA_MIPS32R6
);
27796 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
27800 check_cp1_enabled(ctx
);
27801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27803 check_insn(ctx
, ASE_MIPS3D
);
27806 check_cp1_enabled(ctx
);
27807 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27808 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
27809 (rt
>> 2) & 0x7, imm
<< 2);
27816 check_cp1_enabled(ctx
);
27817 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
27823 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
27824 check_cp1_enabled(ctx
);
27825 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27827 case R6_OPC_CMP_AF_S
:
27828 case R6_OPC_CMP_UN_S
:
27829 case R6_OPC_CMP_EQ_S
:
27830 case R6_OPC_CMP_UEQ_S
:
27831 case R6_OPC_CMP_LT_S
:
27832 case R6_OPC_CMP_ULT_S
:
27833 case R6_OPC_CMP_LE_S
:
27834 case R6_OPC_CMP_ULE_S
:
27835 case R6_OPC_CMP_SAF_S
:
27836 case R6_OPC_CMP_SUN_S
:
27837 case R6_OPC_CMP_SEQ_S
:
27838 case R6_OPC_CMP_SEUQ_S
:
27839 case R6_OPC_CMP_SLT_S
:
27840 case R6_OPC_CMP_SULT_S
:
27841 case R6_OPC_CMP_SLE_S
:
27842 case R6_OPC_CMP_SULE_S
:
27843 case R6_OPC_CMP_OR_S
:
27844 case R6_OPC_CMP_UNE_S
:
27845 case R6_OPC_CMP_NE_S
:
27846 case R6_OPC_CMP_SOR_S
:
27847 case R6_OPC_CMP_SUNE_S
:
27848 case R6_OPC_CMP_SNE_S
:
27849 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
27851 case R6_OPC_CMP_AF_D
:
27852 case R6_OPC_CMP_UN_D
:
27853 case R6_OPC_CMP_EQ_D
:
27854 case R6_OPC_CMP_UEQ_D
:
27855 case R6_OPC_CMP_LT_D
:
27856 case R6_OPC_CMP_ULT_D
:
27857 case R6_OPC_CMP_LE_D
:
27858 case R6_OPC_CMP_ULE_D
:
27859 case R6_OPC_CMP_SAF_D
:
27860 case R6_OPC_CMP_SUN_D
:
27861 case R6_OPC_CMP_SEQ_D
:
27862 case R6_OPC_CMP_SEUQ_D
:
27863 case R6_OPC_CMP_SLT_D
:
27864 case R6_OPC_CMP_SULT_D
:
27865 case R6_OPC_CMP_SLE_D
:
27866 case R6_OPC_CMP_SULE_D
:
27867 case R6_OPC_CMP_OR_D
:
27868 case R6_OPC_CMP_UNE_D
:
27869 case R6_OPC_CMP_NE_D
:
27870 case R6_OPC_CMP_SOR_D
:
27871 case R6_OPC_CMP_SUNE_D
:
27872 case R6_OPC_CMP_SNE_D
:
27873 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
27876 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
27877 rt
, rd
, sa
, (imm
>> 8) & 0x7);
27882 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
27897 check_insn(ctx
, ASE_MSA
);
27898 gen_msa_branch(env
, ctx
, op1
);
27902 generate_exception_end(ctx
, EXCP_RI
);
27907 /* Compact branches [R6] and COP2 [non-R6] */
27908 case OPC_BC
: /* OPC_LWC2 */
27909 case OPC_BALC
: /* OPC_SWC2 */
27910 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27911 /* OPC_BC, OPC_BALC */
27912 gen_compute_compact_branch(ctx
, op
, 0, 0,
27913 sextract32(ctx
->opcode
<< 2, 0, 28));
27915 /* OPC_LWC2, OPC_SWC2 */
27916 /* COP2: Not implemented. */
27917 generate_exception_err(ctx
, EXCP_CpU
, 2);
27920 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
27921 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
27922 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27924 /* OPC_BEQZC, OPC_BNEZC */
27925 gen_compute_compact_branch(ctx
, op
, rs
, 0,
27926 sextract32(ctx
->opcode
<< 2, 0, 23));
27928 /* OPC_JIC, OPC_JIALC */
27929 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
27932 /* OPC_LWC2, OPC_SWC2 */
27933 /* COP2: Not implemented. */
27934 generate_exception_err(ctx
, EXCP_CpU
, 2);
27938 check_insn(ctx
, INSN_LOONGSON2F
);
27939 /* Note that these instructions use different fields. */
27940 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
27944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27945 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
27946 check_cp1_enabled(ctx
);
27947 op1
= MASK_CP3(ctx
->opcode
);
27951 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
27957 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
27958 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
27961 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
27962 /* Treat as NOP. */
27965 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
27979 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
27980 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
27984 generate_exception_end(ctx
, EXCP_RI
);
27988 generate_exception_err(ctx
, EXCP_CpU
, 1);
27992 #if defined(TARGET_MIPS64)
27993 /* MIPS64 opcodes */
27995 check_insn_opc_user_only(ctx
, INSN_R5900
);
27999 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28003 check_insn(ctx
, ISA_MIPS3
);
28004 check_mips_64(ctx
);
28005 gen_ld(ctx
, op
, rt
, rs
, imm
);
28009 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28012 check_insn(ctx
, ISA_MIPS3
);
28013 check_mips_64(ctx
);
28014 gen_st(ctx
, op
, rt
, rs
, imm
);
28017 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28018 check_insn(ctx
, ISA_MIPS3
);
28019 check_insn_opc_user_only(ctx
, INSN_R5900
);
28020 check_mips_64(ctx
);
28021 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
28023 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
28024 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28025 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
28026 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28029 check_insn(ctx
, ISA_MIPS3
);
28030 check_mips_64(ctx
);
28031 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28035 check_insn(ctx
, ISA_MIPS3
);
28036 check_mips_64(ctx
);
28037 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28040 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
28041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28042 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28044 MIPS_INVAL("major opcode");
28045 generate_exception_end(ctx
, EXCP_RI
);
28049 case OPC_DAUI
: /* OPC_JALX */
28050 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28051 #if defined(TARGET_MIPS64)
28053 check_mips_64(ctx
);
28055 generate_exception(ctx
, EXCP_RI
);
28056 } else if (rt
!= 0) {
28057 TCGv t0
= tcg_temp_new();
28058 gen_load_gpr(t0
, rs
);
28059 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
28063 generate_exception_end(ctx
, EXCP_RI
);
28064 MIPS_INVAL("major opcode");
28068 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
28069 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
28070 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
28073 case OPC_MSA
: /* OPC_MDMX */
28074 if (ctx
->insn_flags
& INSN_R5900
) {
28075 decode_tx79_lq(env
, ctx
); /* TX79_LQ */
28077 /* MDMX: Not implemented. */
28082 check_insn(ctx
, ISA_MIPS32R6
);
28083 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
28085 default: /* Invalid */
28086 MIPS_INVAL("major opcode");
28087 generate_exception_end(ctx
, EXCP_RI
);
28092 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
28094 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
28095 CPUMIPSState
*env
= cs
->env_ptr
;
28097 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
28098 ctx
->saved_pc
= -1;
28099 ctx
->insn_flags
= env
->insn_flags
;
28100 ctx
->CP0_Config1
= env
->CP0_Config1
;
28101 ctx
->CP0_Config2
= env
->CP0_Config2
;
28102 ctx
->CP0_Config3
= env
->CP0_Config3
;
28103 ctx
->CP0_Config5
= env
->CP0_Config5
;
28105 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
28106 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
28107 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
28108 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
28109 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
28110 ctx
->PAMask
= env
->PAMask
;
28111 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
28112 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
28113 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
28114 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
28115 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
28116 /* Restore delay slot state from the tb context. */
28117 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
28118 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
28119 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
28120 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
28121 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
28122 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
28123 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
28124 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
28125 restore_cpu_state(env
, ctx
);
28126 #ifdef CONFIG_USER_ONLY
28127 ctx
->mem_idx
= MIPS_HFLAG_UM
;
28129 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
28131 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
28132 MO_UNALN
: MO_ALIGN
;
28134 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
28138 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
28142 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
28144 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
28146 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
28150 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
28151 const CPUBreakpoint
*bp
)
28153 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
28155 save_cpu_state(ctx
, 1);
28156 ctx
->base
.is_jmp
= DISAS_NORETURN
;
28157 gen_helper_raise_exception_debug(cpu_env
);
28158 /* The address covered by the breakpoint must be included in
28159 [tb->pc, tb->pc + tb->size) in order to for it to be
28160 properly cleared -- thus we increment the PC here so that
28161 the logic setting tb->size below does the right thing. */
28162 ctx
->base
.pc_next
+= 4;
28166 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
28168 CPUMIPSState
*env
= cs
->env_ptr
;
28169 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
28173 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
28174 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
28175 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
28176 insn_bytes
= decode_nanomips_opc(env
, ctx
);
28177 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
28178 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
28180 decode_opc(env
, ctx
);
28181 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
28182 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
28183 insn_bytes
= decode_micromips_opc(env
, ctx
);
28184 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
28185 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
28186 insn_bytes
= decode_mips16_opc(env
, ctx
);
28188 generate_exception_end(ctx
, EXCP_RI
);
28189 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
28193 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
28194 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
28195 MIPS_HFLAG_FBNSLOT
))) {
28196 /* force to generate branch as there is neither delay nor
28200 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
28201 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
28202 /* Force to generate branch as microMIPS R6 doesn't restrict
28203 branches in the forbidden slot. */
28208 gen_branch(ctx
, insn_bytes
);
28210 ctx
->base
.pc_next
+= insn_bytes
;
28212 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
28215 /* Execute a branch and its delay slot as a single instruction.
28216 This is what GDB expects and is consistent with what the
28217 hardware does (e.g. if a delay slot instruction faults, the
28218 reported PC is the PC of the branch). */
28219 if (ctx
->base
.singlestep_enabled
&&
28220 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
28221 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
28223 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
28224 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
28228 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
28230 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
28232 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
28233 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
28234 gen_helper_raise_exception_debug(cpu_env
);
28236 switch (ctx
->base
.is_jmp
) {
28238 gen_save_pc(ctx
->base
.pc_next
);
28239 tcg_gen_lookup_and_goto_ptr();
28242 case DISAS_TOO_MANY
:
28243 save_cpu_state(ctx
, 0);
28244 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
28247 tcg_gen_exit_tb(NULL
, 0);
28249 case DISAS_NORETURN
:
28252 g_assert_not_reached();
28257 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
28259 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
28260 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
28263 static const TranslatorOps mips_tr_ops
= {
28264 .init_disas_context
= mips_tr_init_disas_context
,
28265 .tb_start
= mips_tr_tb_start
,
28266 .insn_start
= mips_tr_insn_start
,
28267 .breakpoint_check
= mips_tr_breakpoint_check
,
28268 .translate_insn
= mips_tr_translate_insn
,
28269 .tb_stop
= mips_tr_tb_stop
,
28270 .disas_log
= mips_tr_disas_log
,
28273 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
28277 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
28280 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
28284 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
28286 #define printfpr(fp) \
28289 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
28290 " fd:%13g fs:%13g psu: %13g\n", \
28291 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
28292 (double)(fp)->fd, \
28293 (double)(fp)->fs[FP_ENDIAN_IDX], \
28294 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
28297 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
28298 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
28299 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
28300 " fd:%13g fs:%13g psu:%13g\n", \
28301 tmp.w[FP_ENDIAN_IDX], tmp.d, \
28303 (double)tmp.fs[FP_ENDIAN_IDX], \
28304 (double)tmp.fs[!FP_ENDIAN_IDX]); \
28309 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
28310 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
28311 get_float_exception_flags(&env
->active_fpu
.fp_status
));
28312 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
28313 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
28314 printfpr(&env
->active_fpu
.fpr
[i
]);
28320 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
28323 MIPSCPU
*cpu
= MIPS_CPU(cs
);
28324 CPUMIPSState
*env
= &cpu
->env
;
28327 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
28328 " LO=0x" TARGET_FMT_lx
" ds %04x "
28329 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
28330 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
28331 env
->hflags
, env
->btarget
, env
->bcond
);
28332 for (i
= 0; i
< 32; i
++) {
28334 cpu_fprintf(f
, "GPR%02d:", i
);
28335 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
28337 cpu_fprintf(f
, "\n");
28340 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
28341 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
28342 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
28344 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
28345 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
28346 env
->CP0_Config2
, env
->CP0_Config3
);
28347 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
28348 env
->CP0_Config4
, env
->CP0_Config5
);
28349 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
28350 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
28354 void mips_tcg_init(void)
28359 for (i
= 1; i
< 32; i
++)
28360 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
28361 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
28364 for (i
= 0; i
< 32; i
++) {
28365 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
28367 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
28368 /* The scalar floating-point unit (FPU) registers are mapped on
28369 * the MSA vector registers. */
28370 fpu_f64
[i
] = msa_wr_d
[i
* 2];
28371 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
28372 msa_wr_d
[i
* 2 + 1] =
28373 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
28376 cpu_PC
= tcg_global_mem_new(cpu_env
,
28377 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
28378 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
28379 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
28380 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
28382 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
28383 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
28386 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
28387 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
28389 bcond
= tcg_global_mem_new(cpu_env
,
28390 offsetof(CPUMIPSState
, bcond
), "bcond");
28391 btarget
= tcg_global_mem_new(cpu_env
,
28392 offsetof(CPUMIPSState
, btarget
), "btarget");
28393 hflags
= tcg_global_mem_new_i32(cpu_env
,
28394 offsetof(CPUMIPSState
, hflags
), "hflags");
28396 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
28397 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
28399 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
28400 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
28403 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
28404 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
28405 offsetof(CPUMIPSState
,
28406 active_tc
.mxu_gpr
[i
]),
28410 mxu_CR
= tcg_global_mem_new(cpu_env
,
28411 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
28412 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
28415 #include "translate_init.inc.c"
28417 void cpu_mips_realize_env(CPUMIPSState
*env
)
28419 env
->exception_base
= (int32_t)0xBFC00000;
28421 #ifndef CONFIG_USER_ONLY
28422 mmu_init(env
, env
->cpu_model
);
28424 fpu_init(env
, env
->cpu_model
);
28425 mvp_init(env
, env
->cpu_model
);
28428 bool cpu_supports_cps_smp(const char *cpu_type
)
28430 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
28431 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
28434 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
28436 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
28437 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
28440 void cpu_set_exception_base(int vp_index
, target_ulong address
)
28442 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
28443 vp
->env
.exception_base
= address
;
28446 void cpu_state_reset(CPUMIPSState
*env
)
28448 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
28449 CPUState
*cs
= CPU(cpu
);
28451 /* Reset registers to their default values */
28452 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
28453 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
28454 #ifdef TARGET_WORDS_BIGENDIAN
28455 env
->CP0_Config0
|= (1 << CP0C0_BE
);
28457 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
28458 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
28459 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
28460 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
28461 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
28462 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
28463 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
28464 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
28465 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
28466 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
28467 << env
->cpu_model
->CP0_LLAddr_shift
;
28468 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
28469 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
28470 env
->CCRes
= env
->cpu_model
->CCRes
;
28471 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
28472 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
28473 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
28474 env
->current_tc
= 0;
28475 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
28476 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
28477 #if defined(TARGET_MIPS64)
28478 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
28479 env
->SEGMask
|= 3ULL << 62;
28482 env
->PABITS
= env
->cpu_model
->PABITS
;
28483 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
28484 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
28485 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
28486 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
28487 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
28488 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
28489 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
28490 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
28491 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
28492 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
28493 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
28494 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
28495 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
28496 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
28497 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
28498 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
28499 env
->msair
= env
->cpu_model
->MSAIR
;
28500 env
->insn_flags
= env
->cpu_model
->insn_flags
;
28502 #if defined(CONFIG_USER_ONLY)
28503 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
28504 # ifdef TARGET_MIPS64
28505 /* Enable 64-bit register mode. */
28506 env
->CP0_Status
|= (1 << CP0St_PX
);
28508 # ifdef TARGET_ABI_MIPSN64
28509 /* Enable 64-bit address mode. */
28510 env
->CP0_Status
|= (1 << CP0St_UX
);
28512 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
28513 hardware registers. */
28514 env
->CP0_HWREna
|= 0x0000000F;
28515 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
28516 env
->CP0_Status
|= (1 << CP0St_CU1
);
28518 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
28519 env
->CP0_Status
|= (1 << CP0St_MX
);
28521 # if defined(TARGET_MIPS64)
28522 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
28523 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
28524 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
28525 env
->CP0_Status
|= (1 << CP0St_FR
);
28529 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
28530 /* If the exception was raised from a delay slot,
28531 come back to the jump. */
28532 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
28533 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
28535 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
28537 env
->active_tc
.PC
= env
->exception_base
;
28538 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
28539 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
28540 env
->CP0_Wired
= 0;
28541 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
28542 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
28543 if (mips_um_ksegs_enabled()) {
28544 env
->CP0_EBase
|= 0x40000000;
28546 env
->CP0_EBase
|= (int32_t)0x80000000;
28548 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
28549 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
28551 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
28553 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
28554 /* vectored interrupts not implemented, timer on int 7,
28555 no performance counters. */
28556 env
->CP0_IntCtl
= 0xe0000000;
28560 for (i
= 0; i
< 7; i
++) {
28561 env
->CP0_WatchLo
[i
] = 0;
28562 env
->CP0_WatchHi
[i
] = 0x80000000;
28564 env
->CP0_WatchLo
[7] = 0;
28565 env
->CP0_WatchHi
[7] = 0;
28567 /* Count register increments in debug mode, EJTAG version 1 */
28568 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
28570 cpu_mips_store_count(env
, 1);
28572 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
28575 /* Only TC0 on VPE 0 starts as active. */
28576 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
28577 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
28578 env
->tcs
[i
].CP0_TCHalt
= 1;
28580 env
->active_tc
.CP0_TCHalt
= 1;
28583 if (cs
->cpu_index
== 0) {
28584 /* VPE0 starts up enabled. */
28585 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
28586 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
28588 /* TC0 starts up unhalted. */
28590 env
->active_tc
.CP0_TCHalt
= 0;
28591 env
->tcs
[0].CP0_TCHalt
= 0;
28592 /* With thread 0 active. */
28593 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
28594 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
28599 * Configure default legacy segmentation control. We use this regardless of
28600 * whether segmentation control is presented to the guest.
28602 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
28603 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
28604 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
28605 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
28606 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
28607 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
28609 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
28610 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
28611 (3 << CP0SC_C
)) << 16;
28612 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
28613 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
28614 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
28615 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
28616 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
28617 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
28618 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
28619 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
28621 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
28622 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
28623 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
28624 env
->CP0_Status
|= (1 << CP0St_FR
);
28627 if (env
->insn_flags
& ISA_MIPS32R6
) {
28629 env
->CP0_PWSize
= 0x40;
28635 env
->CP0_PWField
= 0x0C30C302;
28642 env
->CP0_PWField
= 0x02;
28645 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
28646 /* microMIPS on reset when Config3.ISA is 3 */
28647 env
->hflags
|= MIPS_HFLAG_M16
;
28651 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
28655 compute_hflags(env
);
28656 restore_fp_status(env
);
28657 restore_pamask(env
);
28658 cs
->exception_index
= EXCP_NONE
;
28660 if (semihosting_get_argc()) {
28661 /* UHI interface can be used to obtain argc and argv */
28662 env
->active_tc
.gpr
[4] = -1;
28666 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
28667 target_ulong
*data
)
28669 env
->active_tc
.PC
= data
[0];
28670 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
28671 env
->hflags
|= data
[1];
28672 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
28673 case MIPS_HFLAG_BR
:
28675 case MIPS_HFLAG_BC
:
28676 case MIPS_HFLAG_BL
:
28678 env
->btarget
= data
[2];