/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
/* Set to a non-zero value to enable verbose disassembly debug output. */
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: select the primary opcode field, bits 31..26. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
/*
 * PC-relative address computation / loads.
 * The PCREL major opcode is further decoded by either bits 20..19
 * (TOP2BITS) or bits 20..16 (TOP5BITS) of the instruction word.
 */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
/*
 * MIPS SPECIAL opcodes: major opcode plus the function field (bits 5..0).
 * The expansion is fully parenthesized so the macro is safe inside any
 * larger expression ('|' binds looser than '==', '&&', etc.).
 */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
/*
 * R6 Multiply and Divide instructions have the same opcode and
 * function field as legacy OPC_MULT[U]/OPC_DIV[U]; they are told
 * apart by bits 10..6, so include those bits in the mask.
 */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
/*
 * Multiplication variants of the vr54xx: SPECIAL opcode plus the
 * sa field (bits 10..6).  Fully parenthesized for safe composition.
 */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
/*
 * REGIMM opcodes: major opcode plus the rt field (bits 20..16),
 * which selects the sub-operation.  Fully parenthesized.
 */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
/*
 * SPECIAL2 opcodes: major opcode plus the function field (bits 5..0).
 * Fully parenthesized so the macro composes safely in expressions.
 */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
/*
 * SPECIAL3 opcodes: major opcode plus the function field (bits 5..0).
 * Fully parenthesized so the macro composes safely in expressions.
 */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
474 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
496 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
505 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
535 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
536 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
558 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
582 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
610 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
637 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
664 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
670 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
678 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
700 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
729 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
761 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
794 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
803 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
829 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
835 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* Selects the SPECIAL3 opcode fields plus bits 10..6, which encode the
 * SHLL.OB-class DSP shift sub-opcode (see OPC_SHLL_* et al. below). */
866 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
/*
 * Isolate the fields identifying a coprocessor 0 opcode: the major opcode
 * (bits 31..26, via MASK_OP_MAJOR) plus the rs field (bits 25..21).
 * The expansion is wrapped in parentheses — matching MASK_DEXTR_W() and
 * the other mask macros in this file — so the macro is safe to use inside
 * larger expressions (`|` would otherwise bind looser than `==`, `&`, etc.).
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * Isolate the MFMC0 sub-opcode: the CP0 fields (major opcode + rs) plus the
 * low 16 bits of the instruction. Fully parenthesized, consistent with the
 * other mask macros in this file, so it composes safely in expressions.
 */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
/*
 * Isolate a CP0 opcode with rs == C0: the CP0 fields (major opcode + rs)
 * plus the function field (bits 5..0). Fully parenthesized, consistent with
 * the other mask macros in this file, so it composes safely in expressions.
 */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
960 /* Coprocessor 1 (rs field) */
/*
 * Isolate the fields identifying a coprocessor 1 opcode: the major opcode
 * (bits 31..26) plus the rs/fmt field (bits 25..21). Fully parenthesized,
 * consistent with the other mask macros in this file, so it composes safely
 * in expressions.
 */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
963 /* Values for the fmt field in FP instructions */
965 /* 0 - 15 are reserved */
966 FMT_S
= 16, /* single fp */
967 FMT_D
= 17, /* double fp */
968 FMT_E
= 18, /* extended fp */
969 FMT_Q
= 19, /* quad fp */
970 FMT_W
= 20, /* 32-bit fixed */
971 FMT_L
= 21, /* 64-bit fixed */
972 FMT_PS
= 22, /* paired single fp */
973 /* 23 - 31 are reserved */
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * Isolate a full CP1 arithmetic opcode: the CP1 fields (major opcode + fmt)
 * plus the function field (bits 5..0). Fully parenthesized, consistent with
 * the other mask macros in this file, so it composes safely in expressions.
 */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * Isolate a CP1 branch (BC1x) sub-opcode: the CP1 fields plus the nd/tf
 * bits (17..16). Fully parenthesized, consistent with the other mask macros
 * in this file, so it composes safely in expressions.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * Isolate the fields identifying a coprocessor 2 opcode: the major opcode
 * (bits 31..26) plus the rs field (bits 25..21). Fully parenthesized,
 * consistent with the other mask macros in this file, so it composes safely
 * in expressions.
 */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* Selects the fields identifying a Loongson multimedia (LMI) opcode: the
 * major opcode (bits 31..26), bits 25..21, and bits 4..0 of the instruction
 * (see the OPC_P* CP2-based values below). */
1045 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/*
 * Isolate the fields identifying a coprocessor 3 opcode: the major opcode
 * (bits 31..26) plus the function field (bits 5..0). Fully parenthesized,
 * consistent with the other mask macros in this file, so it composes safely
 * in expressions.
 */
#define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* Selects the major opcode (bits 31..26) plus the MSA minor opcode
 * (bits 5..0) — see the OPC_MSA_* values below. */
1168 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1403 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1404 * ============================================
1407 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1408 * instructions set. It is designed to fit the needs of signal, graphical and
1409 * video processing applications. MXU instruction set is used in Xburst family
1410 * of microprocessors by Ingenic.
1412 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1413 * the control register.
1416 * The notation used in MXU assembler mnemonics
1417 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1419 * Register operands:
1421 * XRa, XRb, XRc, XRd - MXU registers
1422 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1424 * Non-register operands:
1426 * aptn1 - 1-bit accumulate add/subtract pattern
1427 * aptn2 - 2-bit accumulate add/subtract pattern
1428 * eptn2 - 2-bit execute add/subtract pattern
1429 * optn2 - 2-bit operand pattern
1430 * optn3 - 3-bit operand pattern
1431 * sft4 - 4-bit shift amount
1432 * strd2 - 2-bit stride amount
1436 * Level of parallelism: Operand size:
1437 * S - single operation at a time 32 - word
1438 * D - two operations in parallel 16 - half word
1439 * Q - four operations in parallel 8 - byte
1443 * ADD - Add or subtract
1444 * ADDC - Add with carry-in
1446 * ASUM - Sum together then accumulate (add or subtract)
1447 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1448 * AVG - Average between 2 operands
1449 * ABD - Absolute difference
1451 * AND - Logical bitwise 'and' operation
1453 * EXTR - Extract bits
1454 * I2M - Move from GPR register to MXU register
1455 * LDD - Load data from memory to XRF
1456 * LDI - Load data from memory to XRF (and increase the address base)
1457 * LUI - Load unsigned immediate
1459 * MULU - Unsigned multiply
1460 * MADD - 64-bit operand add 32x32 product
1461 * MSUB - 64-bit operand subtract 32x32 product
1462 * MAC - Multiply and accumulate (add or subtract)
1463 * MAD - Multiply and add or subtract
1464 * MAX - Maximum between 2 operands
1465 * MIN - Minimum between 2 operands
1466 * M2I - Move from MXU register to GPR register
1467 * MOVZ - Move if zero
1468 * MOVN - Move if non-zero
1469 * NOR - Logical bitwise 'nor' operation
1470 * OR - Logical bitwise 'or' operation
1471 * STD - Store data from XRF to memory
1472 * SDI - Store data from XRF to memory (and increase the address base)
1473 * SLT - Set of less than comparison
1474 * SAD - Sum of absolute differences
1475 * SLL - Logical shift left
1476 * SLR - Logical shift right
1477 * SAR - Arithmetic shift right
1480 * SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
1481 * XOR - Logical bitwise 'exclusive or' operation
1485 * E - Expand results
1486 * F - Fixed point multiplication
1487 * L - Low part result
1488 * R - Doing rounding
1489 * V - Variable instead of immediate
1490 * W - Combine above L and V
1493 * The list of MXU instructions grouped by functionality
1494 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1496 * Load/Store instructions Multiplication instructions
1497 * ----------------------- ---------------------------
1499 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1500 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1501 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1502 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1503 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1504 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1505 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1506 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1507 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1508 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1509 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1510 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1511 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1513 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1514 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1515 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1516 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1517 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1518 * S16SDI XRa, Rb, s10, eptn2
1519 * S8LDD XRa, Rb, s8, eptn3
1520 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1521 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1522 * S8SDI XRa, Rb, s8, eptn3
1523 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1524 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1525 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1526 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1527 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1528 * S32CPS XRa, XRb, XRc
1529 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1530 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1531 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1532 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1533 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1534 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1535 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1536 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1537 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1538 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1539 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1540 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1541 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1542 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1543 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1544 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1545 * Q8SLT XRa, XRb, XRc
1546 * Q8SLTU XRa, XRb, XRc
1547 * Q8MOVZ XRa, XRb, XRc Shift instructions
1548 * Q8MOVN XRa, XRb, XRc ------------------
1550 * D32SLL XRa, XRb, XRc, XRd, sft4
1551 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1552 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1553 * D32SARL XRa, XRb, XRc, sft4
1554 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1555 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1556 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1557 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1558 * Q16SLL XRa, XRb, XRc, XRd, sft4
1559 * Q16SLR XRa, XRb, XRc, XRd, sft4
1560 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1561 * ------------------------- Q16SLLV XRa, XRb, Rb
1562 * Q16SLRV XRa, XRb, Rb
1563 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1564 * S32ALN XRa, XRb, XRc, Rb
1565 * S32ALNI XRa, XRb, XRc, s3
1566 * S32LUI XRa, s8, optn3 Move instructions
1567 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1568 * S32EXTRV XRa, XRb, Rs, Rt
1569 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1570 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1573 * The opcode organization of MXU instructions
1574 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1576 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1577 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1578 * other bits up to the instruction level is as follows:
1583 * ┌─ 000000 ─ OPC_MXU_S32MADD
1584 * ├─ 000001 ─ OPC_MXU_S32MADDU
1585 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1588 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1589 * │ ├─ 001 ─ OPC_MXU_S32MIN
1590 * │ ├─ 010 ─ OPC_MXU_D16MAX
1591 * │ ├─ 011 ─ OPC_MXU_D16MIN
1592 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1593 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1594 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1595 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1596 * ├─ 000100 ─ OPC_MXU_S32MSUB
1597 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1598 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1599 * │ ├─ 001 ─ OPC_MXU_D16SLT
1600 * │ ├─ 010 ─ OPC_MXU_D16AVG
1601 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1602 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1603 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1604 * │ └─ 111 ─ OPC_MXU_Q8ADD
1607 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1608 * │ ├─ 010 ─ OPC_MXU_D16CPS
1609 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1610 * │ └─ 110 ─ OPC_MXU_Q16SAT
1611 * ├─ 001000 ─ OPC_MXU_D16MUL
1613 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1614 * │ └─ 01 ─ OPC_MXU_D16MULE
1615 * ├─ 001010 ─ OPC_MXU_D16MAC
1616 * ├─ 001011 ─ OPC_MXU_D16MACF
1617 * ├─ 001100 ─ OPC_MXU_D16MADL
1618 * ├─ 001101 ─ OPC_MXU_S16MAD
1619 * ├─ 001110 ─ OPC_MXU_Q16ADD
1620 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1621 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1622 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1625 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1626 * │ └─ 1 ─ OPC_MXU_S32STDR
1629 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1630 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1633 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1634 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1637 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1638 * │ └─ 1 ─ OPC_MXU_S32LDIR
1641 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1642 * │ └─ 1 ─ OPC_MXU_S32SDIR
1645 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1646 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1649 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1650 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1651 * ├─ 011000 ─ OPC_MXU_D32ADD
1653 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1654 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1655 * │ └─ 10 ─ OPC_MXU_D32ASUM
1656 * ├─ 011010 ─ <not assigned>
1658 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1659 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1660 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1663 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1664 * │ ├─ 01 ─ OPC_MXU_D8SUM
1665 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1666 * ├─ 011110 ─ <not assigned>
1667 * ├─ 011111 ─ <not assigned>
1668 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1669 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1670 * ├─ 100010 ─ OPC_MXU_S8LDD
1671 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1672 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1673 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
1674 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1675 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1678 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1679 * │ ├─ 001 ─ OPC_MXU_S32ALN
1680 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1681 * │ ├─ 011 ─ OPC_MXU_S32LUI
1682 * │ ├─ 100 ─ OPC_MXU_S32NOR
1683 * │ ├─ 101 ─ OPC_MXU_S32AND
1684 * │ ├─ 110 ─ OPC_MXU_S32OR
1685 * │ └─ 111 ─ OPC_MXU_S32XOR
1688 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1689 * │ ├─ 001 ─ OPC_MXU_LXH
1690 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1691 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1692 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1693 * ├─ 101100 ─ OPC_MXU_S16LDI
1694 * ├─ 101101 ─ OPC_MXU_S16SDI
1695 * ├─ 101110 ─ OPC_MXU_S32M2I
1696 * ├─ 101111 ─ OPC_MXU_S32I2M
1697 * ├─ 110000 ─ OPC_MXU_D32SLL
1698 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1699 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1700 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1701 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1702 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1703 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1704 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1706 * ├─ 110111 ─ OPC_MXU_Q16SAR
1708 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1709 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1712 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1713 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1714 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1715 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1716 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1717 * │ └─ 101 ─ OPC_MXU_S32MOVN
1720 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1721 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1722 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1723 * ├─ 111100 ─ OPC_MXU_Q8MADL
1724 * ├─ 111101 ─ OPC_MXU_S32SFL
1725 * ├─ 111110 ─ OPC_MXU_Q8SAD
1726 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1731 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1732 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
1736 OPC_MXU_S32MADD
= 0x00,
1737 OPC_MXU_S32MADDU
= 0x01,
1738 OPC__MXU_MUL
= 0x02,
1739 OPC_MXU__POOL00
= 0x03,
1740 OPC_MXU_S32MSUB
= 0x04,
1741 OPC_MXU_S32MSUBU
= 0x05,
1742 OPC_MXU__POOL01
= 0x06,
1743 OPC_MXU__POOL02
= 0x07,
1744 OPC_MXU_D16MUL
= 0x08,
1745 OPC_MXU__POOL03
= 0x09,
1746 OPC_MXU_D16MAC
= 0x0A,
1747 OPC_MXU_D16MACF
= 0x0B,
1748 OPC_MXU_D16MADL
= 0x0C,
1749 OPC_MXU_S16MAD
= 0x0D,
1750 OPC_MXU_Q16ADD
= 0x0E,
1751 OPC_MXU_D16MACE
= 0x0F,
1752 OPC_MXU__POOL04
= 0x10,
1753 OPC_MXU__POOL05
= 0x11,
1754 OPC_MXU__POOL06
= 0x12,
1755 OPC_MXU__POOL07
= 0x13,
1756 OPC_MXU__POOL08
= 0x14,
1757 OPC_MXU__POOL09
= 0x15,
1758 OPC_MXU__POOL10
= 0x16,
1759 OPC_MXU__POOL11
= 0x17,
1760 OPC_MXU_D32ADD
= 0x18,
1761 OPC_MXU__POOL12
= 0x19,
1762 /* not assigned 0x1A */
1763 OPC_MXU__POOL13
= 0x1B,
1764 OPC_MXU__POOL14
= 0x1C,
1765 OPC_MXU_Q8ACCE
= 0x1D,
1766 /* not assigned 0x1E */
1767 /* not assigned 0x1F */
1768 /* not assigned 0x20 */
1769 /* not assigned 0x21 */
1770 OPC_MXU_S8LDD
= 0x22,
1771 OPC_MXU_S8STD
= 0x23,
1772 OPC_MXU_S8LDI
= 0x24,
1773 OPC_MXU_S8SDI
= 0x25,
1774 OPC_MXU__POOL15
= 0x26,
1775 OPC_MXU__POOL16
= 0x27,
1776 OPC_MXU__POOL17
= 0x28,
1777 /* not assigned 0x29 */
1778 OPC_MXU_S16LDD
= 0x2A,
1779 OPC_MXU_S16STD
= 0x2B,
1780 OPC_MXU_S16LDI
= 0x2C,
1781 OPC_MXU_S16SDI
= 0x2D,
1782 OPC_MXU_S32M2I
= 0x2E,
1783 OPC_MXU_S32I2M
= 0x2F,
1784 OPC_MXU_D32SLL
= 0x30,
1785 OPC_MXU_D32SLR
= 0x31,
1786 OPC_MXU_D32SARL
= 0x32,
1787 OPC_MXU_D32SAR
= 0x33,
1788 OPC_MXU_Q16SLL
= 0x34,
1789 OPC_MXU_Q16SLR
= 0x35,
1790 OPC_MXU__POOL18
= 0x36,
1791 OPC_MXU_Q16SAR
= 0x37,
1792 OPC_MXU__POOL19
= 0x38,
1793 OPC_MXU__POOL20
= 0x39,
1794 OPC_MXU__POOL21
= 0x3A,
1795 OPC_MXU_Q16SCOP
= 0x3B,
1796 OPC_MXU_Q8MADL
= 0x3C,
1797 OPC_MXU_S32SFL
= 0x3D,
1798 OPC_MXU_Q8SAD
= 0x3E,
1799 /* not assigned 0x3F */
1807 OPC_MXU_S32MAX
= 0x00,
1808 OPC_MXU_S32MIN
= 0x01,
1809 OPC_MXU_D16MAX
= 0x02,
1810 OPC_MXU_D16MIN
= 0x03,
1811 OPC_MXU_Q8MAX
= 0x04,
1812 OPC_MXU_Q8MIN
= 0x05,
1813 OPC_MXU_Q8SLT
= 0x06,
1814 OPC_MXU_Q8SLTU
= 0x07,
1821 OPC_MXU_S32SLT
= 0x00,
1822 OPC_MXU_D16SLT
= 0x01,
1823 OPC_MXU_D16AVG
= 0x02,
1824 OPC_MXU_D16AVGR
= 0x03,
1825 OPC_MXU_Q8AVG
= 0x04,
1826 OPC_MXU_Q8AVGR
= 0x05,
1827 OPC_MXU_Q8ADD
= 0x07,
1834 OPC_MXU_S32CPS
= 0x00,
1835 OPC_MXU_D16CPS
= 0x02,
1836 OPC_MXU_Q8ABD
= 0x04,
1837 OPC_MXU_Q16SAT
= 0x06,
1844 OPC_MXU_D16MULF
= 0x00,
1845 OPC_MXU_D16MULE
= 0x01,
1852 OPC_MXU_S32LDD
= 0x00,
1853 OPC_MXU_S32LDDR
= 0x01,
1860 OPC_MXU_S32STD
= 0x00,
1861 OPC_MXU_S32STDR
= 0x01,
1868 OPC_MXU_S32LDDV
= 0x00,
1869 OPC_MXU_S32LDDVR
= 0x01,
1876 OPC_MXU_S32STDV
= 0x00,
1877 OPC_MXU_S32STDVR
= 0x01,
1884 OPC_MXU_S32LDI
= 0x00,
1885 OPC_MXU_S32LDIR
= 0x01,
1892 OPC_MXU_S32SDI
= 0x00,
1893 OPC_MXU_S32SDIR
= 0x01,
1900 OPC_MXU_S32LDIV
= 0x00,
1901 OPC_MXU_S32LDIVR
= 0x01,
1908 OPC_MXU_S32SDIV
= 0x00,
1909 OPC_MXU_S32SDIVR
= 0x01,
1916 OPC_MXU_D32ACC
= 0x00,
1917 OPC_MXU_D32ACCM
= 0x01,
1918 OPC_MXU_D32ASUM
= 0x02,
1925 OPC_MXU_Q16ACC
= 0x00,
1926 OPC_MXU_Q16ACCM
= 0x01,
1927 OPC_MXU_Q16ASUM
= 0x02,
1934 OPC_MXU_Q8ADDE
= 0x00,
1935 OPC_MXU_D8SUM
= 0x01,
1936 OPC_MXU_D8SUMC
= 0x02,
1943 OPC_MXU_S32MUL
= 0x00,
1944 OPC_MXU_S32MULU
= 0x01,
1945 OPC_MXU_S32EXTR
= 0x02,
1946 OPC_MXU_S32EXTRV
= 0x03,
1953 OPC_MXU_D32SARW
= 0x00,
1954 OPC_MXU_S32ALN
= 0x01,
1955 OPC_MXU_S32ALNI
= 0x02,
1956 OPC_MXU_S32LUI
= 0x03,
1957 OPC_MXU_S32NOR
= 0x04,
1958 OPC_MXU_S32AND
= 0x05,
1959 OPC_MXU_S32OR
= 0x06,
1960 OPC_MXU_S32XOR
= 0x07,
1970 OPC_MXU_LXBU
= 0x04,
1971 OPC_MXU_LXHU
= 0x05,
1978 OPC_MXU_D32SLLV
= 0x00,
1979 OPC_MXU_D32SLRV
= 0x01,
1980 OPC_MXU_D32SARV
= 0x03,
1981 OPC_MXU_Q16SLLV
= 0x04,
1982 OPC_MXU_Q16SLRV
= 0x05,
1983 OPC_MXU_Q16SARV
= 0x07,
1990 OPC_MXU_Q8MUL
= 0x00,
1991 OPC_MXU_Q8MULSU
= 0x01,
1998 OPC_MXU_Q8MOVZ
= 0x00,
1999 OPC_MXU_Q8MOVN
= 0x01,
2000 OPC_MXU_D16MOVZ
= 0x02,
2001 OPC_MXU_D16MOVN
= 0x03,
2002 OPC_MXU_S32MOVZ
= 0x04,
2003 OPC_MXU_S32MOVN
= 0x05,
2010 OPC_MXU_Q8MAC
= 0x00,
2011 OPC_MXU_Q8MACSU
= 0x01,
2015 * Overview of the TX79-specific instruction set
2016 * =============================================
2018 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2019 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2020 * instructions and certain multimedia instructions (MMIs). These MMIs
2021 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2022 * or sixteen 8-bit paths.
2026 * The Toshiba TX System RISC TX79 Core Architecture manual,
2027 * https://wiki.qemu.org/File:C790.pdf
2029 * Three-Operand Multiply and Multiply-Add (4 instructions)
2030 * --------------------------------------------------------
2031 * MADD [rd,] rs, rt Multiply/Add
2032 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2033 * MULT [rd,] rs, rt Multiply (3-operand)
2034 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2036 * Multiply Instructions for Pipeline 1 (10 instructions)
2037 * ------------------------------------------------------
2038 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2039 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2040 * DIV1 rs, rt Divide Pipeline 1
2041 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2042 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2043 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2044 * MFHI1 rd Move From HI1 Register
2045 * MFLO1 rd Move From LO1 Register
2046 * MTHI1 rs Move To HI1 Register
2047 * MTLO1 rs Move To LO1 Register
2049 * Arithmetic (19 instructions)
2050 * ----------------------------
2051 * PADDB rd, rs, rt Parallel Add Byte
2052 * PSUBB rd, rs, rt Parallel Subtract Byte
2053 * PADDH rd, rs, rt Parallel Add Halfword
2054 * PSUBH rd, rs, rt Parallel Subtract Halfword
2055 * PADDW rd, rs, rt Parallel Add Word
2056 * PSUBW rd, rs, rt Parallel Subtract Word
2057 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2058 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2059 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2060 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2061 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2062 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2063 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2064 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2065 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2066 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2067 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2068 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2069 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2071 * Min/Max (4 instructions)
2072 * ------------------------
2073 * PMAXH rd, rs, rt Parallel Maximum Halfword
2074 * PMINH rd, rs, rt Parallel Minimum Halfword
2075 * PMAXW rd, rs, rt Parallel Maximum Word
2076 * PMINW rd, rs, rt Parallel Minimum Word
2078 * Absolute (2 instructions)
2079 * -------------------------
2080 * PABSH rd, rt Parallel Absolute Halfword
2081 * PABSW rd, rt Parallel Absolute Word
2083 * Logical (4 instructions)
2084 * ------------------------
2085 * PAND rd, rs, rt Parallel AND
2086 * POR rd, rs, rt Parallel OR
2087 * PXOR rd, rs, rt Parallel XOR
2088 * PNOR rd, rs, rt Parallel NOR
2090 * Shift (9 instructions)
2091 * ----------------------
2092 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2093 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2094 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2095 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2096 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2097 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2098 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2099 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2100 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2102 * Compare (6 instructions)
2103 * ------------------------
2104 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2105 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2106 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2107 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2108 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2109 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2111 * LZC (1 instruction)
2112 * -------------------
2113 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2115 * Quadword Load and Store (2 instructions)
2116 * ----------------------------------------
2117 * LQ rt, offset(base) Load Quadword
2118 * SQ rt, offset(base) Store Quadword
2120 * Multiply and Divide (19 instructions)
2121 * -------------------------------------
2122 * PMULTW rd, rs, rt Parallel Multiply Word
2123 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2124 * PDIVW rs, rt Parallel Divide Word
2125 * PDIVUW rs, rt Parallel Divide Unsigned Word
2126 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2127 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2128 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2129 * PMULTH rd, rs, rt Parallel Multiply Halfword
2130 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2131 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2132 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2133 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2134 * PDIVBW rs, rt Parallel Divide Broadcast Word
2135 * PMFHI rd Parallel Move From HI Register
2136 * PMFLO rd Parallel Move From LO Register
2137 * PMTHI rs Parallel Move To HI Register
2138 * PMTLO rs Parallel Move To LO Register
2139 * PMFHL rd Parallel Move From HI/LO Register
2140 * PMTHL rs Parallel Move To HI/LO Register
2142 * Pack/Extend (11 instructions)
2143 * -----------------------------
2144 * PPAC5 rd, rt Parallel Pack to 5 bits
2145 * PPACB rd, rs, rt Parallel Pack to Byte
2146 * PPACH rd, rs, rt Parallel Pack to Halfword
2147 * PPACW rd, rs, rt Parallel Pack to Word
2148 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2149 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2150 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2151 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2152 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2153 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2154 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2156 * Others (16 instructions)
2157 * ------------------------
2158 * PCPYH rd, rt Parallel Copy Halfword
2159 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2160 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2161 * PREVH rd, rt Parallel Reverse Halfword
2162 * PINTH rd, rs, rt Parallel Interleave Halfword
2163 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2164 * PEXEH rd, rt Parallel Exchange Even Halfword
2165 * PEXCH rd, rt Parallel Exchange Center Halfword
2166 * PEXEW rd, rt Parallel Exchange Even Word
2167 * PEXCW rd, rt Parallel Exchange Center Word
2168 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2169 * MFSA rd Move from Shift Amount Register
2170 * MTSA rs Move to Shift Amount Register
2171 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2172 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2173 * PROT3W rd, rt Parallel Rotate 3 Words
2175 * MMI (MultiMedia Instruction) encodings
2176 * ======================================
2178 * MMI instructions encoding table keys:
2180 * * This code is reserved for future use. An attempt to execute it
2181 * causes a Reserved Instruction exception.
2182 * % This code indicates an instruction class. The instruction word
2183 * must be further decoded by examining additional tables that show
2184 * the values for other instruction fields.
2185 * # This code is reserved for the unsupported instructions DMULT,
2186 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2187 * to execute it causes a Reserved Instruction exception.
2189 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2192 * +--------+----------------------------------------+
2194 * +--------+----------------------------------------+
2196 * opcode bits 28..26
2197 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2198 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2199 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2200 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2201 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2202 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2203 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2204 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2205 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2206 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2207 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
2211 MMI_OPC_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
2212 MMI_OPC_LQ
= 0x1E << 26, /* Same as OPC_MSA */
2213 MMI_OPC_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
2217 * MMI instructions with opcode field = MMI:
2220 * +--------+-------------------------------+--------+
2221 * | MMI | |function|
2222 * +--------+-------------------------------+--------+
2224 * function bits 2..0
2225 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2226 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2227 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2228 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2229 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2230 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2231 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2232 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2233 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2234 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2235 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
/* Key a full MMI opcode: major opcode bits plus the function field (5..0). */
#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2240 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2241 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2242 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2243 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2244 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2245 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2246 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2247 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2248 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2249 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2250 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2251 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2252 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2253 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2254 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2255 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2256 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2268 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2271 * +--------+----------------------+--------+--------+
2272 * | MMI | |function| MMI0 |
2273 * +--------+----------------------+--------+--------+
2275 * function bits 7..6
2276 * bits | 0 | 1 | 2 | 3
2277 * 10..8 | 00 | 01 | 10 | 11
2278 * -------+-------+-------+-------+-------
2279 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2280 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2281 * 2 010 | PADDB | PSUBB | PCGTB | *
2282 * 3 011 | * | * | * | *
2283 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2284 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2285 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2286 * 7 111 | * | * | PEXT5 | PPAC5
/* Key an MMI0-class opcode: major opcode bits plus bits 10..0 (function + MMI0). */
#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2291 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2292 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2293 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2294 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2319 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2322 * +--------+----------------------+--------+--------+
2323 * | MMI | |function| MMI1 |
2324 * +--------+----------------------+--------+--------+
2326 * function bits 7..6
2327 * bits | 0 | 1 | 2 | 3
2328 * 10..8 | 00 | 01 | 10 | 11
2329 * -------+-------+-------+-------+-------
2330 * 0 000 | * | PABSW | PCEQW | PMINW
2331 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2332 * 2 010 | * | * | PCEQB | *
2333 * 3 011 | * | * | * | *
2334 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2335 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2336 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2337 * 7 111 | * | * | * | *
/* Key an MMI1-class opcode: major opcode bits plus bits 10..0 (function + MMI1). */
#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2342 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2343 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2344 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2345 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2363 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2366 * +--------+----------------------+--------+--------+
2367 * | MMI | |function| MMI2 |
2368 * +--------+----------------------+--------+--------+
2370 * function bits 7..6
2371 * bits | 0 | 1 | 2 | 3
2372 * 10..8 | 00 | 01 | 10 | 11
2373 * -------+-------+-------+-------+-------
2374 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2375 * 1 001 | PMSUBW| * | * | *
2376 * 2 010 | PMFHI | PMFLO | PINTH | *
2377 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2378 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2379 * 5 101 | PMSUBH| PHMSBH| * | *
2380 * 6 110 | * | * | PEXEH | PREVH
2381 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
/* Key an MMI2-class opcode: major opcode bits plus bits 10..0 (function + MMI2). */
#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2386 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2387 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2388 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2389 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2411 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2414 * +--------+----------------------+--------+--------+
2415 * | MMI | |function| MMI3 |
2416 * +--------+----------------------+--------+--------+
2418 * function bits 7..6
2419 * bits | 0 | 1 | 2 | 3
2420 * 10..8 | 00 | 01 | 10 | 11
2421 * -------+-------+-------+-------+-------
2422 * 0 000 |PMADDUW| * | * | PSRAVW
2423 * 1 001 | * | * | * | *
2424 * 2 010 | PMTHI | PMTLO | PINTEH| *
2425 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2426 * 4 100 | * | * | POR | PNOR
2427 * 5 101 | * | * | * | *
2428 * 6 110 | * | * | PEXCH | PCPYH
2429 * 7 111 | * | * | PEXCW | *
/* Key an MMI3-class opcode: major opcode bits plus bits 10..0 (function + MMI3). */
#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2434 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2435 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2436 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2437 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2449 /* global register indices */
2450 static TCGv cpu_gpr
[32], cpu_PC
;
2451 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2452 static TCGv cpu_dspctrl
, btarget
, bcond
;
2453 static TCGv_i32 hflags
;
2454 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2455 static TCGv_i64 fpu_f64
[32];
2456 static TCGv_i64 msa_wr_d
[64];
2458 #if !defined(TARGET_MIPS64)
2460 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2464 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: box the trailing immediate argument(s) into a
 * TCGv_i32 temporary, invoke the named helper with cpu_env, then free
 * the temporary.  Naming: gen_helper_<R>e<A>i where R is the number of
 * return values and A the number of TCGv arguments before the immediate.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
/*
 * Per-translation-block decoder state.
 * NOTE(review): this struct is truncated by extraction — several fields
 * between CP0_Config5 and CP0_LLAddr_shift (and the closing
 * "} DisasContext;") are missing here; restore from the original file.
 */
2508 typedef struct DisasContext
{
2509 DisasContextBase base
;
/* PC value last written with gen_save_pc() (see save_cpu_state). */
2510 target_ulong saved_pc
;
2511 target_ulong page_start
;
/* ISA feature flags of the CPU being translated. */
2513 uint64_t insn_flags
;
/* Cached CP0 configuration registers. */
2514 int32_t CP0_Config1
;
2515 int32_t CP0_Config2
;
2516 int32_t CP0_Config3
;
2517 int32_t CP0_Config5
;
2518 /* Routine used to access memory */
2520 TCGMemOp default_tcg_memop_mask
;
/* Current hflags and the value last stored to the hflags global. */
2521 uint32_t hflags
, saved_hflags
;
/* Branch target for the delay slot being translated. */
2522 target_ulong btarget
;
2533 int CP0_LLAddr_shift
;
/*
 * Translator exit reasons beyond the generic DisasJumpType values.
 * NOTE(review): presumably DISAS_STOP ends the TB and chains to the next,
 * while DISAS_EXIT returns to the main loop — confirm against the users
 * of ctx->base.is_jmp elsewhere in this file.
 */
2542 #define DISAS_STOP DISAS_TARGET_0
2543 #define DISAS_EXIT DISAS_TARGET_1
/* ABI names of the 32 general-purpose registers, for disassembly/logging. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulator registers (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulator registers (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the MSA vector registers, one entry per 64-bit half (w<N>.d0/d1). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#if !defined(TARGET_MIPS64)
/* Names of the MXU registers: XR1..XR15 plus the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
/* Emit a translation-time debug line when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)
/*
 * Log an invalid-instruction diagnostic (PC, raw opcode, and the major,
 * minor, and rt opcode fields) when MIPS_DEBUG_DISAS is enabled.
 * Expects a DisasContext *ctx in scope at the expansion site.
 */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2611 /* General purpose registers moves. */
2612 static inline void gen_load_gpr (TCGv t
, int reg
)
2615 tcg_gen_movi_tl(t
, 0);
2617 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2620 static inline void gen_store_gpr (TCGv t
, int reg
)
2623 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2626 /* Moves to/from shadow registers. */
2627 static inline void gen_load_srsgpr (int from
, int to
)
2629 TCGv t0
= tcg_temp_new();
2632 tcg_gen_movi_tl(t0
, 0);
2634 TCGv_i32 t2
= tcg_temp_new_i32();
2635 TCGv_ptr addr
= tcg_temp_new_ptr();
2637 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2638 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2639 tcg_gen_andi_i32(t2
, t2
, 0xf);
2640 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2641 tcg_gen_ext_i32_ptr(addr
, t2
);
2642 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2644 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2645 tcg_temp_free_ptr(addr
);
2646 tcg_temp_free_i32(t2
);
2648 gen_store_gpr(t0
, to
);
2652 static inline void gen_store_srsgpr (int from
, int to
)
2655 TCGv t0
= tcg_temp_new();
2656 TCGv_i32 t2
= tcg_temp_new_i32();
2657 TCGv_ptr addr
= tcg_temp_new_ptr();
2659 gen_load_gpr(t0
, from
);
2660 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2661 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2662 tcg_gen_andi_i32(t2
, t2
, 0xf);
2663 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2664 tcg_gen_ext_i32_ptr(addr
, t2
);
2665 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2667 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2668 tcg_temp_free_ptr(addr
);
2669 tcg_temp_free_i32(t2
);
2674 #if !defined(TARGET_MIPS64)
2675 /* MXU General purpose registers moves. */
2676 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2679 tcg_gen_movi_tl(t
, 0);
2680 } else if (reg
<= 15) {
2681 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2685 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2687 if (reg
> 0 && reg
<= 15) {
2688 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2692 /* MXU control register moves. */
2693 static inline void gen_load_mxu_cr(TCGv t
)
2695 tcg_gen_mov_tl(t
, mxu_CR
);
2698 static inline void gen_store_mxu_cr(TCGv t
)
2700 /* TODO: Add handling of RW rules for MXU_CR. */
2701 tcg_gen_mov_tl(mxu_CR
, t
);
2707 static inline void gen_save_pc(target_ulong pc
)
2709 tcg_gen_movi_tl(cpu_PC
, pc
);
2712 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2714 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2715 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2716 gen_save_pc(ctx
->base
.pc_next
);
2717 ctx
->saved_pc
= ctx
->base
.pc_next
;
2719 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2720 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2721 ctx
->saved_hflags
= ctx
->hflags
;
2722 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2728 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2734 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2736 ctx
->saved_hflags
= ctx
->hflags
;
2737 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2743 ctx
->btarget
= env
->btarget
;
2748 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2750 TCGv_i32 texcp
= tcg_const_i32(excp
);
2751 TCGv_i32 terr
= tcg_const_i32(err
);
2752 save_cpu_state(ctx
, 1);
2753 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2754 tcg_temp_free_i32(terr
);
2755 tcg_temp_free_i32(texcp
);
2756 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2759 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2761 gen_helper_0e0i(raise_exception
, excp
);
2764 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2766 generate_exception_err(ctx
, excp
, 0);
2769 /* Floating point register moves. */
2770 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2772 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2773 generate_exception(ctx
, EXCP_RI
);
2775 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2778 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2781 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2782 generate_exception(ctx
, EXCP_RI
);
2784 t64
= tcg_temp_new_i64();
2785 tcg_gen_extu_i32_i64(t64
, t
);
2786 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2787 tcg_temp_free_i64(t64
);
2790 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2792 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2793 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2795 gen_load_fpr32(ctx
, t
, reg
| 1);
2799 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2801 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2802 TCGv_i64 t64
= tcg_temp_new_i64();
2803 tcg_gen_extu_i32_i64(t64
, t
);
2804 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2805 tcg_temp_free_i64(t64
);
2807 gen_store_fpr32(ctx
, t
, reg
| 1);
2811 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2813 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2814 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2816 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2820 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2822 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2823 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2826 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2827 t0
= tcg_temp_new_i64();
2828 tcg_gen_shri_i64(t0
, t
, 32);
2829 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2830 tcg_temp_free_i64(t0
);
/* Map an FP condition-code index to its bit position in FCSR:
   cc 0 uses bit 23, cc 1..7 use bits 25..31.  */
static inline int get_fp_bit(int cc)
{
    return cc ? 24 + cc : 23;
}
2842 /* Addresses computation */
2843 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2845 tcg_gen_add_tl(ret
, arg0
, arg1
);
2847 #if defined(TARGET_MIPS64)
2848 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2849 tcg_gen_ext32s_i64(ret
, ret
);
2854 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2857 tcg_gen_addi_tl(ret
, base
, ofs
);
2859 #if defined(TARGET_MIPS64)
2860 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2861 tcg_gen_ext32s_i64(ret
, ret
);
2866 /* Addresses computation (translation time) */
2867 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2870 target_long sum
= base
+ offset
;
2872 #if defined(TARGET_MIPS64)
2873 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2880 /* Sign-extract the low 32-bits to a target_long. */
2881 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2883 #if defined(TARGET_MIPS64)
2884 tcg_gen_ext32s_i64(ret
, arg
);
2886 tcg_gen_extrl_i64_i32(ret
, arg
);
2890 /* Sign-extract the high 32-bits to a target_long. */
2891 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2893 #if defined(TARGET_MIPS64)
2894 tcg_gen_sari_i64(ret
, arg
, 32);
2896 tcg_gen_extrh_i64_i32(ret
, arg
);
2900 static inline void check_cp0_enabled(DisasContext
*ctx
)
2902 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2903 generate_exception_err(ctx
, EXCP_CpU
, 0);
2906 static inline void check_cp1_enabled(DisasContext
*ctx
)
2908 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2909 generate_exception_err(ctx
, EXCP_CpU
, 1);
2912 /* Verify that the processor is running with COP1X instructions enabled.
2913 This is associated with the nabla symbol in the MIPS32 and MIPS64
2916 static inline void check_cop1x(DisasContext
*ctx
)
2918 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2919 generate_exception_end(ctx
, EXCP_RI
);
2922 /* Verify that the processor is running with 64-bit floating-point
2923 operations enabled. */
2925 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2927 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2928 generate_exception_end(ctx
, EXCP_RI
);
2932 * Verify if floating point register is valid; an operation is not defined
2933 * if bit 0 of any register specification is set and the FR bit in the
2934 * Status register equals zero, since the register numbers specify an
2935 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2936 * in the Status register equals one, both even and odd register numbers
2937 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2939 * Multiple 64 bit wide registers can be checked by calling
2940 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2942 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2944 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2945 generate_exception_end(ctx
, EXCP_RI
);
2948 /* Verify that the processor is running with DSP instructions enabled.
2949 This is enabled by CP0 Status register MX(24) bit.
2952 static inline void check_dsp(DisasContext
*ctx
)
2954 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2955 if (ctx
->insn_flags
& ASE_DSP
) {
2956 generate_exception_end(ctx
, EXCP_DSPDIS
);
2958 generate_exception_end(ctx
, EXCP_RI
);
2963 static inline void check_dsp_r2(DisasContext
*ctx
)
2965 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2966 if (ctx
->insn_flags
& ASE_DSP
) {
2967 generate_exception_end(ctx
, EXCP_DSPDIS
);
2969 generate_exception_end(ctx
, EXCP_RI
);
2974 static inline void check_dsp_r3(DisasContext
*ctx
)
2976 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2977 if (ctx
->insn_flags
& ASE_DSP
) {
2978 generate_exception_end(ctx
, EXCP_DSPDIS
);
2980 generate_exception_end(ctx
, EXCP_RI
);
2985 /* This code generates a "reserved instruction" exception if the
2986 CPU does not support the instruction set corresponding to flags. */
2987 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2989 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2990 generate_exception_end(ctx
, EXCP_RI
);
2994 /* This code generates a "reserved instruction" exception if the
2995 CPU has corresponding flag set which indicates that the instruction
2996 has been removed. */
2997 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2999 if (unlikely(ctx
->insn_flags
& flags
)) {
3000 generate_exception_end(ctx
, EXCP_RI
);
3005 * The Linux kernel traps certain reserved instruction exceptions to
3006 * emulate the corresponding instructions. QEMU is the kernel in user
3007 * mode, so those traps are emulated by accepting the instructions.
3009 * A reserved instruction exception is generated for flagged CPUs if
3010 * QEMU runs in system mode.
3012 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3014 #ifndef CONFIG_USER_ONLY
3015 check_insn_opc_removed(ctx
, flags
);
3019 /* This code generates a "reserved instruction" exception if the
3020 CPU does not support 64-bit paired-single (PS) floating point data type */
3021 static inline void check_ps(DisasContext
*ctx
)
3023 if (unlikely(!ctx
->ps
)) {
3024 generate_exception(ctx
, EXCP_RI
);
3026 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
3039 #ifndef CONFIG_USER_ONLY
3040 static inline void check_mvh(DisasContext
*ctx
)
3042 if (unlikely(!ctx
->mvh
)) {
3043 generate_exception(ctx
, EXCP_RI
);
3049 * This code generates a "reserved instruction" exception if the
3050 * Config5 XNP bit is set.
3052 static inline void check_xnp(DisasContext
*ctx
)
3054 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3055 generate_exception_end(ctx
, EXCP_RI
);
3059 #ifndef CONFIG_USER_ONLY
3061 * This code generates a "reserved instruction" exception if the
3062 * Config3 PW bit is NOT set.
3064 static inline void check_pw(DisasContext
*ctx
)
3066 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3067 generate_exception_end(ctx
, EXCP_RI
);
3073 * This code generates a "reserved instruction" exception if the
3074 * Config3 MT bit is NOT set.
3076 static inline void check_mt(DisasContext
*ctx
)
3078 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3079 generate_exception_end(ctx
, EXCP_RI
);
3083 #ifndef CONFIG_USER_ONLY
3085 * This code generates a "coprocessor unusable" exception if CP0 is not
3086 * available, and, if that is not the case, generates a "reserved instruction"
3087 * exception if the Config5 MT bit is NOT set. This is needed for availability
3088 * control of some of MT ASE instructions.
3090 static inline void check_cp0_mt(DisasContext
*ctx
)
3092 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3093 generate_exception_err(ctx
, EXCP_CpU
, 0);
3095 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3096 generate_exception_err(ctx
, EXCP_RI
, 0);
3103 * This code generates a "reserved instruction" exception if the
3104 * Config5 NMS bit is set.
3106 static inline void check_nms(DisasContext
*ctx
)
3108 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3109 generate_exception_end(ctx
, EXCP_RI
);
3114 * This code generates a "reserved instruction" exception if the
3115 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3116 * Config2 TL, and Config5 L2C are unset.
3118 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3120 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3121 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3122 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3123 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3124 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3125 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
3127 generate_exception_end(ctx
, EXCP_RI
);
3132 * This code generates a "reserved instruction" exception if the
3133 * Config5 EVA bit is NOT set.
3135 static inline void check_eva(DisasContext
*ctx
)
3137 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3138 generate_exception_end(ctx
, EXCP_RI
);
3143 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3144 calling interface for 32 and 64-bit FPRs. No sense in changing
3145 all callers for gen_load_fpr32 when we need the CTX parameter for
3147 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3148 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3149 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3150 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3151 int ft, int fs, int cc) \
3153 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3154 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3163 check_cp1_registers(ctx, fs | ft); \
3171 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3172 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3174 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3175 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3176 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3177 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3178 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3179 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3180 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3181 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3182 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3183 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3184 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3185 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3186 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3187 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3188 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3189 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3192 tcg_temp_free_i##bits (fp0); \
3193 tcg_temp_free_i##bits (fp1); \
3196 FOP_CONDS(, 0, d
, FMT_D
, 64)
3197 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3198 FOP_CONDS(, 0, s
, FMT_S
, 32)
3199 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3200 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3201 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3204 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3205 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3206 int ft, int fs, int fd) \
3208 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3209 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3210 if (ifmt == FMT_D) { \
3211 check_cp1_registers(ctx, fs | ft | fd); \
3213 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3214 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3217 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3220 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3223 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3226 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3229 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3232 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3235 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3238 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3241 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3244 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3247 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3250 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3253 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3256 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3259 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3262 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3265 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3268 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3271 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3274 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3277 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3280 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3286 tcg_temp_free_i ## bits (fp0); \
3287 tcg_temp_free_i ## bits (fp1); \
3290 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3291 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3293 #undef gen_ldcmp_fpr32
3294 #undef gen_ldcmp_fpr64
3296 /* load/store instructions. */
3297 #ifdef CONFIG_USER_ONLY
3298 #define OP_LD_ATOMIC(insn,fname) \
3299 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3300 DisasContext *ctx) \
3302 TCGv t0 = tcg_temp_new(); \
3303 tcg_gen_mov_tl(t0, arg1); \
3304 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3305 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3306 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3307 tcg_temp_free(t0); \
3310 #define OP_LD_ATOMIC(insn,fname) \
3311 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3312 DisasContext *ctx) \
3314 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3317 OP_LD_ATOMIC(ll
,ld32s
);
3318 #if defined(TARGET_MIPS64)
3319 OP_LD_ATOMIC(lld
,ld64
);
3323 #ifdef CONFIG_USER_ONLY
3324 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3325 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3326 DisasContext *ctx) \
3328 TCGv t0 = tcg_temp_new(); \
3329 TCGLabel *l1 = gen_new_label(); \
3330 TCGLabel *l2 = gen_new_label(); \
3332 tcg_gen_andi_tl(t0, arg2, almask); \
3333 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3334 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3335 generate_exception(ctx, EXCP_AdES); \
3336 gen_set_label(l1); \
3337 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3338 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3339 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3340 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3341 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3342 generate_exception_end(ctx, EXCP_SC); \
3343 gen_set_label(l2); \
3344 tcg_gen_movi_tl(t0, 0); \
3345 gen_store_gpr(t0, rt); \
3346 tcg_temp_free(t0); \
3349 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3350 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3351 DisasContext *ctx) \
3353 TCGv t0 = tcg_temp_new(); \
3354 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3355 gen_store_gpr(t0, rt); \
3356 tcg_temp_free(t0); \
3359 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3360 #if defined(TARGET_MIPS64)
3361 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3365 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3366 int base
, int offset
)
3369 tcg_gen_movi_tl(addr
, offset
);
3370 } else if (offset
== 0) {
3371 gen_load_gpr(addr
, base
);
3373 tcg_gen_movi_tl(addr
, offset
);
3374 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3378 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3380 target_ulong pc
= ctx
->base
.pc_next
;
3382 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3383 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3388 pc
&= ~(target_ulong
)3;
3393 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3394 int rt
, int base
, int offset
)
3397 int mem_idx
= ctx
->mem_idx
;
3399 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3400 /* Loongson CPU uses a load to zero register for prefetch.
3401 We emulate it as a NOP. On other CPU we must perform the
3402 actual memory access. */
3406 t0
= tcg_temp_new();
3407 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3410 #if defined(TARGET_MIPS64)
3412 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3413 ctx
->default_tcg_memop_mask
);
3414 gen_store_gpr(t0
, rt
);
3417 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3418 ctx
->default_tcg_memop_mask
);
3419 gen_store_gpr(t0
, rt
);
3423 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3424 gen_store_gpr(t0
, rt
);
3427 t1
= tcg_temp_new();
3428 /* Do a byte access to possibly trigger a page
3429 fault with the unaligned address. */
3430 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3431 tcg_gen_andi_tl(t1
, t0
, 7);
3432 #ifndef TARGET_WORDS_BIGENDIAN
3433 tcg_gen_xori_tl(t1
, t1
, 7);
3435 tcg_gen_shli_tl(t1
, t1
, 3);
3436 tcg_gen_andi_tl(t0
, t0
, ~7);
3437 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3438 tcg_gen_shl_tl(t0
, t0
, t1
);
3439 t2
= tcg_const_tl(-1);
3440 tcg_gen_shl_tl(t2
, t2
, t1
);
3441 gen_load_gpr(t1
, rt
);
3442 tcg_gen_andc_tl(t1
, t1
, t2
);
3444 tcg_gen_or_tl(t0
, t0
, t1
);
3446 gen_store_gpr(t0
, rt
);
3449 t1
= tcg_temp_new();
3450 /* Do a byte access to possibly trigger a page
3451 fault with the unaligned address. */
3452 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3453 tcg_gen_andi_tl(t1
, t0
, 7);
3454 #ifdef TARGET_WORDS_BIGENDIAN
3455 tcg_gen_xori_tl(t1
, t1
, 7);
3457 tcg_gen_shli_tl(t1
, t1
, 3);
3458 tcg_gen_andi_tl(t0
, t0
, ~7);
3459 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3460 tcg_gen_shr_tl(t0
, t0
, t1
);
3461 tcg_gen_xori_tl(t1
, t1
, 63);
3462 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3463 tcg_gen_shl_tl(t2
, t2
, t1
);
3464 gen_load_gpr(t1
, rt
);
3465 tcg_gen_and_tl(t1
, t1
, t2
);
3467 tcg_gen_or_tl(t0
, t0
, t1
);
3469 gen_store_gpr(t0
, rt
);
3472 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3473 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3475 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3476 gen_store_gpr(t0
, rt
);
3480 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3481 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3483 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3484 gen_store_gpr(t0
, rt
);
3487 mem_idx
= MIPS_HFLAG_UM
;
3490 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3491 ctx
->default_tcg_memop_mask
);
3492 gen_store_gpr(t0
, rt
);
3495 mem_idx
= MIPS_HFLAG_UM
;
3498 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3499 ctx
->default_tcg_memop_mask
);
3500 gen_store_gpr(t0
, rt
);
3503 mem_idx
= MIPS_HFLAG_UM
;
3506 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3507 ctx
->default_tcg_memop_mask
);
3508 gen_store_gpr(t0
, rt
);
3511 mem_idx
= MIPS_HFLAG_UM
;
3514 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3515 gen_store_gpr(t0
, rt
);
3518 mem_idx
= MIPS_HFLAG_UM
;
3521 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3522 gen_store_gpr(t0
, rt
);
3525 mem_idx
= MIPS_HFLAG_UM
;
3528 t1
= tcg_temp_new();
3529 /* Do a byte access to possibly trigger a page
3530 fault with the unaligned address. */
3531 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3532 tcg_gen_andi_tl(t1
, t0
, 3);
3533 #ifndef TARGET_WORDS_BIGENDIAN
3534 tcg_gen_xori_tl(t1
, t1
, 3);
3536 tcg_gen_shli_tl(t1
, t1
, 3);
3537 tcg_gen_andi_tl(t0
, t0
, ~3);
3538 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3539 tcg_gen_shl_tl(t0
, t0
, t1
);
3540 t2
= tcg_const_tl(-1);
3541 tcg_gen_shl_tl(t2
, t2
, t1
);
3542 gen_load_gpr(t1
, rt
);
3543 tcg_gen_andc_tl(t1
, t1
, t2
);
3545 tcg_gen_or_tl(t0
, t0
, t1
);
3547 tcg_gen_ext32s_tl(t0
, t0
);
3548 gen_store_gpr(t0
, rt
);
3551 mem_idx
= MIPS_HFLAG_UM
;
3554 t1
= tcg_temp_new();
3555 /* Do a byte access to possibly trigger a page
3556 fault with the unaligned address. */
3557 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3558 tcg_gen_andi_tl(t1
, t0
, 3);
3559 #ifdef TARGET_WORDS_BIGENDIAN
3560 tcg_gen_xori_tl(t1
, t1
, 3);
3562 tcg_gen_shli_tl(t1
, t1
, 3);
3563 tcg_gen_andi_tl(t0
, t0
, ~3);
3564 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3565 tcg_gen_shr_tl(t0
, t0
, t1
);
3566 tcg_gen_xori_tl(t1
, t1
, 31);
3567 t2
= tcg_const_tl(0xfffffffeull
);
3568 tcg_gen_shl_tl(t2
, t2
, t1
);
3569 gen_load_gpr(t1
, rt
);
3570 tcg_gen_and_tl(t1
, t1
, t2
);
3572 tcg_gen_or_tl(t0
, t0
, t1
);
3574 tcg_gen_ext32s_tl(t0
, t0
);
3575 gen_store_gpr(t0
, rt
);
3578 mem_idx
= MIPS_HFLAG_UM
;
3582 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3583 gen_store_gpr(t0
, rt
);
3589 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3590 uint32_t reg1
, uint32_t reg2
)
3592 TCGv taddr
= tcg_temp_new();
3593 TCGv_i64 tval
= tcg_temp_new_i64();
3594 TCGv tmp1
= tcg_temp_new();
3595 TCGv tmp2
= tcg_temp_new();
3597 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3598 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3599 #ifdef TARGET_WORDS_BIGENDIAN
3600 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3602 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3604 gen_store_gpr(tmp1
, reg1
);
3605 tcg_temp_free(tmp1
);
3606 gen_store_gpr(tmp2
, reg2
);
3607 tcg_temp_free(tmp2
);
3608 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3609 tcg_temp_free_i64(tval
);
3610 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3611 tcg_temp_free(taddr
);
3615 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3616 int base
, int offset
)
3618 TCGv t0
= tcg_temp_new();
3619 TCGv t1
= tcg_temp_new();
3620 int mem_idx
= ctx
->mem_idx
;
3622 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3623 gen_load_gpr(t1
, rt
);
3625 #if defined(TARGET_MIPS64)
3627 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3628 ctx
->default_tcg_memop_mask
);
3631 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3634 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3638 mem_idx
= MIPS_HFLAG_UM
;
3641 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3642 ctx
->default_tcg_memop_mask
);
3645 mem_idx
= MIPS_HFLAG_UM
;
3648 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3649 ctx
->default_tcg_memop_mask
);
3652 mem_idx
= MIPS_HFLAG_UM
;
3655 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3658 mem_idx
= MIPS_HFLAG_UM
;
3661 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3664 mem_idx
= MIPS_HFLAG_UM
;
3667 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3675 /* Store conditional */
3676 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3677 int base
, int16_t offset
)
3680 int mem_idx
= ctx
->mem_idx
;
3682 #ifdef CONFIG_USER_ONLY
3683 t0
= tcg_temp_local_new();
3684 t1
= tcg_temp_local_new();
3686 t0
= tcg_temp_new();
3687 t1
= tcg_temp_new();
3689 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3690 gen_load_gpr(t1
, rt
);
3692 #if defined(TARGET_MIPS64)
3695 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3699 mem_idx
= MIPS_HFLAG_UM
;
3703 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3710 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3711 uint32_t reg1
, uint32_t reg2
)
3713 TCGv taddr
= tcg_temp_local_new();
3714 TCGv lladdr
= tcg_temp_local_new();
3715 TCGv_i64 tval
= tcg_temp_new_i64();
3716 TCGv_i64 llval
= tcg_temp_new_i64();
3717 TCGv_i64 val
= tcg_temp_new_i64();
3718 TCGv tmp1
= tcg_temp_new();
3719 TCGv tmp2
= tcg_temp_new();
3720 TCGLabel
*lab_fail
= gen_new_label();
3721 TCGLabel
*lab_done
= gen_new_label();
3723 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3725 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3726 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3728 gen_load_gpr(tmp1
, reg1
);
3729 gen_load_gpr(tmp2
, reg2
);
3731 #ifdef TARGET_WORDS_BIGENDIAN
3732 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3734 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3737 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3738 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3739 ctx
->mem_idx
, MO_64
);
3741 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3743 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3745 gen_set_label(lab_fail
);
3748 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3750 gen_set_label(lab_done
);
3751 tcg_gen_movi_tl(lladdr
, -1);
3752 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3755 /* Load and store */
3756 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3759 /* Don't do NOP if destination is zero: we must perform the actual
3764 TCGv_i32 fp0
= tcg_temp_new_i32();
3765 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3766 ctx
->default_tcg_memop_mask
);
3767 gen_store_fpr32(ctx
, fp0
, ft
);
3768 tcg_temp_free_i32(fp0
);
3773 TCGv_i32 fp0
= tcg_temp_new_i32();
3774 gen_load_fpr32(ctx
, fp0
, ft
);
3775 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3776 ctx
->default_tcg_memop_mask
);
3777 tcg_temp_free_i32(fp0
);
3782 TCGv_i64 fp0
= tcg_temp_new_i64();
3783 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3784 ctx
->default_tcg_memop_mask
);
3785 gen_store_fpr64(ctx
, fp0
, ft
);
3786 tcg_temp_free_i64(fp0
);
3791 TCGv_i64 fp0
= tcg_temp_new_i64();
3792 gen_load_fpr64(ctx
, fp0
, ft
);
3793 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3794 ctx
->default_tcg_memop_mask
);
3795 tcg_temp_free_i64(fp0
);
3799 MIPS_INVAL("flt_ldst");
3800 generate_exception_end(ctx
, EXCP_RI
);
3805 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3806 int rs
, int16_t imm
)
3808 TCGv t0
= tcg_temp_new();
3810 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3811 check_cp1_enabled(ctx
);
3815 check_insn(ctx
, ISA_MIPS2
);
3818 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3819 gen_flt_ldst(ctx
, op
, rt
, t0
);
3822 generate_exception_err(ctx
, EXCP_CpU
, 1);
3827 /* Arithmetic with immediate operand */
3828 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3829 int rt
, int rs
, int imm
)
3831 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3833 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3834 /* If no destination, treat it as a NOP.
3835 For addi, we must generate the overflow exception when needed. */
3841 TCGv t0
= tcg_temp_local_new();
3842 TCGv t1
= tcg_temp_new();
3843 TCGv t2
= tcg_temp_new();
3844 TCGLabel
*l1
= gen_new_label();
3846 gen_load_gpr(t1
, rs
);
3847 tcg_gen_addi_tl(t0
, t1
, uimm
);
3848 tcg_gen_ext32s_tl(t0
, t0
);
3850 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3851 tcg_gen_xori_tl(t2
, t0
, uimm
);
3852 tcg_gen_and_tl(t1
, t1
, t2
);
3854 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3856 /* operands of same sign, result different sign */
3857 generate_exception(ctx
, EXCP_OVERFLOW
);
3859 tcg_gen_ext32s_tl(t0
, t0
);
3860 gen_store_gpr(t0
, rt
);
3866 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3867 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3869 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3872 #if defined(TARGET_MIPS64)
3875 TCGv t0
= tcg_temp_local_new();
3876 TCGv t1
= tcg_temp_new();
3877 TCGv t2
= tcg_temp_new();
3878 TCGLabel
*l1
= gen_new_label();
3880 gen_load_gpr(t1
, rs
);
3881 tcg_gen_addi_tl(t0
, t1
, uimm
);
3883 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3884 tcg_gen_xori_tl(t2
, t0
, uimm
);
3885 tcg_gen_and_tl(t1
, t1
, t2
);
3887 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3889 /* operands of same sign, result different sign */
3890 generate_exception(ctx
, EXCP_OVERFLOW
);
3892 gen_store_gpr(t0
, rt
);
3898 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3900 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3907 /* Logic with immediate operand */
3908 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3909 int rt
, int rs
, int16_t imm
)
3914 /* If no destination, treat it as a NOP. */
3917 uimm
= (uint16_t)imm
;
3920 if (likely(rs
!= 0))
3921 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3923 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3927 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3929 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3932 if (likely(rs
!= 0))
3933 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3935 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3938 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3940 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3941 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3943 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3952 /* Set on less than with immediate operand */
3953 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3954 int rt
, int rs
, int16_t imm
)
3956 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3960 /* If no destination, treat it as a NOP. */
3963 t0
= tcg_temp_new();
3964 gen_load_gpr(t0
, rs
);
3967 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3970 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3976 /* Shifts with immediate operand */
3977 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3978 int rt
, int rs
, int16_t imm
)
3980 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3984 /* If no destination, treat it as a NOP. */
3988 t0
= tcg_temp_new();
3989 gen_load_gpr(t0
, rs
);
3992 tcg_gen_shli_tl(t0
, t0
, uimm
);
3993 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3996 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4000 tcg_gen_ext32u_tl(t0
, t0
);
4001 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4003 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4008 TCGv_i32 t1
= tcg_temp_new_i32();
4010 tcg_gen_trunc_tl_i32(t1
, t0
);
4011 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4012 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4013 tcg_temp_free_i32(t1
);
4015 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4018 #if defined(TARGET_MIPS64)
4020 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4023 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4026 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4030 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4032 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4036 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4039 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4042 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4045 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4053 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4054 int rd
, int rs
, int rt
)
4056 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4057 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4058 /* If no destination, treat it as a NOP.
4059 For add & sub, we must generate the overflow exception when needed. */
4066 TCGv t0
= tcg_temp_local_new();
4067 TCGv t1
= tcg_temp_new();
4068 TCGv t2
= tcg_temp_new();
4069 TCGLabel
*l1
= gen_new_label();
4071 gen_load_gpr(t1
, rs
);
4072 gen_load_gpr(t2
, rt
);
4073 tcg_gen_add_tl(t0
, t1
, t2
);
4074 tcg_gen_ext32s_tl(t0
, t0
);
4075 tcg_gen_xor_tl(t1
, t1
, t2
);
4076 tcg_gen_xor_tl(t2
, t0
, t2
);
4077 tcg_gen_andc_tl(t1
, t2
, t1
);
4079 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4081 /* operands of same sign, result different sign */
4082 generate_exception(ctx
, EXCP_OVERFLOW
);
4084 gen_store_gpr(t0
, rd
);
4089 if (rs
!= 0 && rt
!= 0) {
4090 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4091 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4092 } else if (rs
== 0 && rt
!= 0) {
4093 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4094 } else if (rs
!= 0 && rt
== 0) {
4095 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4097 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4102 TCGv t0
= tcg_temp_local_new();
4103 TCGv t1
= tcg_temp_new();
4104 TCGv t2
= tcg_temp_new();
4105 TCGLabel
*l1
= gen_new_label();
4107 gen_load_gpr(t1
, rs
);
4108 gen_load_gpr(t2
, rt
);
4109 tcg_gen_sub_tl(t0
, t1
, t2
);
4110 tcg_gen_ext32s_tl(t0
, t0
);
4111 tcg_gen_xor_tl(t2
, t1
, t2
);
4112 tcg_gen_xor_tl(t1
, t0
, t1
);
4113 tcg_gen_and_tl(t1
, t1
, t2
);
4115 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4117 /* operands of different sign, first operand and result different sign */
4118 generate_exception(ctx
, EXCP_OVERFLOW
);
4120 gen_store_gpr(t0
, rd
);
4125 if (rs
!= 0 && rt
!= 0) {
4126 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4127 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4128 } else if (rs
== 0 && rt
!= 0) {
4129 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4130 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4131 } else if (rs
!= 0 && rt
== 0) {
4132 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4134 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4137 #if defined(TARGET_MIPS64)
4140 TCGv t0
= tcg_temp_local_new();
4141 TCGv t1
= tcg_temp_new();
4142 TCGv t2
= tcg_temp_new();
4143 TCGLabel
*l1
= gen_new_label();
4145 gen_load_gpr(t1
, rs
);
4146 gen_load_gpr(t2
, rt
);
4147 tcg_gen_add_tl(t0
, t1
, t2
);
4148 tcg_gen_xor_tl(t1
, t1
, t2
);
4149 tcg_gen_xor_tl(t2
, t0
, t2
);
4150 tcg_gen_andc_tl(t1
, t2
, t1
);
4152 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4154 /* operands of same sign, result different sign */
4155 generate_exception(ctx
, EXCP_OVERFLOW
);
4157 gen_store_gpr(t0
, rd
);
4162 if (rs
!= 0 && rt
!= 0) {
4163 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4164 } else if (rs
== 0 && rt
!= 0) {
4165 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4166 } else if (rs
!= 0 && rt
== 0) {
4167 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4169 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4174 TCGv t0
= tcg_temp_local_new();
4175 TCGv t1
= tcg_temp_new();
4176 TCGv t2
= tcg_temp_new();
4177 TCGLabel
*l1
= gen_new_label();
4179 gen_load_gpr(t1
, rs
);
4180 gen_load_gpr(t2
, rt
);
4181 tcg_gen_sub_tl(t0
, t1
, t2
);
4182 tcg_gen_xor_tl(t2
, t1
, t2
);
4183 tcg_gen_xor_tl(t1
, t0
, t1
);
4184 tcg_gen_and_tl(t1
, t1
, t2
);
4186 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4188 /* operands of different sign, first operand and result different sign */
4189 generate_exception(ctx
, EXCP_OVERFLOW
);
4191 gen_store_gpr(t0
, rd
);
4196 if (rs
!= 0 && rt
!= 0) {
4197 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4198 } else if (rs
== 0 && rt
!= 0) {
4199 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4200 } else if (rs
!= 0 && rt
== 0) {
4201 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4203 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4208 if (likely(rs
!= 0 && rt
!= 0)) {
4209 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4210 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4212 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4218 /* Conditional move */
4219 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4220 int rd
, int rs
, int rt
)
4225 /* If no destination, treat it as a NOP. */
4229 t0
= tcg_temp_new();
4230 gen_load_gpr(t0
, rt
);
4231 t1
= tcg_const_tl(0);
4232 t2
= tcg_temp_new();
4233 gen_load_gpr(t2
, rs
);
4236 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4239 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4242 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4245 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4254 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4255 int rd
, int rs
, int rt
)
4258 /* If no destination, treat it as a NOP. */
4264 if (likely(rs
!= 0 && rt
!= 0)) {
4265 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4267 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4271 if (rs
!= 0 && rt
!= 0) {
4272 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4273 } else if (rs
== 0 && rt
!= 0) {
4274 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4275 } else if (rs
!= 0 && rt
== 0) {
4276 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4278 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4282 if (likely(rs
!= 0 && rt
!= 0)) {
4283 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4284 } else if (rs
== 0 && rt
!= 0) {
4285 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4286 } else if (rs
!= 0 && rt
== 0) {
4287 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4289 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4293 if (likely(rs
!= 0 && rt
!= 0)) {
4294 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4295 } else if (rs
== 0 && rt
!= 0) {
4296 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4297 } else if (rs
!= 0 && rt
== 0) {
4298 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4300 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4306 /* Set on lower than */
4307 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4308 int rd
, int rs
, int rt
)
4313 /* If no destination, treat it as a NOP. */
4317 t0
= tcg_temp_new();
4318 t1
= tcg_temp_new();
4319 gen_load_gpr(t0
, rs
);
4320 gen_load_gpr(t1
, rt
);
4323 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4326 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4334 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4335 int rd
, int rs
, int rt
)
4340 /* If no destination, treat it as a NOP.
4341 For add & sub, we must generate the overflow exception when needed. */
4345 t0
= tcg_temp_new();
4346 t1
= tcg_temp_new();
4347 gen_load_gpr(t0
, rs
);
4348 gen_load_gpr(t1
, rt
);
4351 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4352 tcg_gen_shl_tl(t0
, t1
, t0
);
4353 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4356 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4357 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4360 tcg_gen_ext32u_tl(t1
, t1
);
4361 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4362 tcg_gen_shr_tl(t0
, t1
, t0
);
4363 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4367 TCGv_i32 t2
= tcg_temp_new_i32();
4368 TCGv_i32 t3
= tcg_temp_new_i32();
4370 tcg_gen_trunc_tl_i32(t2
, t0
);
4371 tcg_gen_trunc_tl_i32(t3
, t1
);
4372 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4373 tcg_gen_rotr_i32(t2
, t3
, t2
);
4374 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4375 tcg_temp_free_i32(t2
);
4376 tcg_temp_free_i32(t3
);
4379 #if defined(TARGET_MIPS64)
4381 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4382 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4385 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4386 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4389 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4390 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4393 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4394 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4402 /* Copy GPR to and from TX79 HI1/LO1 register. */
4403 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4405 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4412 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4415 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4419 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4421 tcg_gen_movi_tl(cpu_HI
[1], 0);
4426 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4428 tcg_gen_movi_tl(cpu_LO
[1], 0);
4432 MIPS_INVAL("mfthilo1 TX79");
4433 generate_exception_end(ctx
, EXCP_RI
);
4438 /* Arithmetic on HI/LO registers */
4439 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4441 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4452 #if defined(TARGET_MIPS64)
4454 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4458 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4462 #if defined(TARGET_MIPS64)
4464 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4468 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4473 #if defined(TARGET_MIPS64)
4475 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4479 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4482 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4487 #if defined(TARGET_MIPS64)
4489 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4493 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4496 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4502 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4505 TCGv t0
= tcg_const_tl(addr
);
4506 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4507 gen_store_gpr(t0
, reg
);
4511 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4517 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4520 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4521 addr
= addr_add(ctx
, pc
, offset
);
4522 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4526 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4527 addr
= addr_add(ctx
, pc
, offset
);
4528 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4530 #if defined(TARGET_MIPS64)
4533 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4534 addr
= addr_add(ctx
, pc
, offset
);
4535 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4539 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4542 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4543 addr
= addr_add(ctx
, pc
, offset
);
4544 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4549 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4550 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4551 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4554 #if defined(TARGET_MIPS64)
4555 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4556 case R6_OPC_LDPC
+ (1 << 16):
4557 case R6_OPC_LDPC
+ (2 << 16):
4558 case R6_OPC_LDPC
+ (3 << 16):
4560 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4561 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4562 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4566 MIPS_INVAL("OPC_PCREL");
4567 generate_exception_end(ctx
, EXCP_RI
);
4574 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4583 t0
= tcg_temp_new();
4584 t1
= tcg_temp_new();
4586 gen_load_gpr(t0
, rs
);
4587 gen_load_gpr(t1
, rt
);
4592 TCGv t2
= tcg_temp_new();
4593 TCGv t3
= tcg_temp_new();
4594 tcg_gen_ext32s_tl(t0
, t0
);
4595 tcg_gen_ext32s_tl(t1
, t1
);
4596 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4597 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4598 tcg_gen_and_tl(t2
, t2
, t3
);
4599 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4600 tcg_gen_or_tl(t2
, t2
, t3
);
4601 tcg_gen_movi_tl(t3
, 0);
4602 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4603 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4604 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4611 TCGv t2
= tcg_temp_new();
4612 TCGv t3
= tcg_temp_new();
4613 tcg_gen_ext32s_tl(t0
, t0
);
4614 tcg_gen_ext32s_tl(t1
, t1
);
4615 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4616 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4617 tcg_gen_and_tl(t2
, t2
, t3
);
4618 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4619 tcg_gen_or_tl(t2
, t2
, t3
);
4620 tcg_gen_movi_tl(t3
, 0);
4621 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4622 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4623 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4630 TCGv t2
= tcg_const_tl(0);
4631 TCGv t3
= tcg_const_tl(1);
4632 tcg_gen_ext32u_tl(t0
, t0
);
4633 tcg_gen_ext32u_tl(t1
, t1
);
4634 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4635 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4636 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4643 TCGv t2
= tcg_const_tl(0);
4644 TCGv t3
= tcg_const_tl(1);
4645 tcg_gen_ext32u_tl(t0
, t0
);
4646 tcg_gen_ext32u_tl(t1
, t1
);
4647 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4648 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4649 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4656 TCGv_i32 t2
= tcg_temp_new_i32();
4657 TCGv_i32 t3
= tcg_temp_new_i32();
4658 tcg_gen_trunc_tl_i32(t2
, t0
);
4659 tcg_gen_trunc_tl_i32(t3
, t1
);
4660 tcg_gen_mul_i32(t2
, t2
, t3
);
4661 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4662 tcg_temp_free_i32(t2
);
4663 tcg_temp_free_i32(t3
);
4668 TCGv_i32 t2
= tcg_temp_new_i32();
4669 TCGv_i32 t3
= tcg_temp_new_i32();
4670 tcg_gen_trunc_tl_i32(t2
, t0
);
4671 tcg_gen_trunc_tl_i32(t3
, t1
);
4672 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4673 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4674 tcg_temp_free_i32(t2
);
4675 tcg_temp_free_i32(t3
);
4680 TCGv_i32 t2
= tcg_temp_new_i32();
4681 TCGv_i32 t3
= tcg_temp_new_i32();
4682 tcg_gen_trunc_tl_i32(t2
, t0
);
4683 tcg_gen_trunc_tl_i32(t3
, t1
);
4684 tcg_gen_mul_i32(t2
, t2
, t3
);
4685 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4686 tcg_temp_free_i32(t2
);
4687 tcg_temp_free_i32(t3
);
4692 TCGv_i32 t2
= tcg_temp_new_i32();
4693 TCGv_i32 t3
= tcg_temp_new_i32();
4694 tcg_gen_trunc_tl_i32(t2
, t0
);
4695 tcg_gen_trunc_tl_i32(t3
, t1
);
4696 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4697 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4698 tcg_temp_free_i32(t2
);
4699 tcg_temp_free_i32(t3
);
4702 #if defined(TARGET_MIPS64)
4705 TCGv t2
= tcg_temp_new();
4706 TCGv t3
= tcg_temp_new();
4707 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4708 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4709 tcg_gen_and_tl(t2
, t2
, t3
);
4710 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4711 tcg_gen_or_tl(t2
, t2
, t3
);
4712 tcg_gen_movi_tl(t3
, 0);
4713 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4714 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4721 TCGv t2
= tcg_temp_new();
4722 TCGv t3
= tcg_temp_new();
4723 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4724 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4725 tcg_gen_and_tl(t2
, t2
, t3
);
4726 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4727 tcg_gen_or_tl(t2
, t2
, t3
);
4728 tcg_gen_movi_tl(t3
, 0);
4729 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4730 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4737 TCGv t2
= tcg_const_tl(0);
4738 TCGv t3
= tcg_const_tl(1);
4739 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4740 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4747 TCGv t2
= tcg_const_tl(0);
4748 TCGv t3
= tcg_const_tl(1);
4749 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4750 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4756 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4760 TCGv t2
= tcg_temp_new();
4761 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4766 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4770 TCGv t2
= tcg_temp_new();
4771 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4777 MIPS_INVAL("r6 mul/div");
4778 generate_exception_end(ctx
, EXCP_RI
);
4786 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4790 t0
= tcg_temp_new();
4791 t1
= tcg_temp_new();
4793 gen_load_gpr(t0
, rs
);
4794 gen_load_gpr(t1
, rt
);
4799 TCGv t2
= tcg_temp_new();
4800 TCGv t3
= tcg_temp_new();
4801 tcg_gen_ext32s_tl(t0
, t0
);
4802 tcg_gen_ext32s_tl(t1
, t1
);
4803 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4804 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4805 tcg_gen_and_tl(t2
, t2
, t3
);
4806 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4807 tcg_gen_or_tl(t2
, t2
, t3
);
4808 tcg_gen_movi_tl(t3
, 0);
4809 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4810 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4811 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4812 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4813 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4820 TCGv t2
= tcg_const_tl(0);
4821 TCGv t3
= tcg_const_tl(1);
4822 tcg_gen_ext32u_tl(t0
, t0
);
4823 tcg_gen_ext32u_tl(t1
, t1
);
4824 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4825 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4826 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4827 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4828 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4834 MIPS_INVAL("div1 TX79");
4835 generate_exception_end(ctx
, EXCP_RI
);
4843 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4844 int acc
, int rs
, int rt
)
4848 t0
= tcg_temp_new();
4849 t1
= tcg_temp_new();
4851 gen_load_gpr(t0
, rs
);
4852 gen_load_gpr(t1
, rt
);
4861 TCGv t2
= tcg_temp_new();
4862 TCGv t3
= tcg_temp_new();
4863 tcg_gen_ext32s_tl(t0
, t0
);
4864 tcg_gen_ext32s_tl(t1
, t1
);
4865 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4866 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4867 tcg_gen_and_tl(t2
, t2
, t3
);
4868 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4869 tcg_gen_or_tl(t2
, t2
, t3
);
4870 tcg_gen_movi_tl(t3
, 0);
4871 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4872 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4873 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4874 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4875 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4882 TCGv t2
= tcg_const_tl(0);
4883 TCGv t3
= tcg_const_tl(1);
4884 tcg_gen_ext32u_tl(t0
, t0
);
4885 tcg_gen_ext32u_tl(t1
, t1
);
4886 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4887 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4888 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4889 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4890 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4897 TCGv_i32 t2
= tcg_temp_new_i32();
4898 TCGv_i32 t3
= tcg_temp_new_i32();
4899 tcg_gen_trunc_tl_i32(t2
, t0
);
4900 tcg_gen_trunc_tl_i32(t3
, t1
);
4901 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4902 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4903 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4904 tcg_temp_free_i32(t2
);
4905 tcg_temp_free_i32(t3
);
4910 TCGv_i32 t2
= tcg_temp_new_i32();
4911 TCGv_i32 t3
= tcg_temp_new_i32();
4912 tcg_gen_trunc_tl_i32(t2
, t0
);
4913 tcg_gen_trunc_tl_i32(t3
, t1
);
4914 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4915 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4916 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4917 tcg_temp_free_i32(t2
);
4918 tcg_temp_free_i32(t3
);
4921 #if defined(TARGET_MIPS64)
4924 TCGv t2
= tcg_temp_new();
4925 TCGv t3
= tcg_temp_new();
4926 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4927 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4928 tcg_gen_and_tl(t2
, t2
, t3
);
4929 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4930 tcg_gen_or_tl(t2
, t2
, t3
);
4931 tcg_gen_movi_tl(t3
, 0);
4932 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4933 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4934 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4941 TCGv t2
= tcg_const_tl(0);
4942 TCGv t3
= tcg_const_tl(1);
4943 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4944 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4945 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4951 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4954 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4959 TCGv_i64 t2
= tcg_temp_new_i64();
4960 TCGv_i64 t3
= tcg_temp_new_i64();
4962 tcg_gen_ext_tl_i64(t2
, t0
);
4963 tcg_gen_ext_tl_i64(t3
, t1
);
4964 tcg_gen_mul_i64(t2
, t2
, t3
);
4965 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4966 tcg_gen_add_i64(t2
, t2
, t3
);
4967 tcg_temp_free_i64(t3
);
4968 gen_move_low32(cpu_LO
[acc
], t2
);
4969 gen_move_high32(cpu_HI
[acc
], t2
);
4970 tcg_temp_free_i64(t2
);
4975 TCGv_i64 t2
= tcg_temp_new_i64();
4976 TCGv_i64 t3
= tcg_temp_new_i64();
4978 tcg_gen_ext32u_tl(t0
, t0
);
4979 tcg_gen_ext32u_tl(t1
, t1
);
4980 tcg_gen_extu_tl_i64(t2
, t0
);
4981 tcg_gen_extu_tl_i64(t3
, t1
);
4982 tcg_gen_mul_i64(t2
, t2
, t3
);
4983 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4984 tcg_gen_add_i64(t2
, t2
, t3
);
4985 tcg_temp_free_i64(t3
);
4986 gen_move_low32(cpu_LO
[acc
], t2
);
4987 gen_move_high32(cpu_HI
[acc
], t2
);
4988 tcg_temp_free_i64(t2
);
4993 TCGv_i64 t2
= tcg_temp_new_i64();
4994 TCGv_i64 t3
= tcg_temp_new_i64();
4996 tcg_gen_ext_tl_i64(t2
, t0
);
4997 tcg_gen_ext_tl_i64(t3
, t1
);
4998 tcg_gen_mul_i64(t2
, t2
, t3
);
4999 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5000 tcg_gen_sub_i64(t2
, t3
, t2
);
5001 tcg_temp_free_i64(t3
);
5002 gen_move_low32(cpu_LO
[acc
], t2
);
5003 gen_move_high32(cpu_HI
[acc
], t2
);
5004 tcg_temp_free_i64(t2
);
5009 TCGv_i64 t2
= tcg_temp_new_i64();
5010 TCGv_i64 t3
= tcg_temp_new_i64();
5012 tcg_gen_ext32u_tl(t0
, t0
);
5013 tcg_gen_ext32u_tl(t1
, t1
);
5014 tcg_gen_extu_tl_i64(t2
, t0
);
5015 tcg_gen_extu_tl_i64(t3
, t1
);
5016 tcg_gen_mul_i64(t2
, t2
, t3
);
5017 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5018 tcg_gen_sub_i64(t2
, t3
, t2
);
5019 tcg_temp_free_i64(t3
);
5020 gen_move_low32(cpu_LO
[acc
], t2
);
5021 gen_move_high32(cpu_HI
[acc
], t2
);
5022 tcg_temp_free_i64(t2
);
5026 MIPS_INVAL("mul/div");
5027 generate_exception_end(ctx
, EXCP_RI
);
5036 * These MULT and MULTU instructions implemented in for example the
5037 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5038 * architectures are special three-operand variants with the syntax
5040 * MULT[U][1] rd, rs, rt
5044 * (rd, LO, HI) <- rs * rt
5046 * where the low-order 32-bits of the result is placed into both the
5047 * GPR rd and the special register LO. The high-order 32-bits of the
5048 * result is placed into the special register HI.
5050 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5051 * which is the zero register that always reads as 0.
5053 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5054 int rd
, int rs
, int rt
)
5056 TCGv t0
= tcg_temp_new();
5057 TCGv t1
= tcg_temp_new();
5060 gen_load_gpr(t0
, rs
);
5061 gen_load_gpr(t1
, rt
);
5069 TCGv_i32 t2
= tcg_temp_new_i32();
5070 TCGv_i32 t3
= tcg_temp_new_i32();
5071 tcg_gen_trunc_tl_i32(t2
, t0
);
5072 tcg_gen_trunc_tl_i32(t3
, t1
);
5073 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5075 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5077 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5078 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5079 tcg_temp_free_i32(t2
);
5080 tcg_temp_free_i32(t3
);
5083 case MMI_OPC_MULTU1
:
5088 TCGv_i32 t2
= tcg_temp_new_i32();
5089 TCGv_i32 t3
= tcg_temp_new_i32();
5090 tcg_gen_trunc_tl_i32(t2
, t0
);
5091 tcg_gen_trunc_tl_i32(t3
, t1
);
5092 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5094 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5096 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5097 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5098 tcg_temp_free_i32(t2
);
5099 tcg_temp_free_i32(t3
);
5103 MIPS_INVAL("mul TXx9");
5104 generate_exception_end(ctx
, EXCP_RI
);
5113 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
5114 int rd
, int rs
, int rt
)
5116 TCGv t0
= tcg_temp_new();
5117 TCGv t1
= tcg_temp_new();
5119 gen_load_gpr(t0
, rs
);
5120 gen_load_gpr(t1
, rt
);
5123 case OPC_VR54XX_MULS
:
5124 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5126 case OPC_VR54XX_MULSU
:
5127 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5129 case OPC_VR54XX_MACC
:
5130 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5132 case OPC_VR54XX_MACCU
:
5133 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5135 case OPC_VR54XX_MSAC
:
5136 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5138 case OPC_VR54XX_MSACU
:
5139 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5141 case OPC_VR54XX_MULHI
:
5142 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5144 case OPC_VR54XX_MULHIU
:
5145 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5147 case OPC_VR54XX_MULSHI
:
5148 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5150 case OPC_VR54XX_MULSHIU
:
5151 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5153 case OPC_VR54XX_MACCHI
:
5154 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5156 case OPC_VR54XX_MACCHIU
:
5157 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5159 case OPC_VR54XX_MSACHI
:
5160 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5162 case OPC_VR54XX_MSACHIU
:
5163 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5166 MIPS_INVAL("mul vr54xx");
5167 generate_exception_end(ctx
, EXCP_RI
);
5170 gen_store_gpr(t0
, rd
);
5177 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
5187 gen_load_gpr(t0
, rs
);
5192 #if defined(TARGET_MIPS64)
5196 tcg_gen_not_tl(t0
, t0
);
5205 tcg_gen_ext32u_tl(t0
, t0
);
5206 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5207 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5209 #if defined(TARGET_MIPS64)
5214 tcg_gen_clzi_i64(t0
, t0
, 64);
5220 /* Godson integer instructions */
5221 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5222 int rd
, int rs
, int rt
)
5234 case OPC_MULTU_G_2E
:
5235 case OPC_MULTU_G_2F
:
5236 #if defined(TARGET_MIPS64)
5237 case OPC_DMULT_G_2E
:
5238 case OPC_DMULT_G_2F
:
5239 case OPC_DMULTU_G_2E
:
5240 case OPC_DMULTU_G_2F
:
5242 t0
= tcg_temp_new();
5243 t1
= tcg_temp_new();
5246 t0
= tcg_temp_local_new();
5247 t1
= tcg_temp_local_new();
5251 gen_load_gpr(t0
, rs
);
5252 gen_load_gpr(t1
, rt
);
5257 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5258 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5260 case OPC_MULTU_G_2E
:
5261 case OPC_MULTU_G_2F
:
5262 tcg_gen_ext32u_tl(t0
, t0
);
5263 tcg_gen_ext32u_tl(t1
, t1
);
5264 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5265 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5270 TCGLabel
*l1
= gen_new_label();
5271 TCGLabel
*l2
= gen_new_label();
5272 TCGLabel
*l3
= gen_new_label();
5273 tcg_gen_ext32s_tl(t0
, t0
);
5274 tcg_gen_ext32s_tl(t1
, t1
);
5275 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5276 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5279 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5280 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5281 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5284 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5285 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5292 TCGLabel
*l1
= gen_new_label();
5293 TCGLabel
*l2
= gen_new_label();
5294 tcg_gen_ext32u_tl(t0
, t0
);
5295 tcg_gen_ext32u_tl(t1
, t1
);
5296 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5297 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5300 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5301 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5308 TCGLabel
*l1
= gen_new_label();
5309 TCGLabel
*l2
= gen_new_label();
5310 TCGLabel
*l3
= gen_new_label();
5311 tcg_gen_ext32u_tl(t0
, t0
);
5312 tcg_gen_ext32u_tl(t1
, t1
);
5313 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5314 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5315 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5317 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5320 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5321 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5328 TCGLabel
*l1
= gen_new_label();
5329 TCGLabel
*l2
= gen_new_label();
5330 tcg_gen_ext32u_tl(t0
, t0
);
5331 tcg_gen_ext32u_tl(t1
, t1
);
5332 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5333 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5336 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5337 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5341 #if defined(TARGET_MIPS64)
5342 case OPC_DMULT_G_2E
:
5343 case OPC_DMULT_G_2F
:
5344 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5346 case OPC_DMULTU_G_2E
:
5347 case OPC_DMULTU_G_2F
:
5348 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5353 TCGLabel
*l1
= gen_new_label();
5354 TCGLabel
*l2
= gen_new_label();
5355 TCGLabel
*l3
= gen_new_label();
5356 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5357 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5360 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5361 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5362 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5365 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5369 case OPC_DDIVU_G_2E
:
5370 case OPC_DDIVU_G_2F
:
5372 TCGLabel
*l1
= gen_new_label();
5373 TCGLabel
*l2
= gen_new_label();
5374 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5375 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5378 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5385 TCGLabel
*l1
= gen_new_label();
5386 TCGLabel
*l2
= gen_new_label();
5387 TCGLabel
*l3
= gen_new_label();
5388 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5389 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5390 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5392 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5395 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5399 case OPC_DMODU_G_2E
:
5400 case OPC_DMODU_G_2F
:
5402 TCGLabel
*l1
= gen_new_label();
5403 TCGLabel
*l2
= gen_new_label();
5404 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5405 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5408 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5419 /* Loongson multimedia instructions */
5420 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5422 uint32_t opc
, shift_max
;
5425 opc
= MASK_LMI(ctx
->opcode
);
5431 t0
= tcg_temp_local_new_i64();
5432 t1
= tcg_temp_local_new_i64();
5435 t0
= tcg_temp_new_i64();
5436 t1
= tcg_temp_new_i64();
5440 check_cp1_enabled(ctx
);
5441 gen_load_fpr64(ctx
, t0
, rs
);
5442 gen_load_fpr64(ctx
, t1
, rt
);
5444 #define LMI_HELPER(UP, LO) \
5445 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5446 #define LMI_HELPER_1(UP, LO) \
5447 case OPC_##UP: gen_helper_##LO(t0, t0); break
5448 #define LMI_DIRECT(UP, LO, OP) \
5449 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5452 LMI_HELPER(PADDSH
, paddsh
);
5453 LMI_HELPER(PADDUSH
, paddush
);
5454 LMI_HELPER(PADDH
, paddh
);
5455 LMI_HELPER(PADDW
, paddw
);
5456 LMI_HELPER(PADDSB
, paddsb
);
5457 LMI_HELPER(PADDUSB
, paddusb
);
5458 LMI_HELPER(PADDB
, paddb
);
5460 LMI_HELPER(PSUBSH
, psubsh
);
5461 LMI_HELPER(PSUBUSH
, psubush
);
5462 LMI_HELPER(PSUBH
, psubh
);
5463 LMI_HELPER(PSUBW
, psubw
);
5464 LMI_HELPER(PSUBSB
, psubsb
);
5465 LMI_HELPER(PSUBUSB
, psubusb
);
5466 LMI_HELPER(PSUBB
, psubb
);
5468 LMI_HELPER(PSHUFH
, pshufh
);
5469 LMI_HELPER(PACKSSWH
, packsswh
);
5470 LMI_HELPER(PACKSSHB
, packsshb
);
5471 LMI_HELPER(PACKUSHB
, packushb
);
5473 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5474 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5475 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5476 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5477 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5478 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5480 LMI_HELPER(PAVGH
, pavgh
);
5481 LMI_HELPER(PAVGB
, pavgb
);
5482 LMI_HELPER(PMAXSH
, pmaxsh
);
5483 LMI_HELPER(PMINSH
, pminsh
);
5484 LMI_HELPER(PMAXUB
, pmaxub
);
5485 LMI_HELPER(PMINUB
, pminub
);
5487 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5488 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5489 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5490 LMI_HELPER(PCMPGTH
, pcmpgth
);
5491 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5492 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5494 LMI_HELPER(PSLLW
, psllw
);
5495 LMI_HELPER(PSLLH
, psllh
);
5496 LMI_HELPER(PSRLW
, psrlw
);
5497 LMI_HELPER(PSRLH
, psrlh
);
5498 LMI_HELPER(PSRAW
, psraw
);
5499 LMI_HELPER(PSRAH
, psrah
);
5501 LMI_HELPER(PMULLH
, pmullh
);
5502 LMI_HELPER(PMULHH
, pmulhh
);
5503 LMI_HELPER(PMULHUH
, pmulhuh
);
5504 LMI_HELPER(PMADDHW
, pmaddhw
);
5506 LMI_HELPER(PASUBUB
, pasubub
);
5507 LMI_HELPER_1(BIADD
, biadd
);
5508 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5510 LMI_DIRECT(PADDD
, paddd
, add
);
5511 LMI_DIRECT(PSUBD
, psubd
, sub
);
5512 LMI_DIRECT(XOR_CP2
, xor, xor);
5513 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5514 LMI_DIRECT(AND_CP2
, and, and);
5515 LMI_DIRECT(OR_CP2
, or, or);
5518 tcg_gen_andc_i64(t0
, t1
, t0
);
5522 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5525 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5528 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5531 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5535 tcg_gen_andi_i64(t1
, t1
, 3);
5536 tcg_gen_shli_i64(t1
, t1
, 4);
5537 tcg_gen_shr_i64(t0
, t0
, t1
);
5538 tcg_gen_ext16u_i64(t0
, t0
);
5542 tcg_gen_add_i64(t0
, t0
, t1
);
5543 tcg_gen_ext32s_i64(t0
, t0
);
5546 tcg_gen_sub_i64(t0
, t0
, t1
);
5547 tcg_gen_ext32s_i64(t0
, t0
);
5569 /* Make sure shift count isn't TCG undefined behaviour. */
5570 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5575 tcg_gen_shl_i64(t0
, t0
, t1
);
5579 /* Since SRA is UndefinedResult without sign-extended inputs,
5580 we can treat SRA and DSRA the same. */
5581 tcg_gen_sar_i64(t0
, t0
, t1
);
5584 /* We want to shift in zeros for SRL; zero-extend first. */
5585 tcg_gen_ext32u_i64(t0
, t0
);
5588 tcg_gen_shr_i64(t0
, t0
, t1
);
5592 if (shift_max
== 32) {
5593 tcg_gen_ext32s_i64(t0
, t0
);
5596 /* Shifts larger than MAX produce zero. */
5597 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5598 tcg_gen_neg_i64(t1
, t1
);
5599 tcg_gen_and_i64(t0
, t0
, t1
);
5605 TCGv_i64 t2
= tcg_temp_new_i64();
5606 TCGLabel
*lab
= gen_new_label();
5608 tcg_gen_mov_i64(t2
, t0
);
5609 tcg_gen_add_i64(t0
, t1
, t2
);
5610 if (opc
== OPC_ADD_CP2
) {
5611 tcg_gen_ext32s_i64(t0
, t0
);
5613 tcg_gen_xor_i64(t1
, t1
, t2
);
5614 tcg_gen_xor_i64(t2
, t2
, t0
);
5615 tcg_gen_andc_i64(t1
, t2
, t1
);
5616 tcg_temp_free_i64(t2
);
5617 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5618 generate_exception(ctx
, EXCP_OVERFLOW
);
5626 TCGv_i64 t2
= tcg_temp_new_i64();
5627 TCGLabel
*lab
= gen_new_label();
5629 tcg_gen_mov_i64(t2
, t0
);
5630 tcg_gen_sub_i64(t0
, t1
, t2
);
5631 if (opc
== OPC_SUB_CP2
) {
5632 tcg_gen_ext32s_i64(t0
, t0
);
5634 tcg_gen_xor_i64(t1
, t1
, t2
);
5635 tcg_gen_xor_i64(t2
, t2
, t0
);
5636 tcg_gen_and_i64(t1
, t1
, t2
);
5637 tcg_temp_free_i64(t2
);
5638 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5639 generate_exception(ctx
, EXCP_OVERFLOW
);
5645 tcg_gen_ext32u_i64(t0
, t0
);
5646 tcg_gen_ext32u_i64(t1
, t1
);
5647 tcg_gen_mul_i64(t0
, t0
, t1
);
5656 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5657 FD field is the CC field? */
5659 MIPS_INVAL("loongson_cp2");
5660 generate_exception_end(ctx
, EXCP_RI
);
5667 gen_store_fpr64(ctx
, t0
, rd
);
5669 tcg_temp_free_i64(t0
);
5670 tcg_temp_free_i64(t1
);
5674 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5675 int rs
, int rt
, int16_t imm
)
5678 TCGv t0
= tcg_temp_new();
5679 TCGv t1
= tcg_temp_new();
5682 /* Load needed operands */
5690 /* Compare two registers */
5692 gen_load_gpr(t0
, rs
);
5693 gen_load_gpr(t1
, rt
);
5703 /* Compare register to immediate */
5704 if (rs
!= 0 || imm
!= 0) {
5705 gen_load_gpr(t0
, rs
);
5706 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5713 case OPC_TEQ
: /* rs == rs */
5714 case OPC_TEQI
: /* r0 == 0 */
5715 case OPC_TGE
: /* rs >= rs */
5716 case OPC_TGEI
: /* r0 >= 0 */
5717 case OPC_TGEU
: /* rs >= rs unsigned */
5718 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5720 generate_exception_end(ctx
, EXCP_TRAP
);
5722 case OPC_TLT
: /* rs < rs */
5723 case OPC_TLTI
: /* r0 < 0 */
5724 case OPC_TLTU
: /* rs < rs unsigned */
5725 case OPC_TLTIU
: /* r0 < 0 unsigned */
5726 case OPC_TNE
: /* rs != rs */
5727 case OPC_TNEI
: /* r0 != 0 */
5728 /* Never trap: treat as NOP. */
5732 TCGLabel
*l1
= gen_new_label();
5737 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5741 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5745 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5749 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5753 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5757 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5760 generate_exception(ctx
, EXCP_TRAP
);
5767 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5769 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5773 #ifndef CONFIG_USER_ONLY
5774 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5780 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5782 if (use_goto_tb(ctx
, dest
)) {
5785 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5788 if (ctx
->base
.singlestep_enabled
) {
5789 save_cpu_state(ctx
, 0);
5790 gen_helper_raise_exception_debug(cpu_env
);
5792 tcg_gen_lookup_and_goto_ptr();
5796 /* Branches (before delay slot) */
5797 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5799 int rs
, int rt
, int32_t offset
,
5802 target_ulong btgt
= -1;
5804 int bcond_compute
= 0;
5805 TCGv t0
= tcg_temp_new();
5806 TCGv t1
= tcg_temp_new();
5808 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5809 #ifdef MIPS_DEBUG_DISAS
5810 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5811 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5813 generate_exception_end(ctx
, EXCP_RI
);
5817 /* Load needed operands */
5823 /* Compare two registers */
5825 gen_load_gpr(t0
, rs
);
5826 gen_load_gpr(t1
, rt
);
5829 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5843 /* Compare to zero */
5845 gen_load_gpr(t0
, rs
);
5848 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5851 #if defined(TARGET_MIPS64)
5853 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5855 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5858 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5863 /* Jump to immediate */
5864 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5869 /* Jump to register */
5870 if (offset
!= 0 && offset
!= 16) {
5871 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5872 others are reserved. */
5873 MIPS_INVAL("jump hint");
5874 generate_exception_end(ctx
, EXCP_RI
);
5877 gen_load_gpr(btarget
, rs
);
5880 MIPS_INVAL("branch/jump");
5881 generate_exception_end(ctx
, EXCP_RI
);
5884 if (bcond_compute
== 0) {
5885 /* No condition to be computed */
5887 case OPC_BEQ
: /* rx == rx */
5888 case OPC_BEQL
: /* rx == rx likely */
5889 case OPC_BGEZ
: /* 0 >= 0 */
5890 case OPC_BGEZL
: /* 0 >= 0 likely */
5891 case OPC_BLEZ
: /* 0 <= 0 */
5892 case OPC_BLEZL
: /* 0 <= 0 likely */
5894 ctx
->hflags
|= MIPS_HFLAG_B
;
5896 case OPC_BGEZAL
: /* 0 >= 0 */
5897 case OPC_BGEZALL
: /* 0 >= 0 likely */
5898 /* Always take and link */
5900 ctx
->hflags
|= MIPS_HFLAG_B
;
5902 case OPC_BNE
: /* rx != rx */
5903 case OPC_BGTZ
: /* 0 > 0 */
5904 case OPC_BLTZ
: /* 0 < 0 */
5907 case OPC_BLTZAL
: /* 0 < 0 */
5908 /* Handle as an unconditional branch to get correct delay
5911 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5912 ctx
->hflags
|= MIPS_HFLAG_B
;
5914 case OPC_BLTZALL
: /* 0 < 0 likely */
5915 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5916 /* Skip the instruction in the delay slot */
5917 ctx
->base
.pc_next
+= 4;
5919 case OPC_BNEL
: /* rx != rx likely */
5920 case OPC_BGTZL
: /* 0 > 0 likely */
5921 case OPC_BLTZL
: /* 0 < 0 likely */
5922 /* Skip the instruction in the delay slot */
5923 ctx
->base
.pc_next
+= 4;
5926 ctx
->hflags
|= MIPS_HFLAG_B
;
5929 ctx
->hflags
|= MIPS_HFLAG_BX
;
5933 ctx
->hflags
|= MIPS_HFLAG_B
;
5936 ctx
->hflags
|= MIPS_HFLAG_BR
;
5940 ctx
->hflags
|= MIPS_HFLAG_BR
;
5943 MIPS_INVAL("branch/jump");
5944 generate_exception_end(ctx
, EXCP_RI
);
5950 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5953 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5956 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5959 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5962 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5965 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5968 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5972 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5976 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5979 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5982 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5985 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5988 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5991 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5994 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5996 #if defined(TARGET_MIPS64)
5998 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6002 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6005 ctx
->hflags
|= MIPS_HFLAG_BC
;
6008 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6011 ctx
->hflags
|= MIPS_HFLAG_BL
;
6014 MIPS_INVAL("conditional branch/jump");
6015 generate_exception_end(ctx
, EXCP_RI
);
6020 ctx
->btarget
= btgt
;
6022 switch (delayslot_size
) {
6024 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6027 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6032 int post_delay
= insn_bytes
+ delayslot_size
;
6033 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6035 tcg_gen_movi_tl(cpu_gpr
[blink
],
6036 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6040 if (insn_bytes
== 2)
6041 ctx
->hflags
|= MIPS_HFLAG_B16
;
6047 /* nanoMIPS Branches */
6048 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6050 int rs
, int rt
, int32_t offset
)
6052 target_ulong btgt
= -1;
6053 int bcond_compute
= 0;
6054 TCGv t0
= tcg_temp_new();
6055 TCGv t1
= tcg_temp_new();
6057 /* Load needed operands */
6061 /* Compare two registers */
6063 gen_load_gpr(t0
, rs
);
6064 gen_load_gpr(t1
, rt
);
6067 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6070 /* Compare to zero */
6072 gen_load_gpr(t0
, rs
);
6075 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6078 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6080 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6084 /* Jump to register */
6085 if (offset
!= 0 && offset
!= 16) {
6086 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6087 others are reserved. */
6088 MIPS_INVAL("jump hint");
6089 generate_exception_end(ctx
, EXCP_RI
);
6092 gen_load_gpr(btarget
, rs
);
6095 MIPS_INVAL("branch/jump");
6096 generate_exception_end(ctx
, EXCP_RI
);
6099 if (bcond_compute
== 0) {
6100 /* No condition to be computed */
6102 case OPC_BEQ
: /* rx == rx */
6104 ctx
->hflags
|= MIPS_HFLAG_B
;
6106 case OPC_BGEZAL
: /* 0 >= 0 */
6107 /* Always take and link */
6108 tcg_gen_movi_tl(cpu_gpr
[31],
6109 ctx
->base
.pc_next
+ insn_bytes
);
6110 ctx
->hflags
|= MIPS_HFLAG_B
;
6112 case OPC_BNE
: /* rx != rx */
6113 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6114 /* Skip the instruction in the delay slot */
6115 ctx
->base
.pc_next
+= 4;
6118 ctx
->hflags
|= MIPS_HFLAG_BR
;
6122 tcg_gen_movi_tl(cpu_gpr
[rt
],
6123 ctx
->base
.pc_next
+ insn_bytes
);
6125 ctx
->hflags
|= MIPS_HFLAG_BR
;
6128 MIPS_INVAL("branch/jump");
6129 generate_exception_end(ctx
, EXCP_RI
);
6135 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6138 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6141 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6142 tcg_gen_movi_tl(cpu_gpr
[31],
6143 ctx
->base
.pc_next
+ insn_bytes
);
6146 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6148 ctx
->hflags
|= MIPS_HFLAG_BC
;
6151 MIPS_INVAL("conditional branch/jump");
6152 generate_exception_end(ctx
, EXCP_RI
);
6157 ctx
->btarget
= btgt
;
6160 if (insn_bytes
== 2) {
6161 ctx
->hflags
|= MIPS_HFLAG_B16
;
6168 /* special3 bitfield operations */
6169 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
6170 int rs
, int lsb
, int msb
)
6172 TCGv t0
= tcg_temp_new();
6173 TCGv t1
= tcg_temp_new();
6175 gen_load_gpr(t1
, rs
);
6178 if (lsb
+ msb
> 31) {
6182 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6184 /* The two checks together imply that lsb == 0,
6185 so this is a simple sign-extension. */
6186 tcg_gen_ext32s_tl(t0
, t1
);
6189 #if defined(TARGET_MIPS64)
6198 if (lsb
+ msb
> 63) {
6201 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6208 gen_load_gpr(t0
, rt
);
6209 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6210 tcg_gen_ext32s_tl(t0
, t0
);
6212 #if defined(TARGET_MIPS64)
6223 gen_load_gpr(t0
, rt
);
6224 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6229 MIPS_INVAL("bitops");
6230 generate_exception_end(ctx
, EXCP_RI
);
6235 gen_store_gpr(t0
, rt
);
6240 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6245 /* If no destination, treat it as a NOP. */
6249 t0
= tcg_temp_new();
6250 gen_load_gpr(t0
, rt
);
6254 TCGv t1
= tcg_temp_new();
6255 TCGv t2
= tcg_const_tl(0x00FF00FF);
6257 tcg_gen_shri_tl(t1
, t0
, 8);
6258 tcg_gen_and_tl(t1
, t1
, t2
);
6259 tcg_gen_and_tl(t0
, t0
, t2
);
6260 tcg_gen_shli_tl(t0
, t0
, 8);
6261 tcg_gen_or_tl(t0
, t0
, t1
);
6264 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6268 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6271 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6273 #if defined(TARGET_MIPS64)
6276 TCGv t1
= tcg_temp_new();
6277 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6279 tcg_gen_shri_tl(t1
, t0
, 8);
6280 tcg_gen_and_tl(t1
, t1
, t2
);
6281 tcg_gen_and_tl(t0
, t0
, t2
);
6282 tcg_gen_shli_tl(t0
, t0
, 8);
6283 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6290 TCGv t1
= tcg_temp_new();
6291 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6293 tcg_gen_shri_tl(t1
, t0
, 16);
6294 tcg_gen_and_tl(t1
, t1
, t2
);
6295 tcg_gen_and_tl(t0
, t0
, t2
);
6296 tcg_gen_shli_tl(t0
, t0
, 16);
6297 tcg_gen_or_tl(t0
, t0
, t1
);
6298 tcg_gen_shri_tl(t1
, t0
, 32);
6299 tcg_gen_shli_tl(t0
, t0
, 32);
6300 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6307 MIPS_INVAL("bsfhl");
6308 generate_exception_end(ctx
, EXCP_RI
);
6315 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6324 t0
= tcg_temp_new();
6325 t1
= tcg_temp_new();
6326 gen_load_gpr(t0
, rs
);
6327 gen_load_gpr(t1
, rt
);
6328 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6329 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6330 if (opc
== OPC_LSA
) {
6331 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6340 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6348 t0
= tcg_temp_new();
6349 if (bits
== 0 || bits
== wordsz
) {
6351 gen_load_gpr(t0
, rt
);
6353 gen_load_gpr(t0
, rs
);
6357 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6359 #if defined(TARGET_MIPS64)
6361 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6366 TCGv t1
= tcg_temp_new();
6367 gen_load_gpr(t0
, rt
);
6368 gen_load_gpr(t1
, rs
);
6372 TCGv_i64 t2
= tcg_temp_new_i64();
6373 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6374 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6375 gen_move_low32(cpu_gpr
[rd
], t2
);
6376 tcg_temp_free_i64(t2
);
6379 #if defined(TARGET_MIPS64)
6381 tcg_gen_shli_tl(t0
, t0
, bits
);
6382 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6383 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6393 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6396 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6399 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6402 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6405 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6412 t0
= tcg_temp_new();
6413 gen_load_gpr(t0
, rt
);
6416 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6418 #if defined(TARGET_MIPS64)
6420 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6427 #ifndef CONFIG_USER_ONLY
6428 /* CP0 (MMU and control) */
6429 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6431 TCGv_i64 t0
= tcg_temp_new_i64();
6432 TCGv_i64 t1
= tcg_temp_new_i64();
6434 tcg_gen_ext_tl_i64(t0
, arg
);
6435 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6436 #if defined(TARGET_MIPS64)
6437 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6439 tcg_gen_concat32_i64(t1
, t1
, t0
);
6441 tcg_gen_st_i64(t1
, cpu_env
, off
);
6442 tcg_temp_free_i64(t1
);
6443 tcg_temp_free_i64(t0
);
6446 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6448 TCGv_i64 t0
= tcg_temp_new_i64();
6449 TCGv_i64 t1
= tcg_temp_new_i64();
6451 tcg_gen_ext_tl_i64(t0
, arg
);
6452 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6453 tcg_gen_concat32_i64(t1
, t1
, t0
);
6454 tcg_gen_st_i64(t1
, cpu_env
, off
);
6455 tcg_temp_free_i64(t1
);
6456 tcg_temp_free_i64(t0
);
6459 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6461 TCGv_i64 t0
= tcg_temp_new_i64();
6463 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6464 #if defined(TARGET_MIPS64)
6465 tcg_gen_shri_i64(t0
, t0
, 30);
6467 tcg_gen_shri_i64(t0
, t0
, 32);
6469 gen_move_low32(arg
, t0
);
6470 tcg_temp_free_i64(t0
);
6473 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6475 TCGv_i64 t0
= tcg_temp_new_i64();
6477 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6478 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6479 gen_move_low32(arg
, t0
);
6480 tcg_temp_free_i64(t0
);
6483 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6485 TCGv_i32 t0
= tcg_temp_new_i32();
6487 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6488 tcg_gen_ext_i32_tl(arg
, t0
);
6489 tcg_temp_free_i32(t0
);
6492 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6494 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6495 tcg_gen_ext32s_tl(arg
, arg
);
6498 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6500 TCGv_i32 t0
= tcg_temp_new_i32();
6502 tcg_gen_trunc_tl_i32(t0
, arg
);
6503 tcg_gen_st_i32(t0
, cpu_env
, off
);
6504 tcg_temp_free_i32(t0
);
6507 #define CP0_CHECK(c) \
6510 goto cp0_unimplemented; \
6514 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6516 const char *rn
= "invalid";
6522 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6523 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6527 goto cp0_unimplemented
;
6533 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6534 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6538 goto cp0_unimplemented
;
6544 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6545 ctx
->CP0_LLAddr_shift
);
6549 CP0_CHECK(ctx
->mrp
);
6550 gen_helper_mfhc0_maar(arg
, cpu_env
);
6554 goto cp0_unimplemented
;
6563 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6567 goto cp0_unimplemented
;
6571 goto cp0_unimplemented
;
6573 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6577 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6578 tcg_gen_movi_tl(arg
, 0);
6581 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6583 const char *rn
= "invalid";
6584 uint64_t mask
= ctx
->PAMask
>> 36;
6590 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6591 tcg_gen_andi_tl(arg
, arg
, mask
);
6592 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6596 goto cp0_unimplemented
;
6602 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6603 tcg_gen_andi_tl(arg
, arg
, mask
);
6604 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6608 goto cp0_unimplemented
;
6614 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6615 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6616 relevant for modern MIPS cores supporting MTHC0, therefore
6617 treating MTHC0 to LLAddr as NOP. */
6621 CP0_CHECK(ctx
->mrp
);
6622 gen_helper_mthc0_maar(cpu_env
, arg
);
6626 goto cp0_unimplemented
;
6635 tcg_gen_andi_tl(arg
, arg
, mask
);
6636 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6640 goto cp0_unimplemented
;
6644 goto cp0_unimplemented
;
6646 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6649 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6652 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6654 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6655 tcg_gen_movi_tl(arg
, 0);
6657 tcg_gen_movi_tl(arg
, ~0);
6661 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6663 const char *rn
= "invalid";
6666 check_insn(ctx
, ISA_MIPS32
);
6672 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6677 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6681 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6682 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6687 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6692 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6696 goto cp0_unimplemented
;
6702 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6703 gen_helper_mfc0_random(arg
, cpu_env
);
6707 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6708 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6712 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6713 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6717 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6718 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6722 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6723 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6727 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6728 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6732 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6733 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6734 rn
= "VPEScheFBack";
6737 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6742 goto cp0_unimplemented
;
6749 TCGv_i64 tmp
= tcg_temp_new_i64();
6750 tcg_gen_ld_i64(tmp
, cpu_env
,
6751 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6752 #if defined(TARGET_MIPS64)
6754 /* Move RI/XI fields to bits 31:30 */
6755 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6756 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6759 gen_move_low32(arg
, tmp
);
6760 tcg_temp_free_i64(tmp
);
6765 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6766 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6770 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6771 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6775 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6776 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6780 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6781 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6785 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6786 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6790 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6791 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6795 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6796 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6800 goto cp0_unimplemented
;
6807 TCGv_i64 tmp
= tcg_temp_new_i64();
6808 tcg_gen_ld_i64(tmp
, cpu_env
,
6809 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6810 #if defined(TARGET_MIPS64)
6812 /* Move RI/XI fields to bits 31:30 */
6813 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6814 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6817 gen_move_low32(arg
, tmp
);
6818 tcg_temp_free_i64(tmp
);
6824 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6825 rn
= "GlobalNumber";
6828 goto cp0_unimplemented
;
6834 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6835 tcg_gen_ext32s_tl(arg
, arg
);
6839 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6840 rn
= "ContextConfig";
6841 goto cp0_unimplemented
;
6843 CP0_CHECK(ctx
->ulri
);
6844 tcg_gen_ld_tl(arg
, cpu_env
,
6845 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6846 tcg_gen_ext32s_tl(arg
, arg
);
6850 goto cp0_unimplemented
;
6856 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6860 check_insn(ctx
, ISA_MIPS32R2
);
6861 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6866 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6867 tcg_gen_ext32s_tl(arg
, arg
);
6872 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6873 tcg_gen_ext32s_tl(arg
, arg
);
6878 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6879 tcg_gen_ext32s_tl(arg
, arg
);
6884 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6889 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6894 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6898 goto cp0_unimplemented
;
6904 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6908 check_insn(ctx
, ISA_MIPS32R2
);
6909 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6913 check_insn(ctx
, ISA_MIPS32R2
);
6914 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6918 check_insn(ctx
, ISA_MIPS32R2
);
6919 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6923 check_insn(ctx
, ISA_MIPS32R2
);
6924 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6928 check_insn(ctx
, ISA_MIPS32R2
);
6929 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6934 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6938 goto cp0_unimplemented
;
6944 check_insn(ctx
, ISA_MIPS32R2
);
6945 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6949 goto cp0_unimplemented
;
6955 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6956 tcg_gen_ext32s_tl(arg
, arg
);
6961 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6971 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6972 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6976 goto cp0_unimplemented
;
6982 /* Mark as an IO operation because we read the time. */
6983 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6986 gen_helper_mfc0_count(arg
, cpu_env
);
6987 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6990 /* Break the TB to be able to take timer interrupts immediately
6991 after reading count. DISAS_STOP isn't sufficient, we need to
6992 ensure we break completely out of translated code. */
6993 gen_save_pc(ctx
->base
.pc_next
+ 4);
6994 ctx
->base
.is_jmp
= DISAS_EXIT
;
6997 /* 6,7 are implementation dependent */
6999 goto cp0_unimplemented
;
7005 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7006 tcg_gen_ext32s_tl(arg
, arg
);
7010 goto cp0_unimplemented
;
7016 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7019 /* 6,7 are implementation dependent */
7021 goto cp0_unimplemented
;
7027 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7031 check_insn(ctx
, ISA_MIPS32R2
);
7032 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7036 check_insn(ctx
, ISA_MIPS32R2
);
7037 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7041 check_insn(ctx
, ISA_MIPS32R2
);
7042 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7046 goto cp0_unimplemented
;
7052 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7056 goto cp0_unimplemented
;
7062 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7063 tcg_gen_ext32s_tl(arg
, arg
);
7067 goto cp0_unimplemented
;
7073 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7077 check_insn(ctx
, ISA_MIPS32R2
);
7078 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7079 tcg_gen_ext32s_tl(arg
, arg
);
7083 check_insn(ctx
, ISA_MIPS32R2
);
7084 CP0_CHECK(ctx
->cmgcr
);
7085 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7086 tcg_gen_ext32s_tl(arg
, arg
);
7090 goto cp0_unimplemented
;
7096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7104 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7119 /* 6,7 are implementation dependent */
7121 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7125 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7129 goto cp0_unimplemented
;
7135 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7139 CP0_CHECK(ctx
->mrp
);
7140 gen_helper_mfc0_maar(arg
, cpu_env
);
7144 CP0_CHECK(ctx
->mrp
);
7145 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7149 goto cp0_unimplemented
;
7162 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7163 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7167 goto cp0_unimplemented
;
7180 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7181 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7185 goto cp0_unimplemented
;
7191 #if defined(TARGET_MIPS64)
7192 check_insn(ctx
, ISA_MIPS3
);
7193 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7194 tcg_gen_ext32s_tl(arg
, arg
);
7199 goto cp0_unimplemented
;
7203 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7204 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7207 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7211 goto cp0_unimplemented
;
7215 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7216 rn
= "'Diagnostic"; /* implementation dependent */
7221 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7225 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7226 rn
= "TraceControl";
7227 goto cp0_unimplemented
;
7229 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7230 rn
= "TraceControl2";
7231 goto cp0_unimplemented
;
7233 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7234 rn
= "UserTraceData";
7235 goto cp0_unimplemented
;
7237 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7239 goto cp0_unimplemented
;
7241 goto cp0_unimplemented
;
7248 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7249 tcg_gen_ext32s_tl(arg
, arg
);
7253 goto cp0_unimplemented
;
7259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7260 rn
= "Performance0";
7263 // gen_helper_mfc0_performance1(arg);
7264 rn
= "Performance1";
7265 goto cp0_unimplemented
;
7267 // gen_helper_mfc0_performance2(arg);
7268 rn
= "Performance2";
7269 goto cp0_unimplemented
;
7271 // gen_helper_mfc0_performance3(arg);
7272 rn
= "Performance3";
7273 goto cp0_unimplemented
;
7275 // gen_helper_mfc0_performance4(arg);
7276 rn
= "Performance4";
7277 goto cp0_unimplemented
;
7279 // gen_helper_mfc0_performance5(arg);
7280 rn
= "Performance5";
7281 goto cp0_unimplemented
;
7283 // gen_helper_mfc0_performance6(arg);
7284 rn
= "Performance6";
7285 goto cp0_unimplemented
;
7287 // gen_helper_mfc0_performance7(arg);
7288 rn
= "Performance7";
7289 goto cp0_unimplemented
;
7291 goto cp0_unimplemented
;
7297 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7301 goto cp0_unimplemented
;
7310 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7314 goto cp0_unimplemented
;
7324 TCGv_i64 tmp
= tcg_temp_new_i64();
7325 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7326 gen_move_low32(arg
, tmp
);
7327 tcg_temp_free_i64(tmp
);
7335 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7339 goto cp0_unimplemented
;
7348 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7359 goto cp0_unimplemented
;
7365 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7366 tcg_gen_ext32s_tl(arg
, arg
);
7370 goto cp0_unimplemented
;
7377 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7386 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7387 tcg_gen_ld_tl(arg
, cpu_env
,
7388 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7389 tcg_gen_ext32s_tl(arg
, arg
);
7393 goto cp0_unimplemented
;
7397 goto cp0_unimplemented
;
7399 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7403 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7404 gen_mfc0_unimplemented(ctx
, arg
);
7407 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7409 const char *rn
= "invalid";
7412 check_insn(ctx
, ISA_MIPS32
);
7414 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7422 gen_helper_mtc0_index(cpu_env
, arg
);
7426 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7427 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7431 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7436 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7446 goto cp0_unimplemented
;
7456 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7457 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7461 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7462 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7466 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7467 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7471 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7472 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7476 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7477 tcg_gen_st_tl(arg
, cpu_env
,
7478 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7482 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7483 tcg_gen_st_tl(arg
, cpu_env
,
7484 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7485 rn
= "VPEScheFBack";
7488 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7489 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7493 goto cp0_unimplemented
;
7499 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7503 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7504 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7508 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7509 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7513 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7514 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7518 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7519 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7523 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7524 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7528 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7529 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7533 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7534 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7538 goto cp0_unimplemented
;
7544 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7550 rn
= "GlobalNumber";
7553 goto cp0_unimplemented
;
7559 gen_helper_mtc0_context(cpu_env
, arg
);
7563 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7564 rn
= "ContextConfig";
7565 goto cp0_unimplemented
;
7567 CP0_CHECK(ctx
->ulri
);
7568 tcg_gen_st_tl(arg
, cpu_env
,
7569 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7573 goto cp0_unimplemented
;
7579 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7583 check_insn(ctx
, ISA_MIPS32R2
);
7584 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7586 ctx
->base
.is_jmp
= DISAS_STOP
;
7590 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7595 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7600 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7605 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7610 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7615 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7619 goto cp0_unimplemented
;
7625 gen_helper_mtc0_wired(cpu_env
, arg
);
7629 check_insn(ctx
, ISA_MIPS32R2
);
7630 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7634 check_insn(ctx
, ISA_MIPS32R2
);
7635 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7639 check_insn(ctx
, ISA_MIPS32R2
);
7640 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7644 check_insn(ctx
, ISA_MIPS32R2
);
7645 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7649 check_insn(ctx
, ISA_MIPS32R2
);
7650 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7655 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7659 goto cp0_unimplemented
;
7665 check_insn(ctx
, ISA_MIPS32R2
);
7666 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7667 ctx
->base
.is_jmp
= DISAS_STOP
;
7671 goto cp0_unimplemented
;
7693 goto cp0_unimplemented
;
7699 gen_helper_mtc0_count(cpu_env
, arg
);
7702 /* 6,7 are implementation dependent */
7704 goto cp0_unimplemented
;
7710 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7714 goto cp0_unimplemented
;
7720 gen_helper_mtc0_compare(cpu_env
, arg
);
7723 /* 6,7 are implementation dependent */
7725 goto cp0_unimplemented
;
7731 save_cpu_state(ctx
, 1);
7732 gen_helper_mtc0_status(cpu_env
, arg
);
7733 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7734 gen_save_pc(ctx
->base
.pc_next
+ 4);
7735 ctx
->base
.is_jmp
= DISAS_EXIT
;
7739 check_insn(ctx
, ISA_MIPS32R2
);
7740 gen_helper_mtc0_intctl(cpu_env
, arg
);
7741 /* Stop translation as we may have switched the execution mode */
7742 ctx
->base
.is_jmp
= DISAS_STOP
;
7746 check_insn(ctx
, ISA_MIPS32R2
);
7747 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7748 /* Stop translation as we may have switched the execution mode */
7749 ctx
->base
.is_jmp
= DISAS_STOP
;
7753 check_insn(ctx
, ISA_MIPS32R2
);
7754 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7755 /* Stop translation as we may have switched the execution mode */
7756 ctx
->base
.is_jmp
= DISAS_STOP
;
7760 goto cp0_unimplemented
;
7766 save_cpu_state(ctx
, 1);
7767 gen_helper_mtc0_cause(cpu_env
, arg
);
7768 /* Stop translation as we may have triggered an interrupt.
7769 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7770 * translated code to check for pending interrupts. */
7771 gen_save_pc(ctx
->base
.pc_next
+ 4);
7772 ctx
->base
.is_jmp
= DISAS_EXIT
;
7776 goto cp0_unimplemented
;
7782 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7786 goto cp0_unimplemented
;
7796 check_insn(ctx
, ISA_MIPS32R2
);
7797 gen_helper_mtc0_ebase(cpu_env
, arg
);
7801 goto cp0_unimplemented
;
7807 gen_helper_mtc0_config0(cpu_env
, arg
);
7809 /* Stop translation as we may have switched the execution mode */
7810 ctx
->base
.is_jmp
= DISAS_STOP
;
7813 /* ignored, read only */
7817 gen_helper_mtc0_config2(cpu_env
, arg
);
7819 /* Stop translation as we may have switched the execution mode */
7820 ctx
->base
.is_jmp
= DISAS_STOP
;
7823 gen_helper_mtc0_config3(cpu_env
, arg
);
7825 /* Stop translation as we may have switched the execution mode */
7826 ctx
->base
.is_jmp
= DISAS_STOP
;
7829 gen_helper_mtc0_config4(cpu_env
, arg
);
7831 ctx
->base
.is_jmp
= DISAS_STOP
;
7834 gen_helper_mtc0_config5(cpu_env
, arg
);
7836 /* Stop translation as we may have switched the execution mode */
7837 ctx
->base
.is_jmp
= DISAS_STOP
;
7839 /* 6,7 are implementation dependent */
7849 rn
= "Invalid config selector";
7850 goto cp0_unimplemented
;
7856 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7860 CP0_CHECK(ctx
->mrp
);
7861 gen_helper_mtc0_maar(cpu_env
, arg
);
7865 CP0_CHECK(ctx
->mrp
);
7866 gen_helper_mtc0_maari(cpu_env
, arg
);
7870 goto cp0_unimplemented
;
7883 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7884 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7888 goto cp0_unimplemented
;
7901 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7902 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7906 goto cp0_unimplemented
;
7912 #if defined(TARGET_MIPS64)
7913 check_insn(ctx
, ISA_MIPS3
);
7914 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7919 goto cp0_unimplemented
;
7923 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7924 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7927 gen_helper_mtc0_framemask(cpu_env
, arg
);
7931 goto cp0_unimplemented
;
7936 rn
= "Diagnostic"; /* implementation dependent */
7941 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7942 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7943 gen_save_pc(ctx
->base
.pc_next
+ 4);
7944 ctx
->base
.is_jmp
= DISAS_EXIT
;
7948 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7949 rn
= "TraceControl";
7950 /* Stop translation as we may have switched the execution mode */
7951 ctx
->base
.is_jmp
= DISAS_STOP
;
7952 goto cp0_unimplemented
;
7954 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7955 rn
= "TraceControl2";
7956 /* Stop translation as we may have switched the execution mode */
7957 ctx
->base
.is_jmp
= DISAS_STOP
;
7958 goto cp0_unimplemented
;
7960 /* Stop translation as we may have switched the execution mode */
7961 ctx
->base
.is_jmp
= DISAS_STOP
;
7962 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7963 rn
= "UserTraceData";
7964 /* Stop translation as we may have switched the execution mode */
7965 ctx
->base
.is_jmp
= DISAS_STOP
;
7966 goto cp0_unimplemented
;
7968 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7969 /* Stop translation as we may have switched the execution mode */
7970 ctx
->base
.is_jmp
= DISAS_STOP
;
7972 goto cp0_unimplemented
;
7974 goto cp0_unimplemented
;
7981 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7985 goto cp0_unimplemented
;
7991 gen_helper_mtc0_performance0(cpu_env
, arg
);
7992 rn
= "Performance0";
7995 // gen_helper_mtc0_performance1(arg);
7996 rn
= "Performance1";
7997 goto cp0_unimplemented
;
7999 // gen_helper_mtc0_performance2(arg);
8000 rn
= "Performance2";
8001 goto cp0_unimplemented
;
8003 // gen_helper_mtc0_performance3(arg);
8004 rn
= "Performance3";
8005 goto cp0_unimplemented
;
8007 // gen_helper_mtc0_performance4(arg);
8008 rn
= "Performance4";
8009 goto cp0_unimplemented
;
8011 // gen_helper_mtc0_performance5(arg);
8012 rn
= "Performance5";
8013 goto cp0_unimplemented
;
8015 // gen_helper_mtc0_performance6(arg);
8016 rn
= "Performance6";
8017 goto cp0_unimplemented
;
8019 // gen_helper_mtc0_performance7(arg);
8020 rn
= "Performance7";
8021 goto cp0_unimplemented
;
8023 goto cp0_unimplemented
;
8029 gen_helper_mtc0_errctl(cpu_env
, arg
);
8030 ctx
->base
.is_jmp
= DISAS_STOP
;
8034 goto cp0_unimplemented
;
8047 goto cp0_unimplemented
;
8056 gen_helper_mtc0_taglo(cpu_env
, arg
);
8063 gen_helper_mtc0_datalo(cpu_env
, arg
);
8067 goto cp0_unimplemented
;
8076 gen_helper_mtc0_taghi(cpu_env
, arg
);
8083 gen_helper_mtc0_datahi(cpu_env
, arg
);
8088 goto cp0_unimplemented
;
8094 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8098 goto cp0_unimplemented
;
8105 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8114 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8115 tcg_gen_st_tl(arg
, cpu_env
,
8116 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8120 goto cp0_unimplemented
;
8124 goto cp0_unimplemented
;
8126 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
8128 /* For simplicity assume that all writes can cause interrupts. */
8129 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8131 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8132 * translated code to check for pending interrupts. */
8133 gen_save_pc(ctx
->base
.pc_next
+ 4);
8134 ctx
->base
.is_jmp
= DISAS_EXIT
;
8139 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8142 #if defined(TARGET_MIPS64)
8143 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8145 const char *rn
= "invalid";
8148 check_insn(ctx
, ISA_MIPS64
);
8154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8158 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8159 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8163 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8164 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8168 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8169 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8178 goto cp0_unimplemented
;
8184 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8185 gen_helper_mfc0_random(arg
, cpu_env
);
8189 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8190 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8194 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8195 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8199 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8200 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8204 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8205 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8209 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8210 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8214 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8215 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8216 rn
= "VPEScheFBack";
8219 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8220 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8224 goto cp0_unimplemented
;
8230 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8234 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8235 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8239 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8240 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8244 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8245 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8249 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8250 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8255 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8260 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8265 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8269 goto cp0_unimplemented
;
8275 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8281 rn
= "GlobalNumber";
8284 goto cp0_unimplemented
;
8290 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8294 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8295 rn
= "ContextConfig";
8296 goto cp0_unimplemented
;
8298 CP0_CHECK(ctx
->ulri
);
8299 tcg_gen_ld_tl(arg
, cpu_env
,
8300 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8304 goto cp0_unimplemented
;
8310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8314 check_insn(ctx
, ISA_MIPS32R2
);
8315 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8320 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8325 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8330 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8335 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8340 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8345 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8349 goto cp0_unimplemented
;
8355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8359 check_insn(ctx
, ISA_MIPS32R2
);
8360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8364 check_insn(ctx
, ISA_MIPS32R2
);
8365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8369 check_insn(ctx
, ISA_MIPS32R2
);
8370 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8374 check_insn(ctx
, ISA_MIPS32R2
);
8375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8379 check_insn(ctx
, ISA_MIPS32R2
);
8380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8389 goto cp0_unimplemented
;
8395 check_insn(ctx
, ISA_MIPS32R2
);
8396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8400 goto cp0_unimplemented
;
8406 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8411 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8416 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8421 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8422 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8426 goto cp0_unimplemented
;
8432 /* Mark as an IO operation because we read the time. */
8433 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8436 gen_helper_mfc0_count(arg
, cpu_env
);
8437 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8440 /* Break the TB to be able to take timer interrupts immediately
8441 after reading count. DISAS_STOP isn't sufficient, we need to
8442 ensure we break completely out of translated code. */
8443 gen_save_pc(ctx
->base
.pc_next
+ 4);
8444 ctx
->base
.is_jmp
= DISAS_EXIT
;
8447 /* 6,7 are implementation dependent */
8449 goto cp0_unimplemented
;
8455 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8459 goto cp0_unimplemented
;
8465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8468 /* 6,7 are implementation dependent */
8470 goto cp0_unimplemented
;
8476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8480 check_insn(ctx
, ISA_MIPS32R2
);
8481 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8485 check_insn(ctx
, ISA_MIPS32R2
);
8486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8490 check_insn(ctx
, ISA_MIPS32R2
);
8491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8495 goto cp0_unimplemented
;
8501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8505 goto cp0_unimplemented
;
8511 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8515 goto cp0_unimplemented
;
8521 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8525 check_insn(ctx
, ISA_MIPS32R2
);
8526 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8530 check_insn(ctx
, ISA_MIPS32R2
);
8531 CP0_CHECK(ctx
->cmgcr
);
8532 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8536 goto cp0_unimplemented
;
8542 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8546 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8550 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8558 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8562 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8565 /* 6,7 are implementation dependent */
8567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8571 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8575 goto cp0_unimplemented
;
8581 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8585 CP0_CHECK(ctx
->mrp
);
8586 gen_helper_dmfc0_maar(arg
, cpu_env
);
8590 CP0_CHECK(ctx
->mrp
);
8591 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8595 goto cp0_unimplemented
;
8608 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8609 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8613 goto cp0_unimplemented
;
8626 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8627 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8631 goto cp0_unimplemented
;
8637 check_insn(ctx
, ISA_MIPS3
);
8638 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8642 goto cp0_unimplemented
;
8646 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8647 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8650 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8654 goto cp0_unimplemented
;
8658 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8659 rn
= "'Diagnostic"; /* implementation dependent */
8664 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8668 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8669 rn
= "TraceControl";
8670 goto cp0_unimplemented
;
8672 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8673 rn
= "TraceControl2";
8674 goto cp0_unimplemented
;
8676 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8677 rn
= "UserTraceData";
8678 goto cp0_unimplemented
;
8680 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8682 goto cp0_unimplemented
;
8684 goto cp0_unimplemented
;
8691 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8695 goto cp0_unimplemented
;
8701 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8702 rn
= "Performance0";
8705 // gen_helper_dmfc0_performance1(arg);
8706 rn
= "Performance1";
8707 goto cp0_unimplemented
;
8709 // gen_helper_dmfc0_performance2(arg);
8710 rn
= "Performance2";
8711 goto cp0_unimplemented
;
8713 // gen_helper_dmfc0_performance3(arg);
8714 rn
= "Performance3";
8715 goto cp0_unimplemented
;
8717 // gen_helper_dmfc0_performance4(arg);
8718 rn
= "Performance4";
8719 goto cp0_unimplemented
;
8721 // gen_helper_dmfc0_performance5(arg);
8722 rn
= "Performance5";
8723 goto cp0_unimplemented
;
8725 // gen_helper_dmfc0_performance6(arg);
8726 rn
= "Performance6";
8727 goto cp0_unimplemented
;
8729 // gen_helper_dmfc0_performance7(arg);
8730 rn
= "Performance7";
8731 goto cp0_unimplemented
;
8733 goto cp0_unimplemented
;
8739 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8743 goto cp0_unimplemented
;
8753 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8757 goto cp0_unimplemented
;
8766 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8773 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8777 goto cp0_unimplemented
;
8786 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8797 goto cp0_unimplemented
;
8803 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8807 goto cp0_unimplemented
;
8814 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8823 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8824 tcg_gen_ld_tl(arg
, cpu_env
,
8825 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8829 goto cp0_unimplemented
;
8833 goto cp0_unimplemented
;
8835 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8839 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8840 gen_mfc0_unimplemented(ctx
, arg
);
8843 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8845 const char *rn
= "invalid";
8848 check_insn(ctx
, ISA_MIPS64
);
8850 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8858 gen_helper_mtc0_index(cpu_env
, arg
);
8862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8863 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8882 goto cp0_unimplemented
;
8892 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8893 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8897 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8898 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8902 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8903 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8907 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8908 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8912 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8913 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8917 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8918 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8919 rn
= "VPEScheFBack";
8922 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8923 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8927 goto cp0_unimplemented
;
8933 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8937 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8938 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8942 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8943 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8947 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8948 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8952 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8953 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8957 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8958 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8962 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8963 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8968 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8972 goto cp0_unimplemented
;
8978 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8984 rn
= "GlobalNumber";
8987 goto cp0_unimplemented
;
8993 gen_helper_mtc0_context(cpu_env
, arg
);
8997 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8998 rn
= "ContextConfig";
8999 goto cp0_unimplemented
;
9001 CP0_CHECK(ctx
->ulri
);
9002 tcg_gen_st_tl(arg
, cpu_env
,
9003 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9007 goto cp0_unimplemented
;
9013 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9017 check_insn(ctx
, ISA_MIPS32R2
);
9018 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9023 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9028 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9033 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9038 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9043 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9048 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9052 goto cp0_unimplemented
;
9058 gen_helper_mtc0_wired(cpu_env
, arg
);
9062 check_insn(ctx
, ISA_MIPS32R2
);
9063 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9067 check_insn(ctx
, ISA_MIPS32R2
);
9068 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9072 check_insn(ctx
, ISA_MIPS32R2
);
9073 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9077 check_insn(ctx
, ISA_MIPS32R2
);
9078 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9082 check_insn(ctx
, ISA_MIPS32R2
);
9083 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9088 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9092 goto cp0_unimplemented
;
9098 check_insn(ctx
, ISA_MIPS32R2
);
9099 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9100 ctx
->base
.is_jmp
= DISAS_STOP
;
9104 goto cp0_unimplemented
;
9126 goto cp0_unimplemented
;
9132 gen_helper_mtc0_count(cpu_env
, arg
);
9135 /* 6,7 are implementation dependent */
9137 goto cp0_unimplemented
;
9139 /* Stop translation as we may have switched the execution mode */
9140 ctx
->base
.is_jmp
= DISAS_STOP
;
9145 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9149 goto cp0_unimplemented
;
9155 gen_helper_mtc0_compare(cpu_env
, arg
);
9158 /* 6,7 are implementation dependent */
9160 goto cp0_unimplemented
;
9162 /* Stop translation as we may have switched the execution mode */
9163 ctx
->base
.is_jmp
= DISAS_STOP
;
9168 save_cpu_state(ctx
, 1);
9169 gen_helper_mtc0_status(cpu_env
, arg
);
9170 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9171 gen_save_pc(ctx
->base
.pc_next
+ 4);
9172 ctx
->base
.is_jmp
= DISAS_EXIT
;
9176 check_insn(ctx
, ISA_MIPS32R2
);
9177 gen_helper_mtc0_intctl(cpu_env
, arg
);
9178 /* Stop translation as we may have switched the execution mode */
9179 ctx
->base
.is_jmp
= DISAS_STOP
;
9183 check_insn(ctx
, ISA_MIPS32R2
);
9184 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9185 /* Stop translation as we may have switched the execution mode */
9186 ctx
->base
.is_jmp
= DISAS_STOP
;
9190 check_insn(ctx
, ISA_MIPS32R2
);
9191 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9192 /* Stop translation as we may have switched the execution mode */
9193 ctx
->base
.is_jmp
= DISAS_STOP
;
9197 goto cp0_unimplemented
;
9203 save_cpu_state(ctx
, 1);
9204 gen_helper_mtc0_cause(cpu_env
, arg
);
9205 /* Stop translation as we may have triggered an interrupt.
9206 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9207 * translated code to check for pending interrupts. */
9208 gen_save_pc(ctx
->base
.pc_next
+ 4);
9209 ctx
->base
.is_jmp
= DISAS_EXIT
;
9213 goto cp0_unimplemented
;
9219 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9223 goto cp0_unimplemented
;
9233 check_insn(ctx
, ISA_MIPS32R2
);
9234 gen_helper_mtc0_ebase(cpu_env
, arg
);
9238 goto cp0_unimplemented
;
9244 gen_helper_mtc0_config0(cpu_env
, arg
);
9246 /* Stop translation as we may have switched the execution mode */
9247 ctx
->base
.is_jmp
= DISAS_STOP
;
9250 /* ignored, read only */
9254 gen_helper_mtc0_config2(cpu_env
, arg
);
9256 /* Stop translation as we may have switched the execution mode */
9257 ctx
->base
.is_jmp
= DISAS_STOP
;
9260 gen_helper_mtc0_config3(cpu_env
, arg
);
9262 /* Stop translation as we may have switched the execution mode */
9263 ctx
->base
.is_jmp
= DISAS_STOP
;
9266 /* currently ignored */
9270 gen_helper_mtc0_config5(cpu_env
, arg
);
9272 /* Stop translation as we may have switched the execution mode */
9273 ctx
->base
.is_jmp
= DISAS_STOP
;
9275 /* 6,7 are implementation dependent */
9277 rn
= "Invalid config selector";
9278 goto cp0_unimplemented
;
9284 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9288 CP0_CHECK(ctx
->mrp
);
9289 gen_helper_mtc0_maar(cpu_env
, arg
);
9293 CP0_CHECK(ctx
->mrp
);
9294 gen_helper_mtc0_maari(cpu_env
, arg
);
9298 goto cp0_unimplemented
;
9311 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9312 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9316 goto cp0_unimplemented
;
9329 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9330 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9334 goto cp0_unimplemented
;
9340 check_insn(ctx
, ISA_MIPS3
);
9341 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9345 goto cp0_unimplemented
;
9349 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9350 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9353 gen_helper_mtc0_framemask(cpu_env
, arg
);
9357 goto cp0_unimplemented
;
9362 rn
= "Diagnostic"; /* implementation dependent */
9367 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9368 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9369 gen_save_pc(ctx
->base
.pc_next
+ 4);
9370 ctx
->base
.is_jmp
= DISAS_EXIT
;
9374 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9375 /* Stop translation as we may have switched the execution mode */
9376 ctx
->base
.is_jmp
= DISAS_STOP
;
9377 rn
= "TraceControl";
9378 goto cp0_unimplemented
;
9380 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9381 /* Stop translation as we may have switched the execution mode */
9382 ctx
->base
.is_jmp
= DISAS_STOP
;
9383 rn
= "TraceControl2";
9384 goto cp0_unimplemented
;
9386 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9387 /* Stop translation as we may have switched the execution mode */
9388 ctx
->base
.is_jmp
= DISAS_STOP
;
9389 rn
= "UserTraceData";
9390 goto cp0_unimplemented
;
9392 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9393 /* Stop translation as we may have switched the execution mode */
9394 ctx
->base
.is_jmp
= DISAS_STOP
;
9396 goto cp0_unimplemented
;
9398 goto cp0_unimplemented
;
9405 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9409 goto cp0_unimplemented
;
9415 gen_helper_mtc0_performance0(cpu_env
, arg
);
9416 rn
= "Performance0";
9419 // gen_helper_mtc0_performance1(cpu_env, arg);
9420 rn
= "Performance1";
9421 goto cp0_unimplemented
;
9423 // gen_helper_mtc0_performance2(cpu_env, arg);
9424 rn
= "Performance2";
9425 goto cp0_unimplemented
;
9427 // gen_helper_mtc0_performance3(cpu_env, arg);
9428 rn
= "Performance3";
9429 goto cp0_unimplemented
;
9431 // gen_helper_mtc0_performance4(cpu_env, arg);
9432 rn
= "Performance4";
9433 goto cp0_unimplemented
;
9435 // gen_helper_mtc0_performance5(cpu_env, arg);
9436 rn
= "Performance5";
9437 goto cp0_unimplemented
;
9439 // gen_helper_mtc0_performance6(cpu_env, arg);
9440 rn
= "Performance6";
9441 goto cp0_unimplemented
;
9443 // gen_helper_mtc0_performance7(cpu_env, arg);
9444 rn
= "Performance7";
9445 goto cp0_unimplemented
;
9447 goto cp0_unimplemented
;
9453 gen_helper_mtc0_errctl(cpu_env
, arg
);
9454 ctx
->base
.is_jmp
= DISAS_STOP
;
9458 goto cp0_unimplemented
;
9471 goto cp0_unimplemented
;
9480 gen_helper_mtc0_taglo(cpu_env
, arg
);
9487 gen_helper_mtc0_datalo(cpu_env
, arg
);
9491 goto cp0_unimplemented
;
9500 gen_helper_mtc0_taghi(cpu_env
, arg
);
9507 gen_helper_mtc0_datahi(cpu_env
, arg
);
9512 goto cp0_unimplemented
;
9518 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9522 goto cp0_unimplemented
;
9529 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9538 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9539 tcg_gen_st_tl(arg
, cpu_env
,
9540 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9544 goto cp0_unimplemented
;
9548 goto cp0_unimplemented
;
9550 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9552 /* For simplicity assume that all writes can cause interrupts. */
9553 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9555 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9556 * translated code to check for pending interrupts. */
9557 gen_save_pc(ctx
->base
.pc_next
+ 4);
9558 ctx
->base
.is_jmp
= DISAS_EXIT
;
9563 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9565 #endif /* TARGET_MIPS64 */
/*
 * gen_mftr: emit TCG for the MT-ASE MFTR instruction — read a register
 * belonging to another thread context (TC) into GPR rd of the current TC.
 * u selects CP0 (u==0) vs. other register files; sel/h refine the target.
 * NOTE(review): this extraction dropped interior lines (case labels,
 * braces, switch headers) — the embedded original line numbers jump.
 * Code below is kept byte-identical to the extracted text.
 */
9567 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9568 int u
, int sel
, int h
)
/* Target TC index is taken from CP0 VPEControl.TargTC. */
9570 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9571 TCGv t0
= tcg_temp_local_new();
/*
 * If MVP is disabled and the target TC is bound to a different VPE,
 * or the target TC number exceeds MVPConf0.PTC, the read yields -1.
 */
9573 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9574 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9575 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9576 tcg_gen_movi_tl(t0
, -1);
9577 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9578 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9579 tcg_gen_movi_tl(t0
, -1);
/* CP0 reads from the other TC via per-register helpers (u == 0 path). */
9585 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9588 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9598 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9601 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9604 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9607 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9610 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9613 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9616 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
/* Registers not per-TC fall back to a plain mfc0 of the current TC. */
9619 gen_mfc0(ctx
, t0
, rt
, sel
);
9626 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9629 gen_mfc0(ctx
, t0
, rt
, sel
);
9635 gen_helper_mftc0_status(t0
, cpu_env
);
9638 gen_mfc0(ctx
, t0
, rt
, sel
);
9644 gen_helper_mftc0_cause(t0
, cpu_env
);
9654 gen_helper_mftc0_epc(t0
, cpu_env
);
9664 gen_helper_mftc0_ebase(t0
, cpu_env
);
9681 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9691 gen_helper_mftc0_debug(t0
, cpu_env
);
9694 gen_mfc0(ctx
, t0
, rt
, sel
);
9699 gen_mfc0(ctx
, t0
, rt
, sel
);
/* u != 0: non-CP0 register files, dispatched on sel. */
9701 } else switch (sel
) {
9702 /* GPR registers. */
9704 gen_helper_1e0i(mftgpr
, t0
, rt
);
9706 /* Auxiliary CPU registers */
/* DSP accumulator lo/hi/acx pairs 0..3, selected by rt (labels dropped). */
9710 gen_helper_1e0i(mftlo
, t0
, 0);
9713 gen_helper_1e0i(mfthi
, t0
, 0);
9716 gen_helper_1e0i(mftacx
, t0
, 0);
9719 gen_helper_1e0i(mftlo
, t0
, 1);
9722 gen_helper_1e0i(mfthi
, t0
, 1);
9725 gen_helper_1e0i(mftacx
, t0
, 1);
9728 gen_helper_1e0i(mftlo
, t0
, 2);
9731 gen_helper_1e0i(mfthi
, t0
, 2);
9734 gen_helper_1e0i(mftacx
, t0
, 2);
9737 gen_helper_1e0i(mftlo
, t0
, 3);
9740 gen_helper_1e0i(mfthi
, t0
, 3);
9743 gen_helper_1e0i(mftacx
, t0
, 3);
9746 gen_helper_mftdsp(t0
, cpu_env
);
9752 /* Floating point (COP1). */
9754 /* XXX: For now we support only a single FPU context. */
/* h presumably selects low vs. high FPR half — TODO confirm vs. spec. */
9756 TCGv_i32 fp0
= tcg_temp_new_i32();
9758 gen_load_fpr32(ctx
, fp0
, rt
);
9759 tcg_gen_ext_i32_tl(t0
, fp0
);
9760 tcg_temp_free_i32(fp0
);
9762 TCGv_i32 fp0
= tcg_temp_new_i32();
9764 gen_load_fpr32h(ctx
, fp0
, rt
);
9765 tcg_gen_ext_i32_tl(t0
, fp0
);
9766 tcg_temp_free_i32(fp0
);
9770 /* XXX: For now we support only a single FPU context. */
9771 gen_helper_1e0i(cfc1
, t0
, rt
);
9773 /* COP2: Not implemented. */
/* Common exit: trace and commit the value read into GPR rd. */
9780 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9781 gen_store_gpr(t0
, rd
);
/* Invalid-encoding path: log and raise Reserved Instruction. */
9787 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9788 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_mttr: emit TCG for the MT-ASE MTTR instruction — write GPR rt of
 * the current TC into a register of another thread context (TC).
 * Mirror image of gen_mftr; u/sel/h select the destination register file.
 * NOTE(review): extraction gaps — case labels/braces dropped; code kept
 * byte-identical to the extracted text.
 */
9791 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9792 int u
, int sel
, int h
)
9794 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9795 TCGv t0
= tcg_temp_local_new();
/* Load source value from GPR rt before any accessibility checks. */
9797 gen_load_gpr(t0
, rt
);
/*
 * Accessibility check: with MVP disabled, a target TC bound to another
 * VPE (or a TC index above MVPConf0.PTC) makes the write a no-op —
 * the bodies of these guarded branches were dropped by the extraction.
 */
9798 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9799 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9800 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9802 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9803 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
/* CP0 writes to the other TC via per-register helpers (u == 0 path). */
9810 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9813 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9823 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9826 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9829 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9832 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9835 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9838 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9841 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
/* Non-per-TC registers fall back to a plain mtc0 on the current TC. */
9844 gen_mtc0(ctx
, t0
, rd
, sel
);
9851 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9854 gen_mtc0(ctx
, t0
, rd
, sel
);
9860 gen_helper_mttc0_status(cpu_env
, t0
);
9863 gen_mtc0(ctx
, t0
, rd
, sel
);
9869 gen_helper_mttc0_cause(cpu_env
, t0
);
9879 gen_helper_mttc0_ebase(cpu_env
, t0
);
9889 gen_helper_mttc0_debug(cpu_env
, t0
);
9892 gen_mtc0(ctx
, t0
, rd
, sel
);
9897 gen_mtc0(ctx
, t0
, rd
, sel
);
/* u != 0: non-CP0 register files, dispatched on sel. */
9899 } else switch (sel
) {
9900 /* GPR registers. */
9902 gen_helper_0e1i(mttgpr
, t0
, rd
);
9904 /* Auxiliary CPU registers */
/* DSP accumulator lo/hi/acx pairs 0..3 (case labels dropped). */
9908 gen_helper_0e1i(mttlo
, t0
, 0);
9911 gen_helper_0e1i(mtthi
, t0
, 0);
9914 gen_helper_0e1i(mttacx
, t0
, 0);
9917 gen_helper_0e1i(mttlo
, t0
, 1);
9920 gen_helper_0e1i(mtthi
, t0
, 1);
9923 gen_helper_0e1i(mttacx
, t0
, 1);
9926 gen_helper_0e1i(mttlo
, t0
, 2);
9929 gen_helper_0e1i(mtthi
, t0
, 2);
9932 gen_helper_0e1i(mttacx
, t0
, 2);
9935 gen_helper_0e1i(mttlo
, t0
, 3);
9938 gen_helper_0e1i(mtthi
, t0
, 3);
9941 gen_helper_0e1i(mttacx
, t0
, 3);
9944 gen_helper_mttdsp(cpu_env
, t0
);
9950 /* Floating point (COP1). */
9952 /* XXX: For now we support only a single FPU context. */
/* h presumably selects low vs. high FPR half — TODO confirm vs. spec. */
9954 TCGv_i32 fp0
= tcg_temp_new_i32();
9956 tcg_gen_trunc_tl_i32(fp0
, t0
);
9957 gen_store_fpr32(ctx
, fp0
, rd
);
9958 tcg_temp_free_i32(fp0
);
9960 TCGv_i32 fp0
= tcg_temp_new_i32();
9962 tcg_gen_trunc_tl_i32(fp0
, t0
);
9963 gen_store_fpr32h(ctx
, fp0
, rd
);
9964 tcg_temp_free_i32(fp0
);
9968 /* XXX: For now we support only a single FPU context. */
/* CTC1 path: write FP control register rd; may change hflags. */
9970 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9972 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9973 tcg_temp_free_i32(fs_tmp
);
9975 /* Stop translation as we may have changed hflags */
9976 ctx
->base
.is_jmp
= DISAS_STOP
;
9978 /* COP2: Not implemented. */
9985 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
/* Invalid-encoding path: log and raise Reserved Instruction. */
9991 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9992 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_cp0: translate a coprocessor-0 instruction (MFC0/MTC0, DMFC0/DMTC0,
 * MFHC0/MTHC0, MFTR/MTTR, TLB ops, ERET/DERET, WAIT), dispatching on opc.
 * NOTE(review): extraction gaps — the outer switch, its case labels and
 * many braces were dropped; code kept byte-identical to the extracted text.
 */
9995 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9997 const char *opn
= "ldst";
9999 check_cp0_enabled(ctx
);
/* MFC0 with rt == 0 discards the result. */
10003 /* Treat as NOP. */
10006 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
/* MTC0: move GPR rt into CP0 register (rd, sel = opcode & 7). */
10011 TCGv t0
= tcg_temp_new();
10013 gen_load_gpr(t0
, rt
);
10014 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* 64-bit variants DMFC0/DMTC0 are only built for TARGET_MIPS64. */
10019 #if defined(TARGET_MIPS64)
10021 check_insn(ctx
, ISA_MIPS3
);
10023 /* Treat as NOP. */
10026 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10030 check_insn(ctx
, ISA_MIPS3
);
10032 TCGv t0
= tcg_temp_new();
10034 gen_load_gpr(t0
, rt
);
10035 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFHC0/MTHC0: access the high word of extended CP0 registers. */
10044 /* Treat as NOP. */
10047 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10053 TCGv t0
= tcg_temp_new();
10054 gen_load_gpr(t0
, rt
);
10055 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFTR/MTTR (MT ASE): u = bit 5, sel = bits 0-2, h = bit 4 of opcode. */
10061 check_cp0_enabled(ctx
);
10063 /* Treat as NOP. */
10066 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10067 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10071 check_cp0_enabled(ctx
);
10072 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10073 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
/* TLB ops: each checks the CPU model provides the helper first. */
10078 if (!env
->tlb
->helper_tlbwi
)
10080 gen_helper_tlbwi(cpu_env
);
10084 if (ctx
->ie
>= 2) {
10085 if (!env
->tlb
->helper_tlbinv
) {
10088 gen_helper_tlbinv(cpu_env
);
10089 } /* treat as nop if TLBINV not supported */
10093 if (ctx
->ie
>= 2) {
10094 if (!env
->tlb
->helper_tlbinvf
) {
10097 gen_helper_tlbinvf(cpu_env
);
10098 } /* treat as nop if TLBINV not supported */
10102 if (!env
->tlb
->helper_tlbwr
)
10104 gen_helper_tlbwr(cpu_env
);
10108 if (!env
->tlb
->helper_tlbp
)
10110 gen_helper_tlbp(cpu_env
);
10114 if (!env
->tlb
->helper_tlbr
)
10116 gen_helper_tlbr(cpu_env
);
/*
 * ERET/ERETNC share one encoding; bit 6 (bit 16 in MIPS16/microMIPS)
 * distinguishes the no-clear-LL variant (R5+). On R6 an ERET in a
 * branch-delay slot is a Reserved Instruction.
 */
10118 case OPC_ERET
: /* OPC_ERETNC */
10119 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10120 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10123 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10124 if (ctx
->opcode
& (1 << bit_shift
)) {
10127 check_insn(ctx
, ISA_MIPS32R5
);
10128 gen_helper_eretnc(cpu_env
);
10132 check_insn(ctx
, ISA_MIPS2
);
10133 gen_helper_eret(cpu_env
);
/* Exception return leaves translated code entirely. */
10135 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* DERET: only legal in Debug Mode; RI otherwise. */
10140 check_insn(ctx
, ISA_MIPS32
);
10141 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10142 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10145 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10147 generate_exception_end(ctx
, EXCP_RI
);
10149 gen_helper_deret(cpu_env
);
10150 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* WAIT: save state pointing past the insn so resume is at pc + 4. */
10155 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10156 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10157 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10160 /* If we get an exception, we want to restart at next instruction */
10161 ctx
->base
.pc_next
+= 4;
10162 save_cpu_state(ctx
, 1);
10163 ctx
->base
.pc_next
-= 4;
10164 gen_helper_wait(cpu_env
);
10165 ctx
->base
.is_jmp
= DISAS_NORETURN
;
/* Unknown CP0 opcode: Reserved Instruction. */
10170 generate_exception_end(ctx
, EXCP_RI
);
10173 (void)opn
; /* avoid a compiler warning */
10175 #endif /* !CONFIG_USER_ONLY */
10177 /* CP1 Branches (before delay slot) */
/*
 * gen_compute_branch1: translate pre-R6 CP1 condition-code branches
 * (BC1F/BC1T/BC1FL/BC1TL and the BC1ANY2/BC1ANY4 variants). Evaluates
 * the FCR31 condition bit(s) into `bcond` and records the branch target
 * and delay-slot hflags; the branch itself is emitted later.
 * NOTE(review): extraction gaps — case labels and braces were dropped;
 * code kept byte-identical to the extracted text.
 */
10178 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10179 int32_t cc
, int32_t offset
)
10181 target_ulong btarget
;
10182 TCGv_i32 t0
= tcg_temp_new_i32();
/* R6 forbids these branches in a delay/forbidden slot. */
10184 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10185 generate_exception_end(ctx
, EXCP_RI
);
/* cc != 0 requires MIPS IV / MIPS32 (only cc 0 exists before that). */
10190 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
/* Branch target is PC of the delay slot plus the (already scaled) offset. */
10192 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
/* BC1F: bcond = !FCC[cc] (condition bit extracted from fpu_fcr31). */
10196 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10197 tcg_gen_not_i32(t0
, t0
);
10198 tcg_gen_andi_i32(t0
, t0
, 1);
10199 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1FL (likely variant): same predicate, different hflags below. */
10202 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10203 tcg_gen_not_i32(t0
, t0
);
10204 tcg_gen_andi_i32(t0
, t0
, 1);
10205 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1T: bcond = FCC[cc]. */
10208 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10209 tcg_gen_andi_i32(t0
, t0
, 1);
10210 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1TL (likely variant). */
10213 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10214 tcg_gen_andi_i32(t0
, t0
, 1);
10215 tcg_gen_extu_i32_tl(bcond
, t0
);
/* "Likely" branches annul the delay slot: mark MIPS_HFLAG_BL. */
10217 ctx
->hflags
|= MIPS_HFLAG_BL
;
/* BC1ANY2F: branch if either of FCC[cc], FCC[cc+1] is clear (NAND). */
10221 TCGv_i32 t1
= tcg_temp_new_i32();
10222 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10223 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10224 tcg_gen_nand_i32(t0
, t0
, t1
);
10225 tcg_temp_free_i32(t1
);
10226 tcg_gen_andi_i32(t0
, t0
, 1);
10227 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY2T: branch if either condition bit is set (OR). */
10232 TCGv_i32 t1
= tcg_temp_new_i32();
10233 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10234 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10235 tcg_gen_or_i32(t0
, t0
, t1
);
10236 tcg_temp_free_i32(t1
);
10237 tcg_gen_andi_i32(t0
, t0
, 1);
10238 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY4F: branch unless all four of FCC[cc..cc+3] are set. */
10243 TCGv_i32 t1
= tcg_temp_new_i32();
10244 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10245 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10246 tcg_gen_and_i32(t0
, t0
, t1
);
10247 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10248 tcg_gen_and_i32(t0
, t0
, t1
);
10249 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10250 tcg_gen_nand_i32(t0
, t0
, t1
);
10251 tcg_temp_free_i32(t1
);
10252 tcg_gen_andi_i32(t0
, t0
, 1);
10253 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY4T: branch if any of the four condition bits is set. */
10258 TCGv_i32 t1
= tcg_temp_new_i32();
10259 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10260 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10261 tcg_gen_or_i32(t0
, t0
, t1
);
10262 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10263 tcg_gen_or_i32(t0
, t0
, t1
);
10264 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10265 tcg_gen_or_i32(t0
, t0
, t1
);
10266 tcg_temp_free_i32(t1
);
10267 tcg_gen_andi_i32(t0
, t0
, 1);
10268 tcg_gen_extu_i32_tl(bcond
, t0
);
/* Non-likely conditional branches: mark MIPS_HFLAG_BC. */
10271 ctx
->hflags
|= MIPS_HFLAG_BC
;
10274 MIPS_INVAL("cp1 cond branch");
10275 generate_exception_end(ctx
, EXCP_RI
);
/* Record target and the 32-bit delay-slot flag for the branch emitter. */
10278 ctx
->btarget
= btarget
;
10279 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10281 tcg_temp_free_i32(t0
);
10284 /* R6 CP1 Branches */
/*
 * gen_compute_branch1_r6: translate R6 CP1 branches (BC1EQZ/BC1NEZ).
 * The predicate is bit 0 of FPR ft; BC1EQZ inverts it. Sets `bcond`,
 * ctx->btarget and the delay-slot size hflag.
 * NOTE(review): extraction gaps — case labels/braces dropped; code kept
 * byte-identical to the extracted text.
 */
10285 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10286 int32_t ft
, int32_t offset
,
10287 int delayslot_size
)
10289 target_ulong btarget
;
10290 TCGv_i64 t0
= tcg_temp_new_i64();
/* A branch in a delay/forbidden slot is a Reserved Instruction on R6. */
10292 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10293 #ifdef MIPS_DEBUG_DISAS
10294 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10295 "\n", ctx
->base
.pc_next
);
10297 generate_exception_end(ctx
, EXCP_RI
);
/* Predicate = bit 0 of the 64-bit view of FPR ft. */
10301 gen_load_fpr64(ctx
, t0
, ft
);
10302 tcg_gen_andi_i64(t0
, t0
, 1);
10304 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* BC1EQZ: invert the bit so bcond means "take the branch". */
10308 tcg_gen_xori_i64(t0
, t0
, 1);
10309 ctx
->hflags
|= MIPS_HFLAG_BC
;
/* BC1NEZ: predicate used as-is. */
10312 /* t0 already set */
10313 ctx
->hflags
|= MIPS_HFLAG_BC
;
10316 MIPS_INVAL("cp1 cond branch");
10317 generate_exception_end(ctx
, EXCP_RI
);
10321 tcg_gen_trunc_i64_tl(bcond
, t0
);
10323 ctx
->btarget
= btarget
;
/* Record forbidden-slot size (16- or 32-bit) for the branch emitter. */
10325 switch (delayslot_size
) {
10327 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10330 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10335 tcg_temp_free_i64(t0
);
10338 /* Coprocessor 1 (FPU) */
10340 #define FOP(func, fmt) (((fmt) << 21) | (func))
10343 OPC_ADD_S
= FOP(0, FMT_S
),
10344 OPC_SUB_S
= FOP(1, FMT_S
),
10345 OPC_MUL_S
= FOP(2, FMT_S
),
10346 OPC_DIV_S
= FOP(3, FMT_S
),
10347 OPC_SQRT_S
= FOP(4, FMT_S
),
10348 OPC_ABS_S
= FOP(5, FMT_S
),
10349 OPC_MOV_S
= FOP(6, FMT_S
),
10350 OPC_NEG_S
= FOP(7, FMT_S
),
10351 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10352 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10353 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10354 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10355 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10356 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10357 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10358 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10359 OPC_SEL_S
= FOP(16, FMT_S
),
10360 OPC_MOVCF_S
= FOP(17, FMT_S
),
10361 OPC_MOVZ_S
= FOP(18, FMT_S
),
10362 OPC_MOVN_S
= FOP(19, FMT_S
),
10363 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10364 OPC_RECIP_S
= FOP(21, FMT_S
),
10365 OPC_RSQRT_S
= FOP(22, FMT_S
),
10366 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10367 OPC_MADDF_S
= FOP(24, FMT_S
),
10368 OPC_MSUBF_S
= FOP(25, FMT_S
),
10369 OPC_RINT_S
= FOP(26, FMT_S
),
10370 OPC_CLASS_S
= FOP(27, FMT_S
),
10371 OPC_MIN_S
= FOP(28, FMT_S
),
10372 OPC_RECIP2_S
= FOP(28, FMT_S
),
10373 OPC_MINA_S
= FOP(29, FMT_S
),
10374 OPC_RECIP1_S
= FOP(29, FMT_S
),
10375 OPC_MAX_S
= FOP(30, FMT_S
),
10376 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10377 OPC_MAXA_S
= FOP(31, FMT_S
),
10378 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10379 OPC_CVT_D_S
= FOP(33, FMT_S
),
10380 OPC_CVT_W_S
= FOP(36, FMT_S
),
10381 OPC_CVT_L_S
= FOP(37, FMT_S
),
10382 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10383 OPC_CMP_F_S
= FOP (48, FMT_S
),
10384 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10385 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10386 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10387 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10388 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10389 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10390 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10391 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10392 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10393 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10394 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10395 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10396 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10397 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10398 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10400 OPC_ADD_D
= FOP(0, FMT_D
),
10401 OPC_SUB_D
= FOP(1, FMT_D
),
10402 OPC_MUL_D
= FOP(2, FMT_D
),
10403 OPC_DIV_D
= FOP(3, FMT_D
),
10404 OPC_SQRT_D
= FOP(4, FMT_D
),
10405 OPC_ABS_D
= FOP(5, FMT_D
),
10406 OPC_MOV_D
= FOP(6, FMT_D
),
10407 OPC_NEG_D
= FOP(7, FMT_D
),
10408 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10409 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10410 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10411 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10412 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10413 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10414 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10415 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10416 OPC_SEL_D
= FOP(16, FMT_D
),
10417 OPC_MOVCF_D
= FOP(17, FMT_D
),
10418 OPC_MOVZ_D
= FOP(18, FMT_D
),
10419 OPC_MOVN_D
= FOP(19, FMT_D
),
10420 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10421 OPC_RECIP_D
= FOP(21, FMT_D
),
10422 OPC_RSQRT_D
= FOP(22, FMT_D
),
10423 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10424 OPC_MADDF_D
= FOP(24, FMT_D
),
10425 OPC_MSUBF_D
= FOP(25, FMT_D
),
10426 OPC_RINT_D
= FOP(26, FMT_D
),
10427 OPC_CLASS_D
= FOP(27, FMT_D
),
10428 OPC_MIN_D
= FOP(28, FMT_D
),
10429 OPC_RECIP2_D
= FOP(28, FMT_D
),
10430 OPC_MINA_D
= FOP(29, FMT_D
),
10431 OPC_RECIP1_D
= FOP(29, FMT_D
),
10432 OPC_MAX_D
= FOP(30, FMT_D
),
10433 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10434 OPC_MAXA_D
= FOP(31, FMT_D
),
10435 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10436 OPC_CVT_S_D
= FOP(32, FMT_D
),
10437 OPC_CVT_W_D
= FOP(36, FMT_D
),
10438 OPC_CVT_L_D
= FOP(37, FMT_D
),
10439 OPC_CMP_F_D
= FOP (48, FMT_D
),
10440 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10441 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10442 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10443 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10444 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10445 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10446 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10447 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10448 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10449 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10450 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10451 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10452 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10453 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10454 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10456 OPC_CVT_S_W
= FOP(32, FMT_W
),
10457 OPC_CVT_D_W
= FOP(33, FMT_W
),
10458 OPC_CVT_S_L
= FOP(32, FMT_L
),
10459 OPC_CVT_D_L
= FOP(33, FMT_L
),
10460 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10462 OPC_ADD_PS
= FOP(0, FMT_PS
),
10463 OPC_SUB_PS
= FOP(1, FMT_PS
),
10464 OPC_MUL_PS
= FOP(2, FMT_PS
),
10465 OPC_DIV_PS
= FOP(3, FMT_PS
),
10466 OPC_ABS_PS
= FOP(5, FMT_PS
),
10467 OPC_MOV_PS
= FOP(6, FMT_PS
),
10468 OPC_NEG_PS
= FOP(7, FMT_PS
),
10469 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10470 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10471 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10472 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10473 OPC_MULR_PS
= FOP(26, FMT_PS
),
10474 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10475 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10476 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10477 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10479 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10480 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10481 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10482 OPC_PLL_PS
= FOP(44, FMT_PS
),
10483 OPC_PLU_PS
= FOP(45, FMT_PS
),
10484 OPC_PUL_PS
= FOP(46, FMT_PS
),
10485 OPC_PUU_PS
= FOP(47, FMT_PS
),
10486 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10487 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10488 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10489 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10490 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10491 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10492 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10493 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10494 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10495 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10496 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10497 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10498 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10499 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10500 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10501 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10505 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10506 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10507 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10508 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10509 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10510 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10511 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10512 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10513 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10514 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10515 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10516 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10517 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10518 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10519 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10520 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10521 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10522 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10523 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10524 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10525 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10526 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10528 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10529 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10530 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10531 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10532 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10533 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10534 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10535 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10536 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10537 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10538 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10539 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10540 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10541 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10542 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10543 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10544 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10545 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10546 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10547 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10548 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10549 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
/*
 * gen_cp1: translate CP1 move instructions between GPRs and FPRs/FP
 * control registers (MFC1/MTC1, CFC1/CTC1, DMFC1/DMTC1, MFHC1/MTHC1),
 * dispatching on opc.
 * NOTE(review): extraction gaps — case labels/braces dropped; code kept
 * byte-identical to the extracted text.
 */
10551 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10553 TCGv t0
= tcg_temp_new();
/* MFC1: sign-extend the 32-bit FPR fs into GPR rt. */
10558 TCGv_i32 fp0
= tcg_temp_new_i32();
10560 gen_load_fpr32(ctx
, fp0
, fs
);
10561 tcg_gen_ext_i32_tl(t0
, fp0
);
10562 tcg_temp_free_i32(fp0
);
10564 gen_store_gpr(t0
, rt
);
/* MTC1: truncate GPR rt into 32-bit FPR fs. */
10567 gen_load_gpr(t0
, rt
);
10569 TCGv_i32 fp0
= tcg_temp_new_i32();
10571 tcg_gen_trunc_tl_i32(fp0
, t0
);
10572 gen_store_fpr32(ctx
, fp0
, fs
);
10573 tcg_temp_free_i32(fp0
);
/* CFC1: read FP control register fs into GPR rt. */
10577 gen_helper_1e0i(cfc1
, t0
, fs
);
10578 gen_store_gpr(t0
, rt
);
/* CTC1: write GPR rt into FP control register fs. */
10581 gen_load_gpr(t0
, rt
);
10582 save_cpu_state(ctx
, 0);
10584 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10586 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10587 tcg_temp_free_i32(fs_tmp
);
10589 /* Stop translation as we may have changed hflags */
10590 ctx
->base
.is_jmp
= DISAS_STOP
;
/* DMFC1/DMTC1: full 64-bit moves, 64-bit targets only. */
10592 #if defined(TARGET_MIPS64)
10594 gen_load_fpr64(ctx
, t0
, fs
);
10595 gen_store_gpr(t0
, rt
);
10598 gen_load_gpr(t0
, rt
);
10599 gen_store_fpr64(ctx
, t0
, fs
);
/* MFHC1: read the high 32 bits of FPR fs. */
10604 TCGv_i32 fp0
= tcg_temp_new_i32();
10606 gen_load_fpr32h(ctx
, fp0
, fs
);
10607 tcg_gen_ext_i32_tl(t0
, fp0
);
10608 tcg_temp_free_i32(fp0
);
10610 gen_store_gpr(t0
, rt
);
/* MTHC1: write the high 32 bits of FPR fs. */
10613 gen_load_gpr(t0
, rt
);
10615 TCGv_i32 fp0
= tcg_temp_new_i32();
10617 tcg_gen_trunc_tl_i32(fp0
, t0
);
10618 gen_store_fpr32h(ctx
, fp0
, fs
);
10619 tcg_temp_free_i32(fp0
);
/* Unknown CP1 move opcode: Reserved Instruction. */
10623 MIPS_INVAL("cp1 move");
10624 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_movci: translate MOVF/MOVT — conditionally move GPR rs to GPR rd
 * based on FP condition code cc; tf selects move-on-true vs. move-on-false
 * (the condition is inverted into a branch that skips the move).
 * NOTE(review): extraction gaps — declarations, if/else keywords and the
 * gen_set_label call were dropped; code kept byte-identical.
 */
10632 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
/* rd == 0 means the destination is the hard-wired zero register. */
10639 /* Treat as NOP. */
/* Branch condition that SKIPS the move (inverse of tf). */
10644 cond
= TCG_COND_EQ
;
10646 cond
= TCG_COND_NE
;
10648 l1
= gen_new_label();
10649 t0
= tcg_temp_new_i32();
/* Test FCC[cc] in fpu_fcr31 and jump over the move if not taken. */
10650 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10651 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10652 tcg_temp_free_i32(t0
);
/* rs == 0 reads as constant zero; otherwise copy the GPR. */
10654 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10656 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
/*
 * gen_movcf_s: MOVF.S/MOVT.S — conditionally copy single FPR fs to fd
 * based on FP condition code cc (tf parameter selects polarity; its
 * declaration and the if/else keywords were dropped by the extraction).
 */
10661 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10665 TCGv_i32 t0
= tcg_temp_new_i32();
10666 TCGLabel
*l1
= gen_new_label();
/* cond is the branch condition that SKIPS the move. */
10669 cond
= TCG_COND_EQ
;
10671 cond
= TCG_COND_NE
;
/* Test FCC[cc]; if branch taken the move is skipped (label l1). */
10673 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10674 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
/* t0 is reused here to carry the 32-bit FP value. */
10675 gen_load_fpr32(ctx
, t0
, fs
);
10676 gen_store_fpr32(ctx
, t0
, fd
);
10678 tcg_temp_free_i32(t0
);
/*
 * gen_movcf_d: MOVF.D/MOVT.D — conditionally copy double FPR fs to fd
 * based on FP condition code cc; tf selects polarity.
 * NOTE(review): the fp0 declaration and if/else keywords were dropped
 * by the extraction; code kept byte-identical.
 */
10681 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10684 TCGv_i32 t0
= tcg_temp_new_i32();
10686 TCGLabel
*l1
= gen_new_label();
/* cond is the branch condition that SKIPS the move. */
10689 cond
= TCG_COND_EQ
;
10691 cond
= TCG_COND_NE
;
/* Test FCC[cc] and branch past the 64-bit copy when not moving. */
10693 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10694 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10695 tcg_temp_free_i32(t0
);
/* 64-bit copy fs -> fd via a fresh i64 temp. */
10696 fp0
= tcg_temp_new_i64();
10697 gen_load_fpr64(ctx
, fp0
, fs
);
10698 gen_store_fpr64(ctx
, fp0
, fd
);
10699 tcg_temp_free_i64(fp0
);
/*
 * gen_movcf_ps: MOVF.PS/MOVT.PS — conditionally move each 32-bit half of
 * paired-single FPR fs to fd independently: the low half is gated by
 * FCC[cc], the high half by FCC[cc+1].
 * NOTE(review): the cc/tf parameter lines and if/else keywords were
 * dropped by the extraction; code kept byte-identical.
 */
10703 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10707 TCGv_i32 t0
= tcg_temp_new_i32();
10708 TCGLabel
*l1
= gen_new_label();
10709 TCGLabel
*l2
= gen_new_label();
/* cond is the branch condition that SKIPS each half-move. */
10712 cond
= TCG_COND_EQ
;
10714 cond
= TCG_COND_NE
;
/* Low half: test FCC[cc], copy fs.lo -> fd.lo when the move fires. */
10716 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10717 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10718 gen_load_fpr32(ctx
, t0
, fs
);
10719 gen_store_fpr32(ctx
, t0
, fd
);
/* High half: test FCC[cc+1], copy fs.hi -> fd.hi when the move fires. */
10722 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10723 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10724 gen_load_fpr32h(ctx
, t0
, fs
);
10725 gen_store_fpr32h(ctx
, t0
, fd
);
10726 tcg_temp_free_i32(t0
);
/*
 * gen_sel_s: translate R6 single-precision select instructions
 * (SEL.S / SELEQZ.S / SELNEZ.S), branch-free via movcond. fp0 holds fd,
 * fp1 holds ft, fp2 holds fs; the selector is bit 0 of the relevant
 * operand. NOTE(review): the `int fs` parameter line and the switch/case
 * labels were dropped by the extraction; code kept byte-identical.
 */
10730 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10733 TCGv_i32 t1
= tcg_const_i32(0);
10734 TCGv_i32 fp0
= tcg_temp_new_i32();
10735 TCGv_i32 fp1
= tcg_temp_new_i32();
10736 TCGv_i32 fp2
= tcg_temp_new_i32();
10737 gen_load_fpr32(ctx
, fp0
, fd
);
10738 gen_load_fpr32(ctx
, fp1
, ft
);
10739 gen_load_fpr32(ctx
, fp2
, fs
);
/* SEL.S: fd = (fd & 1) ? ft : fs. */
10743 tcg_gen_andi_i32(fp0
, fp0
, 1);
10744 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* SELEQZ.S: fd = (ft & 1) == 0 ? fs : 0. */
10747 tcg_gen_andi_i32(fp1
, fp1
, 1);
10748 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* SELNEZ.S: fd = (ft & 1) != 0 ? fs : 0. */
10751 tcg_gen_andi_i32(fp1
, fp1
, 1);
10752 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10755 MIPS_INVAL("gen_sel_s");
10756 generate_exception_end(ctx
, EXCP_RI
);
/* Commit the selected value and release all temporaries. */
10760 gen_store_fpr32(ctx
, fp0
, fd
);
10761 tcg_temp_free_i32(fp2
);
10762 tcg_temp_free_i32(fp1
);
10763 tcg_temp_free_i32(fp0
);
10764 tcg_temp_free_i32(t1
);
/*
 * gen_sel_d: translate R6 double-precision select instructions
 * (SEL.D / SELEQZ.D / SELNEZ.D) — 64-bit twin of gen_sel_s, branch-free
 * via movcond. NOTE(review): the `int fs` parameter line and the
 * switch/case labels were dropped by the extraction; code kept
 * byte-identical.
 */
10767 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10770 TCGv_i64 t1
= tcg_const_i64(0);
10771 TCGv_i64 fp0
= tcg_temp_new_i64();
10772 TCGv_i64 fp1
= tcg_temp_new_i64();
10773 TCGv_i64 fp2
= tcg_temp_new_i64();
10774 gen_load_fpr64(ctx
, fp0
, fd
);
10775 gen_load_fpr64(ctx
, fp1
, ft
);
10776 gen_load_fpr64(ctx
, fp2
, fs
);
/* SEL.D: fd = (fd & 1) ? ft : fs. */
10780 tcg_gen_andi_i64(fp0
, fp0
, 1);
10781 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* SELEQZ.D: fd = (ft & 1) == 0 ? fs : 0. */
10784 tcg_gen_andi_i64(fp1
, fp1
, 1);
10785 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* SELNEZ.D: fd = (ft & 1) != 0 ? fs : 0. */
10788 tcg_gen_andi_i64(fp1
, fp1
, 1);
10789 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10792 MIPS_INVAL("gen_sel_d");
10793 generate_exception_end(ctx
, EXCP_RI
);
/* Commit the selected value and release all temporaries. */
10797 gen_store_fpr64(ctx
, fp0
, fd
);
10798 tcg_temp_free_i64(fp2
);
10799 tcg_temp_free_i64(fp1
);
10800 tcg_temp_free_i64(fp0
);
10801 tcg_temp_free_i64(t1
);
10804 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10805 int ft
, int fs
, int fd
, int cc
)
10807 uint32_t func
= ctx
->opcode
& 0x3f;
10811 TCGv_i32 fp0
= tcg_temp_new_i32();
10812 TCGv_i32 fp1
= tcg_temp_new_i32();
10814 gen_load_fpr32(ctx
, fp0
, fs
);
10815 gen_load_fpr32(ctx
, fp1
, ft
);
10816 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10817 tcg_temp_free_i32(fp1
);
10818 gen_store_fpr32(ctx
, fp0
, fd
);
10819 tcg_temp_free_i32(fp0
);
10824 TCGv_i32 fp0
= tcg_temp_new_i32();
10825 TCGv_i32 fp1
= tcg_temp_new_i32();
10827 gen_load_fpr32(ctx
, fp0
, fs
);
10828 gen_load_fpr32(ctx
, fp1
, ft
);
10829 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10830 tcg_temp_free_i32(fp1
);
10831 gen_store_fpr32(ctx
, fp0
, fd
);
10832 tcg_temp_free_i32(fp0
);
10837 TCGv_i32 fp0
= tcg_temp_new_i32();
10838 TCGv_i32 fp1
= tcg_temp_new_i32();
10840 gen_load_fpr32(ctx
, fp0
, fs
);
10841 gen_load_fpr32(ctx
, fp1
, ft
);
10842 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10843 tcg_temp_free_i32(fp1
);
10844 gen_store_fpr32(ctx
, fp0
, fd
);
10845 tcg_temp_free_i32(fp0
);
10850 TCGv_i32 fp0
= tcg_temp_new_i32();
10851 TCGv_i32 fp1
= tcg_temp_new_i32();
10853 gen_load_fpr32(ctx
, fp0
, fs
);
10854 gen_load_fpr32(ctx
, fp1
, ft
);
10855 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10856 tcg_temp_free_i32(fp1
);
10857 gen_store_fpr32(ctx
, fp0
, fd
);
10858 tcg_temp_free_i32(fp0
);
10863 TCGv_i32 fp0
= tcg_temp_new_i32();
10865 gen_load_fpr32(ctx
, fp0
, fs
);
10866 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10867 gen_store_fpr32(ctx
, fp0
, fd
);
10868 tcg_temp_free_i32(fp0
);
10873 TCGv_i32 fp0
= tcg_temp_new_i32();
10875 gen_load_fpr32(ctx
, fp0
, fs
);
10876 if (ctx
->abs2008
) {
10877 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10879 gen_helper_float_abs_s(fp0
, fp0
);
10881 gen_store_fpr32(ctx
, fp0
, fd
);
10882 tcg_temp_free_i32(fp0
);
10887 TCGv_i32 fp0
= tcg_temp_new_i32();
10889 gen_load_fpr32(ctx
, fp0
, fs
);
10890 gen_store_fpr32(ctx
, fp0
, fd
);
10891 tcg_temp_free_i32(fp0
);
10896 TCGv_i32 fp0
= tcg_temp_new_i32();
10898 gen_load_fpr32(ctx
, fp0
, fs
);
10899 if (ctx
->abs2008
) {
10900 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10902 gen_helper_float_chs_s(fp0
, fp0
);
10904 gen_store_fpr32(ctx
, fp0
, fd
);
10905 tcg_temp_free_i32(fp0
);
10908 case OPC_ROUND_L_S
:
10909 check_cp1_64bitmode(ctx
);
10911 TCGv_i32 fp32
= tcg_temp_new_i32();
10912 TCGv_i64 fp64
= tcg_temp_new_i64();
10914 gen_load_fpr32(ctx
, fp32
, fs
);
10915 if (ctx
->nan2008
) {
10916 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10918 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10920 tcg_temp_free_i32(fp32
);
10921 gen_store_fpr64(ctx
, fp64
, fd
);
10922 tcg_temp_free_i64(fp64
);
10925 case OPC_TRUNC_L_S
:
10926 check_cp1_64bitmode(ctx
);
10928 TCGv_i32 fp32
= tcg_temp_new_i32();
10929 TCGv_i64 fp64
= tcg_temp_new_i64();
10931 gen_load_fpr32(ctx
, fp32
, fs
);
10932 if (ctx
->nan2008
) {
10933 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10935 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10937 tcg_temp_free_i32(fp32
);
10938 gen_store_fpr64(ctx
, fp64
, fd
);
10939 tcg_temp_free_i64(fp64
);
10943 check_cp1_64bitmode(ctx
);
10945 TCGv_i32 fp32
= tcg_temp_new_i32();
10946 TCGv_i64 fp64
= tcg_temp_new_i64();
10948 gen_load_fpr32(ctx
, fp32
, fs
);
10949 if (ctx
->nan2008
) {
10950 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10952 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10954 tcg_temp_free_i32(fp32
);
10955 gen_store_fpr64(ctx
, fp64
, fd
);
10956 tcg_temp_free_i64(fp64
);
10959 case OPC_FLOOR_L_S
:
10960 check_cp1_64bitmode(ctx
);
10962 TCGv_i32 fp32
= tcg_temp_new_i32();
10963 TCGv_i64 fp64
= tcg_temp_new_i64();
10965 gen_load_fpr32(ctx
, fp32
, fs
);
10966 if (ctx
->nan2008
) {
10967 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10969 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10971 tcg_temp_free_i32(fp32
);
10972 gen_store_fpr64(ctx
, fp64
, fd
);
10973 tcg_temp_free_i64(fp64
);
10976 case OPC_ROUND_W_S
:
10978 TCGv_i32 fp0
= tcg_temp_new_i32();
10980 gen_load_fpr32(ctx
, fp0
, fs
);
10981 if (ctx
->nan2008
) {
10982 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10984 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10986 gen_store_fpr32(ctx
, fp0
, fd
);
10987 tcg_temp_free_i32(fp0
);
10990 case OPC_TRUNC_W_S
:
10992 TCGv_i32 fp0
= tcg_temp_new_i32();
10994 gen_load_fpr32(ctx
, fp0
, fs
);
10995 if (ctx
->nan2008
) {
10996 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10998 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11000 gen_store_fpr32(ctx
, fp0
, fd
);
11001 tcg_temp_free_i32(fp0
);
11006 TCGv_i32 fp0
= tcg_temp_new_i32();
11008 gen_load_fpr32(ctx
, fp0
, fs
);
11009 if (ctx
->nan2008
) {
11010 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11012 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11014 gen_store_fpr32(ctx
, fp0
, fd
);
11015 tcg_temp_free_i32(fp0
);
11018 case OPC_FLOOR_W_S
:
11020 TCGv_i32 fp0
= tcg_temp_new_i32();
11022 gen_load_fpr32(ctx
, fp0
, fs
);
11023 if (ctx
->nan2008
) {
11024 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11026 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11028 gen_store_fpr32(ctx
, fp0
, fd
);
11029 tcg_temp_free_i32(fp0
);
11033 check_insn(ctx
, ISA_MIPS32R6
);
11034 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11037 check_insn(ctx
, ISA_MIPS32R6
);
11038 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11041 check_insn(ctx
, ISA_MIPS32R6
);
11042 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11045 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11046 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11051 TCGLabel
*l1
= gen_new_label();
11055 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11057 fp0
= tcg_temp_new_i32();
11058 gen_load_fpr32(ctx
, fp0
, fs
);
11059 gen_store_fpr32(ctx
, fp0
, fd
);
11060 tcg_temp_free_i32(fp0
);
11065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11067 TCGLabel
*l1
= gen_new_label();
11071 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11072 fp0
= tcg_temp_new_i32();
11073 gen_load_fpr32(ctx
, fp0
, fs
);
11074 gen_store_fpr32(ctx
, fp0
, fd
);
11075 tcg_temp_free_i32(fp0
);
11082 TCGv_i32 fp0
= tcg_temp_new_i32();
11084 gen_load_fpr32(ctx
, fp0
, fs
);
11085 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11086 gen_store_fpr32(ctx
, fp0
, fd
);
11087 tcg_temp_free_i32(fp0
);
11092 TCGv_i32 fp0
= tcg_temp_new_i32();
11094 gen_load_fpr32(ctx
, fp0
, fs
);
11095 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11096 gen_store_fpr32(ctx
, fp0
, fd
);
11097 tcg_temp_free_i32(fp0
);
11101 check_insn(ctx
, ISA_MIPS32R6
);
11103 TCGv_i32 fp0
= tcg_temp_new_i32();
11104 TCGv_i32 fp1
= tcg_temp_new_i32();
11105 TCGv_i32 fp2
= tcg_temp_new_i32();
11106 gen_load_fpr32(ctx
, fp0
, fs
);
11107 gen_load_fpr32(ctx
, fp1
, ft
);
11108 gen_load_fpr32(ctx
, fp2
, fd
);
11109 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11110 gen_store_fpr32(ctx
, fp2
, fd
);
11111 tcg_temp_free_i32(fp2
);
11112 tcg_temp_free_i32(fp1
);
11113 tcg_temp_free_i32(fp0
);
11117 check_insn(ctx
, ISA_MIPS32R6
);
11119 TCGv_i32 fp0
= tcg_temp_new_i32();
11120 TCGv_i32 fp1
= tcg_temp_new_i32();
11121 TCGv_i32 fp2
= tcg_temp_new_i32();
11122 gen_load_fpr32(ctx
, fp0
, fs
);
11123 gen_load_fpr32(ctx
, fp1
, ft
);
11124 gen_load_fpr32(ctx
, fp2
, fd
);
11125 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11126 gen_store_fpr32(ctx
, fp2
, fd
);
11127 tcg_temp_free_i32(fp2
);
11128 tcg_temp_free_i32(fp1
);
11129 tcg_temp_free_i32(fp0
);
11133 check_insn(ctx
, ISA_MIPS32R6
);
11135 TCGv_i32 fp0
= tcg_temp_new_i32();
11136 gen_load_fpr32(ctx
, fp0
, fs
);
11137 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11138 gen_store_fpr32(ctx
, fp0
, fd
);
11139 tcg_temp_free_i32(fp0
);
11143 check_insn(ctx
, ISA_MIPS32R6
);
11145 TCGv_i32 fp0
= tcg_temp_new_i32();
11146 gen_load_fpr32(ctx
, fp0
, fs
);
11147 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11148 gen_store_fpr32(ctx
, fp0
, fd
);
11149 tcg_temp_free_i32(fp0
);
11152 case OPC_MIN_S
: /* OPC_RECIP2_S */
11153 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11155 TCGv_i32 fp0
= tcg_temp_new_i32();
11156 TCGv_i32 fp1
= tcg_temp_new_i32();
11157 TCGv_i32 fp2
= tcg_temp_new_i32();
11158 gen_load_fpr32(ctx
, fp0
, fs
);
11159 gen_load_fpr32(ctx
, fp1
, ft
);
11160 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11161 gen_store_fpr32(ctx
, fp2
, fd
);
11162 tcg_temp_free_i32(fp2
);
11163 tcg_temp_free_i32(fp1
);
11164 tcg_temp_free_i32(fp0
);
11167 check_cp1_64bitmode(ctx
);
11169 TCGv_i32 fp0
= tcg_temp_new_i32();
11170 TCGv_i32 fp1
= tcg_temp_new_i32();
11172 gen_load_fpr32(ctx
, fp0
, fs
);
11173 gen_load_fpr32(ctx
, fp1
, ft
);
11174 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11175 tcg_temp_free_i32(fp1
);
11176 gen_store_fpr32(ctx
, fp0
, fd
);
11177 tcg_temp_free_i32(fp0
);
11181 case OPC_MINA_S
: /* OPC_RECIP1_S */
11182 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11184 TCGv_i32 fp0
= tcg_temp_new_i32();
11185 TCGv_i32 fp1
= tcg_temp_new_i32();
11186 TCGv_i32 fp2
= tcg_temp_new_i32();
11187 gen_load_fpr32(ctx
, fp0
, fs
);
11188 gen_load_fpr32(ctx
, fp1
, ft
);
11189 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11190 gen_store_fpr32(ctx
, fp2
, fd
);
11191 tcg_temp_free_i32(fp2
);
11192 tcg_temp_free_i32(fp1
);
11193 tcg_temp_free_i32(fp0
);
11196 check_cp1_64bitmode(ctx
);
11198 TCGv_i32 fp0
= tcg_temp_new_i32();
11200 gen_load_fpr32(ctx
, fp0
, fs
);
11201 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11202 gen_store_fpr32(ctx
, fp0
, fd
);
11203 tcg_temp_free_i32(fp0
);
11207 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11208 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11210 TCGv_i32 fp0
= tcg_temp_new_i32();
11211 TCGv_i32 fp1
= tcg_temp_new_i32();
11212 gen_load_fpr32(ctx
, fp0
, fs
);
11213 gen_load_fpr32(ctx
, fp1
, ft
);
11214 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11215 gen_store_fpr32(ctx
, fp1
, fd
);
11216 tcg_temp_free_i32(fp1
);
11217 tcg_temp_free_i32(fp0
);
11220 check_cp1_64bitmode(ctx
);
11222 TCGv_i32 fp0
= tcg_temp_new_i32();
11224 gen_load_fpr32(ctx
, fp0
, fs
);
11225 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11226 gen_store_fpr32(ctx
, fp0
, fd
);
11227 tcg_temp_free_i32(fp0
);
11231 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11232 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11234 TCGv_i32 fp0
= tcg_temp_new_i32();
11235 TCGv_i32 fp1
= tcg_temp_new_i32();
11236 gen_load_fpr32(ctx
, fp0
, fs
);
11237 gen_load_fpr32(ctx
, fp1
, ft
);
11238 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11239 gen_store_fpr32(ctx
, fp1
, fd
);
11240 tcg_temp_free_i32(fp1
);
11241 tcg_temp_free_i32(fp0
);
11244 check_cp1_64bitmode(ctx
);
11246 TCGv_i32 fp0
= tcg_temp_new_i32();
11247 TCGv_i32 fp1
= tcg_temp_new_i32();
11249 gen_load_fpr32(ctx
, fp0
, fs
);
11250 gen_load_fpr32(ctx
, fp1
, ft
);
11251 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11252 tcg_temp_free_i32(fp1
);
11253 gen_store_fpr32(ctx
, fp0
, fd
);
11254 tcg_temp_free_i32(fp0
);
11259 check_cp1_registers(ctx
, fd
);
11261 TCGv_i32 fp32
= tcg_temp_new_i32();
11262 TCGv_i64 fp64
= tcg_temp_new_i64();
11264 gen_load_fpr32(ctx
, fp32
, fs
);
11265 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11266 tcg_temp_free_i32(fp32
);
11267 gen_store_fpr64(ctx
, fp64
, fd
);
11268 tcg_temp_free_i64(fp64
);
11273 TCGv_i32 fp0
= tcg_temp_new_i32();
11275 gen_load_fpr32(ctx
, fp0
, fs
);
11276 if (ctx
->nan2008
) {
11277 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11279 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11281 gen_store_fpr32(ctx
, fp0
, fd
);
11282 tcg_temp_free_i32(fp0
);
11286 check_cp1_64bitmode(ctx
);
11288 TCGv_i32 fp32
= tcg_temp_new_i32();
11289 TCGv_i64 fp64
= tcg_temp_new_i64();
11291 gen_load_fpr32(ctx
, fp32
, fs
);
11292 if (ctx
->nan2008
) {
11293 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11295 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11297 tcg_temp_free_i32(fp32
);
11298 gen_store_fpr64(ctx
, fp64
, fd
);
11299 tcg_temp_free_i64(fp64
);
11305 TCGv_i64 fp64
= tcg_temp_new_i64();
11306 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11307 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11309 gen_load_fpr32(ctx
, fp32_0
, fs
);
11310 gen_load_fpr32(ctx
, fp32_1
, ft
);
11311 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11312 tcg_temp_free_i32(fp32_1
);
11313 tcg_temp_free_i32(fp32_0
);
11314 gen_store_fpr64(ctx
, fp64
, fd
);
11315 tcg_temp_free_i64(fp64
);
11321 case OPC_CMP_UEQ_S
:
11322 case OPC_CMP_OLT_S
:
11323 case OPC_CMP_ULT_S
:
11324 case OPC_CMP_OLE_S
:
11325 case OPC_CMP_ULE_S
:
11327 case OPC_CMP_NGLE_S
:
11328 case OPC_CMP_SEQ_S
:
11329 case OPC_CMP_NGL_S
:
11331 case OPC_CMP_NGE_S
:
11333 case OPC_CMP_NGT_S
:
11334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11335 if (ctx
->opcode
& (1 << 6)) {
11336 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11338 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11342 check_cp1_registers(ctx
, fs
| ft
| fd
);
11344 TCGv_i64 fp0
= tcg_temp_new_i64();
11345 TCGv_i64 fp1
= tcg_temp_new_i64();
11347 gen_load_fpr64(ctx
, fp0
, fs
);
11348 gen_load_fpr64(ctx
, fp1
, ft
);
11349 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11350 tcg_temp_free_i64(fp1
);
11351 gen_store_fpr64(ctx
, fp0
, fd
);
11352 tcg_temp_free_i64(fp0
);
11356 check_cp1_registers(ctx
, fs
| ft
| fd
);
11358 TCGv_i64 fp0
= tcg_temp_new_i64();
11359 TCGv_i64 fp1
= tcg_temp_new_i64();
11361 gen_load_fpr64(ctx
, fp0
, fs
);
11362 gen_load_fpr64(ctx
, fp1
, ft
);
11363 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11364 tcg_temp_free_i64(fp1
);
11365 gen_store_fpr64(ctx
, fp0
, fd
);
11366 tcg_temp_free_i64(fp0
);
11370 check_cp1_registers(ctx
, fs
| ft
| fd
);
11372 TCGv_i64 fp0
= tcg_temp_new_i64();
11373 TCGv_i64 fp1
= tcg_temp_new_i64();
11375 gen_load_fpr64(ctx
, fp0
, fs
);
11376 gen_load_fpr64(ctx
, fp1
, ft
);
11377 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11378 tcg_temp_free_i64(fp1
);
11379 gen_store_fpr64(ctx
, fp0
, fd
);
11380 tcg_temp_free_i64(fp0
);
11384 check_cp1_registers(ctx
, fs
| ft
| fd
);
11386 TCGv_i64 fp0
= tcg_temp_new_i64();
11387 TCGv_i64 fp1
= tcg_temp_new_i64();
11389 gen_load_fpr64(ctx
, fp0
, fs
);
11390 gen_load_fpr64(ctx
, fp1
, ft
);
11391 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11392 tcg_temp_free_i64(fp1
);
11393 gen_store_fpr64(ctx
, fp0
, fd
);
11394 tcg_temp_free_i64(fp0
);
11398 check_cp1_registers(ctx
, fs
| fd
);
11400 TCGv_i64 fp0
= tcg_temp_new_i64();
11402 gen_load_fpr64(ctx
, fp0
, fs
);
11403 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11404 gen_store_fpr64(ctx
, fp0
, fd
);
11405 tcg_temp_free_i64(fp0
);
11409 check_cp1_registers(ctx
, fs
| fd
);
11411 TCGv_i64 fp0
= tcg_temp_new_i64();
11413 gen_load_fpr64(ctx
, fp0
, fs
);
11414 if (ctx
->abs2008
) {
11415 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11417 gen_helper_float_abs_d(fp0
, fp0
);
11419 gen_store_fpr64(ctx
, fp0
, fd
);
11420 tcg_temp_free_i64(fp0
);
11424 check_cp1_registers(ctx
, fs
| fd
);
11426 TCGv_i64 fp0
= tcg_temp_new_i64();
11428 gen_load_fpr64(ctx
, fp0
, fs
);
11429 gen_store_fpr64(ctx
, fp0
, fd
);
11430 tcg_temp_free_i64(fp0
);
11434 check_cp1_registers(ctx
, fs
| fd
);
11436 TCGv_i64 fp0
= tcg_temp_new_i64();
11438 gen_load_fpr64(ctx
, fp0
, fs
);
11439 if (ctx
->abs2008
) {
11440 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11442 gen_helper_float_chs_d(fp0
, fp0
);
11444 gen_store_fpr64(ctx
, fp0
, fd
);
11445 tcg_temp_free_i64(fp0
);
11448 case OPC_ROUND_L_D
:
11449 check_cp1_64bitmode(ctx
);
11451 TCGv_i64 fp0
= tcg_temp_new_i64();
11453 gen_load_fpr64(ctx
, fp0
, fs
);
11454 if (ctx
->nan2008
) {
11455 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11457 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11459 gen_store_fpr64(ctx
, fp0
, fd
);
11460 tcg_temp_free_i64(fp0
);
11463 case OPC_TRUNC_L_D
:
11464 check_cp1_64bitmode(ctx
);
11466 TCGv_i64 fp0
= tcg_temp_new_i64();
11468 gen_load_fpr64(ctx
, fp0
, fs
);
11469 if (ctx
->nan2008
) {
11470 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11472 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11474 gen_store_fpr64(ctx
, fp0
, fd
);
11475 tcg_temp_free_i64(fp0
);
11479 check_cp1_64bitmode(ctx
);
11481 TCGv_i64 fp0
= tcg_temp_new_i64();
11483 gen_load_fpr64(ctx
, fp0
, fs
);
11484 if (ctx
->nan2008
) {
11485 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11487 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11489 gen_store_fpr64(ctx
, fp0
, fd
);
11490 tcg_temp_free_i64(fp0
);
11493 case OPC_FLOOR_L_D
:
11494 check_cp1_64bitmode(ctx
);
11496 TCGv_i64 fp0
= tcg_temp_new_i64();
11498 gen_load_fpr64(ctx
, fp0
, fs
);
11499 if (ctx
->nan2008
) {
11500 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11502 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11504 gen_store_fpr64(ctx
, fp0
, fd
);
11505 tcg_temp_free_i64(fp0
);
11508 case OPC_ROUND_W_D
:
11509 check_cp1_registers(ctx
, fs
);
11511 TCGv_i32 fp32
= tcg_temp_new_i32();
11512 TCGv_i64 fp64
= tcg_temp_new_i64();
11514 gen_load_fpr64(ctx
, fp64
, fs
);
11515 if (ctx
->nan2008
) {
11516 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11518 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11520 tcg_temp_free_i64(fp64
);
11521 gen_store_fpr32(ctx
, fp32
, fd
);
11522 tcg_temp_free_i32(fp32
);
11525 case OPC_TRUNC_W_D
:
11526 check_cp1_registers(ctx
, fs
);
11528 TCGv_i32 fp32
= tcg_temp_new_i32();
11529 TCGv_i64 fp64
= tcg_temp_new_i64();
11531 gen_load_fpr64(ctx
, fp64
, fs
);
11532 if (ctx
->nan2008
) {
11533 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11535 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11537 tcg_temp_free_i64(fp64
);
11538 gen_store_fpr32(ctx
, fp32
, fd
);
11539 tcg_temp_free_i32(fp32
);
11543 check_cp1_registers(ctx
, fs
);
11545 TCGv_i32 fp32
= tcg_temp_new_i32();
11546 TCGv_i64 fp64
= tcg_temp_new_i64();
11548 gen_load_fpr64(ctx
, fp64
, fs
);
11549 if (ctx
->nan2008
) {
11550 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11552 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11554 tcg_temp_free_i64(fp64
);
11555 gen_store_fpr32(ctx
, fp32
, fd
);
11556 tcg_temp_free_i32(fp32
);
11559 case OPC_FLOOR_W_D
:
11560 check_cp1_registers(ctx
, fs
);
11562 TCGv_i32 fp32
= tcg_temp_new_i32();
11563 TCGv_i64 fp64
= tcg_temp_new_i64();
11565 gen_load_fpr64(ctx
, fp64
, fs
);
11566 if (ctx
->nan2008
) {
11567 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11569 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11571 tcg_temp_free_i64(fp64
);
11572 gen_store_fpr32(ctx
, fp32
, fd
);
11573 tcg_temp_free_i32(fp32
);
11577 check_insn(ctx
, ISA_MIPS32R6
);
11578 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11581 check_insn(ctx
, ISA_MIPS32R6
);
11582 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11585 check_insn(ctx
, ISA_MIPS32R6
);
11586 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11589 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11590 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11593 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11595 TCGLabel
*l1
= gen_new_label();
11599 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11601 fp0
= tcg_temp_new_i64();
11602 gen_load_fpr64(ctx
, fp0
, fs
);
11603 gen_store_fpr64(ctx
, fp0
, fd
);
11604 tcg_temp_free_i64(fp0
);
11609 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11611 TCGLabel
*l1
= gen_new_label();
11615 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11616 fp0
= tcg_temp_new_i64();
11617 gen_load_fpr64(ctx
, fp0
, fs
);
11618 gen_store_fpr64(ctx
, fp0
, fd
);
11619 tcg_temp_free_i64(fp0
);
11625 check_cp1_registers(ctx
, fs
| fd
);
11627 TCGv_i64 fp0
= tcg_temp_new_i64();
11629 gen_load_fpr64(ctx
, fp0
, fs
);
11630 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11631 gen_store_fpr64(ctx
, fp0
, fd
);
11632 tcg_temp_free_i64(fp0
);
11636 check_cp1_registers(ctx
, fs
| fd
);
11638 TCGv_i64 fp0
= tcg_temp_new_i64();
11640 gen_load_fpr64(ctx
, fp0
, fs
);
11641 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11642 gen_store_fpr64(ctx
, fp0
, fd
);
11643 tcg_temp_free_i64(fp0
);
11647 check_insn(ctx
, ISA_MIPS32R6
);
11649 TCGv_i64 fp0
= tcg_temp_new_i64();
11650 TCGv_i64 fp1
= tcg_temp_new_i64();
11651 TCGv_i64 fp2
= tcg_temp_new_i64();
11652 gen_load_fpr64(ctx
, fp0
, fs
);
11653 gen_load_fpr64(ctx
, fp1
, ft
);
11654 gen_load_fpr64(ctx
, fp2
, fd
);
11655 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11656 gen_store_fpr64(ctx
, fp2
, fd
);
11657 tcg_temp_free_i64(fp2
);
11658 tcg_temp_free_i64(fp1
);
11659 tcg_temp_free_i64(fp0
);
11663 check_insn(ctx
, ISA_MIPS32R6
);
11665 TCGv_i64 fp0
= tcg_temp_new_i64();
11666 TCGv_i64 fp1
= tcg_temp_new_i64();
11667 TCGv_i64 fp2
= tcg_temp_new_i64();
11668 gen_load_fpr64(ctx
, fp0
, fs
);
11669 gen_load_fpr64(ctx
, fp1
, ft
);
11670 gen_load_fpr64(ctx
, fp2
, fd
);
11671 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11672 gen_store_fpr64(ctx
, fp2
, fd
);
11673 tcg_temp_free_i64(fp2
);
11674 tcg_temp_free_i64(fp1
);
11675 tcg_temp_free_i64(fp0
);
11679 check_insn(ctx
, ISA_MIPS32R6
);
11681 TCGv_i64 fp0
= tcg_temp_new_i64();
11682 gen_load_fpr64(ctx
, fp0
, fs
);
11683 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11684 gen_store_fpr64(ctx
, fp0
, fd
);
11685 tcg_temp_free_i64(fp0
);
11689 check_insn(ctx
, ISA_MIPS32R6
);
11691 TCGv_i64 fp0
= tcg_temp_new_i64();
11692 gen_load_fpr64(ctx
, fp0
, fs
);
11693 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11694 gen_store_fpr64(ctx
, fp0
, fd
);
11695 tcg_temp_free_i64(fp0
);
11698 case OPC_MIN_D
: /* OPC_RECIP2_D */
11699 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11701 TCGv_i64 fp0
= tcg_temp_new_i64();
11702 TCGv_i64 fp1
= tcg_temp_new_i64();
11703 gen_load_fpr64(ctx
, fp0
, fs
);
11704 gen_load_fpr64(ctx
, fp1
, ft
);
11705 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11706 gen_store_fpr64(ctx
, fp1
, fd
);
11707 tcg_temp_free_i64(fp1
);
11708 tcg_temp_free_i64(fp0
);
11711 check_cp1_64bitmode(ctx
);
11713 TCGv_i64 fp0
= tcg_temp_new_i64();
11714 TCGv_i64 fp1
= tcg_temp_new_i64();
11716 gen_load_fpr64(ctx
, fp0
, fs
);
11717 gen_load_fpr64(ctx
, fp1
, ft
);
11718 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11719 tcg_temp_free_i64(fp1
);
11720 gen_store_fpr64(ctx
, fp0
, fd
);
11721 tcg_temp_free_i64(fp0
);
11725 case OPC_MINA_D
: /* OPC_RECIP1_D */
11726 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11728 TCGv_i64 fp0
= tcg_temp_new_i64();
11729 TCGv_i64 fp1
= tcg_temp_new_i64();
11730 gen_load_fpr64(ctx
, fp0
, fs
);
11731 gen_load_fpr64(ctx
, fp1
, ft
);
11732 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11733 gen_store_fpr64(ctx
, fp1
, fd
);
11734 tcg_temp_free_i64(fp1
);
11735 tcg_temp_free_i64(fp0
);
11738 check_cp1_64bitmode(ctx
);
11740 TCGv_i64 fp0
= tcg_temp_new_i64();
11742 gen_load_fpr64(ctx
, fp0
, fs
);
11743 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11744 gen_store_fpr64(ctx
, fp0
, fd
);
11745 tcg_temp_free_i64(fp0
);
11749 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11750 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11752 TCGv_i64 fp0
= tcg_temp_new_i64();
11753 TCGv_i64 fp1
= tcg_temp_new_i64();
11754 gen_load_fpr64(ctx
, fp0
, fs
);
11755 gen_load_fpr64(ctx
, fp1
, ft
);
11756 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11757 gen_store_fpr64(ctx
, fp1
, fd
);
11758 tcg_temp_free_i64(fp1
);
11759 tcg_temp_free_i64(fp0
);
11762 check_cp1_64bitmode(ctx
);
11764 TCGv_i64 fp0
= tcg_temp_new_i64();
11766 gen_load_fpr64(ctx
, fp0
, fs
);
11767 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11768 gen_store_fpr64(ctx
, fp0
, fd
);
11769 tcg_temp_free_i64(fp0
);
11773 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11774 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11776 TCGv_i64 fp0
= tcg_temp_new_i64();
11777 TCGv_i64 fp1
= tcg_temp_new_i64();
11778 gen_load_fpr64(ctx
, fp0
, fs
);
11779 gen_load_fpr64(ctx
, fp1
, ft
);
11780 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11781 gen_store_fpr64(ctx
, fp1
, fd
);
11782 tcg_temp_free_i64(fp1
);
11783 tcg_temp_free_i64(fp0
);
11786 check_cp1_64bitmode(ctx
);
11788 TCGv_i64 fp0
= tcg_temp_new_i64();
11789 TCGv_i64 fp1
= tcg_temp_new_i64();
11791 gen_load_fpr64(ctx
, fp0
, fs
);
11792 gen_load_fpr64(ctx
, fp1
, ft
);
11793 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11794 tcg_temp_free_i64(fp1
);
11795 gen_store_fpr64(ctx
, fp0
, fd
);
11796 tcg_temp_free_i64(fp0
);
11803 case OPC_CMP_UEQ_D
:
11804 case OPC_CMP_OLT_D
:
11805 case OPC_CMP_ULT_D
:
11806 case OPC_CMP_OLE_D
:
11807 case OPC_CMP_ULE_D
:
11809 case OPC_CMP_NGLE_D
:
11810 case OPC_CMP_SEQ_D
:
11811 case OPC_CMP_NGL_D
:
11813 case OPC_CMP_NGE_D
:
11815 case OPC_CMP_NGT_D
:
11816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11817 if (ctx
->opcode
& (1 << 6)) {
11818 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11820 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11824 check_cp1_registers(ctx
, fs
);
11826 TCGv_i32 fp32
= tcg_temp_new_i32();
11827 TCGv_i64 fp64
= tcg_temp_new_i64();
11829 gen_load_fpr64(ctx
, fp64
, fs
);
11830 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11831 tcg_temp_free_i64(fp64
);
11832 gen_store_fpr32(ctx
, fp32
, fd
);
11833 tcg_temp_free_i32(fp32
);
11837 check_cp1_registers(ctx
, fs
);
11839 TCGv_i32 fp32
= tcg_temp_new_i32();
11840 TCGv_i64 fp64
= tcg_temp_new_i64();
11842 gen_load_fpr64(ctx
, fp64
, fs
);
11843 if (ctx
->nan2008
) {
11844 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11846 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11848 tcg_temp_free_i64(fp64
);
11849 gen_store_fpr32(ctx
, fp32
, fd
);
11850 tcg_temp_free_i32(fp32
);
11854 check_cp1_64bitmode(ctx
);
11856 TCGv_i64 fp0
= tcg_temp_new_i64();
11858 gen_load_fpr64(ctx
, fp0
, fs
);
11859 if (ctx
->nan2008
) {
11860 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11862 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11864 gen_store_fpr64(ctx
, fp0
, fd
);
11865 tcg_temp_free_i64(fp0
);
11870 TCGv_i32 fp0
= tcg_temp_new_i32();
11872 gen_load_fpr32(ctx
, fp0
, fs
);
11873 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11874 gen_store_fpr32(ctx
, fp0
, fd
);
11875 tcg_temp_free_i32(fp0
);
11879 check_cp1_registers(ctx
, fd
);
11881 TCGv_i32 fp32
= tcg_temp_new_i32();
11882 TCGv_i64 fp64
= tcg_temp_new_i64();
11884 gen_load_fpr32(ctx
, fp32
, fs
);
11885 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11886 tcg_temp_free_i32(fp32
);
11887 gen_store_fpr64(ctx
, fp64
, fd
);
11888 tcg_temp_free_i64(fp64
);
11892 check_cp1_64bitmode(ctx
);
11894 TCGv_i32 fp32
= tcg_temp_new_i32();
11895 TCGv_i64 fp64
= tcg_temp_new_i64();
11897 gen_load_fpr64(ctx
, fp64
, fs
);
11898 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11899 tcg_temp_free_i64(fp64
);
11900 gen_store_fpr32(ctx
, fp32
, fd
);
11901 tcg_temp_free_i32(fp32
);
11905 check_cp1_64bitmode(ctx
);
11907 TCGv_i64 fp0
= tcg_temp_new_i64();
11909 gen_load_fpr64(ctx
, fp0
, fs
);
11910 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11911 gen_store_fpr64(ctx
, fp0
, fd
);
11912 tcg_temp_free_i64(fp0
);
11915 case OPC_CVT_PS_PW
:
11918 TCGv_i64 fp0
= tcg_temp_new_i64();
11920 gen_load_fpr64(ctx
, fp0
, fs
);
11921 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11922 gen_store_fpr64(ctx
, fp0
, fd
);
11923 tcg_temp_free_i64(fp0
);
11929 TCGv_i64 fp0
= tcg_temp_new_i64();
11930 TCGv_i64 fp1
= tcg_temp_new_i64();
11932 gen_load_fpr64(ctx
, fp0
, fs
);
11933 gen_load_fpr64(ctx
, fp1
, ft
);
11934 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11935 tcg_temp_free_i64(fp1
);
11936 gen_store_fpr64(ctx
, fp0
, fd
);
11937 tcg_temp_free_i64(fp0
);
11943 TCGv_i64 fp0
= tcg_temp_new_i64();
11944 TCGv_i64 fp1
= tcg_temp_new_i64();
11946 gen_load_fpr64(ctx
, fp0
, fs
);
11947 gen_load_fpr64(ctx
, fp1
, ft
);
11948 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11949 tcg_temp_free_i64(fp1
);
11950 gen_store_fpr64(ctx
, fp0
, fd
);
11951 tcg_temp_free_i64(fp0
);
11957 TCGv_i64 fp0
= tcg_temp_new_i64();
11958 TCGv_i64 fp1
= tcg_temp_new_i64();
11960 gen_load_fpr64(ctx
, fp0
, fs
);
11961 gen_load_fpr64(ctx
, fp1
, ft
);
11962 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11963 tcg_temp_free_i64(fp1
);
11964 gen_store_fpr64(ctx
, fp0
, fd
);
11965 tcg_temp_free_i64(fp0
);
11971 TCGv_i64 fp0
= tcg_temp_new_i64();
11973 gen_load_fpr64(ctx
, fp0
, fs
);
11974 gen_helper_float_abs_ps(fp0
, fp0
);
11975 gen_store_fpr64(ctx
, fp0
, fd
);
11976 tcg_temp_free_i64(fp0
);
11982 TCGv_i64 fp0
= tcg_temp_new_i64();
11984 gen_load_fpr64(ctx
, fp0
, fs
);
11985 gen_store_fpr64(ctx
, fp0
, fd
);
11986 tcg_temp_free_i64(fp0
);
11992 TCGv_i64 fp0
= tcg_temp_new_i64();
11994 gen_load_fpr64(ctx
, fp0
, fs
);
11995 gen_helper_float_chs_ps(fp0
, fp0
);
11996 gen_store_fpr64(ctx
, fp0
, fd
);
11997 tcg_temp_free_i64(fp0
);
12002 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12007 TCGLabel
*l1
= gen_new_label();
12011 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12012 fp0
= tcg_temp_new_i64();
12013 gen_load_fpr64(ctx
, fp0
, fs
);
12014 gen_store_fpr64(ctx
, fp0
, fd
);
12015 tcg_temp_free_i64(fp0
);
12022 TCGLabel
*l1
= gen_new_label();
12026 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12027 fp0
= tcg_temp_new_i64();
12028 gen_load_fpr64(ctx
, fp0
, fs
);
12029 gen_store_fpr64(ctx
, fp0
, fd
);
12030 tcg_temp_free_i64(fp0
);
12038 TCGv_i64 fp0
= tcg_temp_new_i64();
12039 TCGv_i64 fp1
= tcg_temp_new_i64();
12041 gen_load_fpr64(ctx
, fp0
, ft
);
12042 gen_load_fpr64(ctx
, fp1
, fs
);
12043 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12044 tcg_temp_free_i64(fp1
);
12045 gen_store_fpr64(ctx
, fp0
, fd
);
12046 tcg_temp_free_i64(fp0
);
12052 TCGv_i64 fp0
= tcg_temp_new_i64();
12053 TCGv_i64 fp1
= tcg_temp_new_i64();
12055 gen_load_fpr64(ctx
, fp0
, ft
);
12056 gen_load_fpr64(ctx
, fp1
, fs
);
12057 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12058 tcg_temp_free_i64(fp1
);
12059 gen_store_fpr64(ctx
, fp0
, fd
);
12060 tcg_temp_free_i64(fp0
);
12063 case OPC_RECIP2_PS
:
12066 TCGv_i64 fp0
= tcg_temp_new_i64();
12067 TCGv_i64 fp1
= tcg_temp_new_i64();
12069 gen_load_fpr64(ctx
, fp0
, fs
);
12070 gen_load_fpr64(ctx
, fp1
, ft
);
12071 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12072 tcg_temp_free_i64(fp1
);
12073 gen_store_fpr64(ctx
, fp0
, fd
);
12074 tcg_temp_free_i64(fp0
);
12077 case OPC_RECIP1_PS
:
12080 TCGv_i64 fp0
= tcg_temp_new_i64();
12082 gen_load_fpr64(ctx
, fp0
, fs
);
12083 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12084 gen_store_fpr64(ctx
, fp0
, fd
);
12085 tcg_temp_free_i64(fp0
);
12088 case OPC_RSQRT1_PS
:
12091 TCGv_i64 fp0
= tcg_temp_new_i64();
12093 gen_load_fpr64(ctx
, fp0
, fs
);
12094 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12095 gen_store_fpr64(ctx
, fp0
, fd
);
12096 tcg_temp_free_i64(fp0
);
12099 case OPC_RSQRT2_PS
:
12102 TCGv_i64 fp0
= tcg_temp_new_i64();
12103 TCGv_i64 fp1
= tcg_temp_new_i64();
12105 gen_load_fpr64(ctx
, fp0
, fs
);
12106 gen_load_fpr64(ctx
, fp1
, ft
);
12107 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12108 tcg_temp_free_i64(fp1
);
12109 gen_store_fpr64(ctx
, fp0
, fd
);
12110 tcg_temp_free_i64(fp0
);
12114 check_cp1_64bitmode(ctx
);
12116 TCGv_i32 fp0
= tcg_temp_new_i32();
12118 gen_load_fpr32h(ctx
, fp0
, fs
);
12119 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12120 gen_store_fpr32(ctx
, fp0
, fd
);
12121 tcg_temp_free_i32(fp0
);
12124 case OPC_CVT_PW_PS
:
12127 TCGv_i64 fp0
= tcg_temp_new_i64();
12129 gen_load_fpr64(ctx
, fp0
, fs
);
12130 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12131 gen_store_fpr64(ctx
, fp0
, fd
);
12132 tcg_temp_free_i64(fp0
);
12136 check_cp1_64bitmode(ctx
);
12138 TCGv_i32 fp0
= tcg_temp_new_i32();
12140 gen_load_fpr32(ctx
, fp0
, fs
);
12141 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12142 gen_store_fpr32(ctx
, fp0
, fd
);
12143 tcg_temp_free_i32(fp0
);
12149 TCGv_i32 fp0
= tcg_temp_new_i32();
12150 TCGv_i32 fp1
= tcg_temp_new_i32();
12152 gen_load_fpr32(ctx
, fp0
, fs
);
12153 gen_load_fpr32(ctx
, fp1
, ft
);
12154 gen_store_fpr32h(ctx
, fp0
, fd
);
12155 gen_store_fpr32(ctx
, fp1
, fd
);
12156 tcg_temp_free_i32(fp0
);
12157 tcg_temp_free_i32(fp1
);
12163 TCGv_i32 fp0
= tcg_temp_new_i32();
12164 TCGv_i32 fp1
= tcg_temp_new_i32();
12166 gen_load_fpr32(ctx
, fp0
, fs
);
12167 gen_load_fpr32h(ctx
, fp1
, ft
);
12168 gen_store_fpr32(ctx
, fp1
, fd
);
12169 gen_store_fpr32h(ctx
, fp0
, fd
);
12170 tcg_temp_free_i32(fp0
);
12171 tcg_temp_free_i32(fp1
);
12177 TCGv_i32 fp0
= tcg_temp_new_i32();
12178 TCGv_i32 fp1
= tcg_temp_new_i32();
12180 gen_load_fpr32h(ctx
, fp0
, fs
);
12181 gen_load_fpr32(ctx
, fp1
, ft
);
12182 gen_store_fpr32(ctx
, fp1
, fd
);
12183 gen_store_fpr32h(ctx
, fp0
, fd
);
12184 tcg_temp_free_i32(fp0
);
12185 tcg_temp_free_i32(fp1
);
12191 TCGv_i32 fp0
= tcg_temp_new_i32();
12192 TCGv_i32 fp1
= tcg_temp_new_i32();
12194 gen_load_fpr32h(ctx
, fp0
, fs
);
12195 gen_load_fpr32h(ctx
, fp1
, ft
);
12196 gen_store_fpr32(ctx
, fp1
, fd
);
12197 gen_store_fpr32h(ctx
, fp0
, fd
);
12198 tcg_temp_free_i32(fp0
);
12199 tcg_temp_free_i32(fp1
);
12203 case OPC_CMP_UN_PS
:
12204 case OPC_CMP_EQ_PS
:
12205 case OPC_CMP_UEQ_PS
:
12206 case OPC_CMP_OLT_PS
:
12207 case OPC_CMP_ULT_PS
:
12208 case OPC_CMP_OLE_PS
:
12209 case OPC_CMP_ULE_PS
:
12210 case OPC_CMP_SF_PS
:
12211 case OPC_CMP_NGLE_PS
:
12212 case OPC_CMP_SEQ_PS
:
12213 case OPC_CMP_NGL_PS
:
12214 case OPC_CMP_LT_PS
:
12215 case OPC_CMP_NGE_PS
:
12216 case OPC_CMP_LE_PS
:
12217 case OPC_CMP_NGT_PS
:
12218 if (ctx
->opcode
& (1 << 6)) {
12219 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12221 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12225 MIPS_INVAL("farith");
12226 generate_exception_end(ctx
, EXCP_RI
);
12231 /* Coprocessor 3 (FPU) */
12232 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12233 int fd
, int fs
, int base
, int index
)
12235 TCGv t0
= tcg_temp_new();
12238 gen_load_gpr(t0
, index
);
12239 } else if (index
== 0) {
12240 gen_load_gpr(t0
, base
);
12242 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12244 /* Don't do NOP if destination is zero: we must perform the actual
12250 TCGv_i32 fp0
= tcg_temp_new_i32();
12252 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12253 tcg_gen_trunc_tl_i32(fp0
, t0
);
12254 gen_store_fpr32(ctx
, fp0
, fd
);
12255 tcg_temp_free_i32(fp0
);
12260 check_cp1_registers(ctx
, fd
);
12262 TCGv_i64 fp0
= tcg_temp_new_i64();
12263 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12264 gen_store_fpr64(ctx
, fp0
, fd
);
12265 tcg_temp_free_i64(fp0
);
12269 check_cp1_64bitmode(ctx
);
12270 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12272 TCGv_i64 fp0
= tcg_temp_new_i64();
12274 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12275 gen_store_fpr64(ctx
, fp0
, fd
);
12276 tcg_temp_free_i64(fp0
);
12282 TCGv_i32 fp0
= tcg_temp_new_i32();
12283 gen_load_fpr32(ctx
, fp0
, fs
);
12284 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12285 tcg_temp_free_i32(fp0
);
12290 check_cp1_registers(ctx
, fs
);
12292 TCGv_i64 fp0
= tcg_temp_new_i64();
12293 gen_load_fpr64(ctx
, fp0
, fs
);
12294 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12295 tcg_temp_free_i64(fp0
);
12299 check_cp1_64bitmode(ctx
);
12300 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12302 TCGv_i64 fp0
= tcg_temp_new_i64();
12303 gen_load_fpr64(ctx
, fp0
, fs
);
12304 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12305 tcg_temp_free_i64(fp0
);
12312 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12313 int fd
, int fr
, int fs
, int ft
)
12319 TCGv t0
= tcg_temp_local_new();
12320 TCGv_i32 fp
= tcg_temp_new_i32();
12321 TCGv_i32 fph
= tcg_temp_new_i32();
12322 TCGLabel
*l1
= gen_new_label();
12323 TCGLabel
*l2
= gen_new_label();
12325 gen_load_gpr(t0
, fr
);
12326 tcg_gen_andi_tl(t0
, t0
, 0x7);
12328 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12329 gen_load_fpr32(ctx
, fp
, fs
);
12330 gen_load_fpr32h(ctx
, fph
, fs
);
12331 gen_store_fpr32(ctx
, fp
, fd
);
12332 gen_store_fpr32h(ctx
, fph
, fd
);
12335 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12337 #ifdef TARGET_WORDS_BIGENDIAN
12338 gen_load_fpr32(ctx
, fp
, fs
);
12339 gen_load_fpr32h(ctx
, fph
, ft
);
12340 gen_store_fpr32h(ctx
, fp
, fd
);
12341 gen_store_fpr32(ctx
, fph
, fd
);
12343 gen_load_fpr32h(ctx
, fph
, fs
);
12344 gen_load_fpr32(ctx
, fp
, ft
);
12345 gen_store_fpr32(ctx
, fph
, fd
);
12346 gen_store_fpr32h(ctx
, fp
, fd
);
12349 tcg_temp_free_i32(fp
);
12350 tcg_temp_free_i32(fph
);
12356 TCGv_i32 fp0
= tcg_temp_new_i32();
12357 TCGv_i32 fp1
= tcg_temp_new_i32();
12358 TCGv_i32 fp2
= tcg_temp_new_i32();
12360 gen_load_fpr32(ctx
, fp0
, fs
);
12361 gen_load_fpr32(ctx
, fp1
, ft
);
12362 gen_load_fpr32(ctx
, fp2
, fr
);
12363 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12364 tcg_temp_free_i32(fp0
);
12365 tcg_temp_free_i32(fp1
);
12366 gen_store_fpr32(ctx
, fp2
, fd
);
12367 tcg_temp_free_i32(fp2
);
12372 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12374 TCGv_i64 fp0
= tcg_temp_new_i64();
12375 TCGv_i64 fp1
= tcg_temp_new_i64();
12376 TCGv_i64 fp2
= tcg_temp_new_i64();
12378 gen_load_fpr64(ctx
, fp0
, fs
);
12379 gen_load_fpr64(ctx
, fp1
, ft
);
12380 gen_load_fpr64(ctx
, fp2
, fr
);
12381 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12382 tcg_temp_free_i64(fp0
);
12383 tcg_temp_free_i64(fp1
);
12384 gen_store_fpr64(ctx
, fp2
, fd
);
12385 tcg_temp_free_i64(fp2
);
12391 TCGv_i64 fp0
= tcg_temp_new_i64();
12392 TCGv_i64 fp1
= tcg_temp_new_i64();
12393 TCGv_i64 fp2
= tcg_temp_new_i64();
12395 gen_load_fpr64(ctx
, fp0
, fs
);
12396 gen_load_fpr64(ctx
, fp1
, ft
);
12397 gen_load_fpr64(ctx
, fp2
, fr
);
12398 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12399 tcg_temp_free_i64(fp0
);
12400 tcg_temp_free_i64(fp1
);
12401 gen_store_fpr64(ctx
, fp2
, fd
);
12402 tcg_temp_free_i64(fp2
);
12408 TCGv_i32 fp0
= tcg_temp_new_i32();
12409 TCGv_i32 fp1
= tcg_temp_new_i32();
12410 TCGv_i32 fp2
= tcg_temp_new_i32();
12412 gen_load_fpr32(ctx
, fp0
, fs
);
12413 gen_load_fpr32(ctx
, fp1
, ft
);
12414 gen_load_fpr32(ctx
, fp2
, fr
);
12415 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12416 tcg_temp_free_i32(fp0
);
12417 tcg_temp_free_i32(fp1
);
12418 gen_store_fpr32(ctx
, fp2
, fd
);
12419 tcg_temp_free_i32(fp2
);
12424 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12426 TCGv_i64 fp0
= tcg_temp_new_i64();
12427 TCGv_i64 fp1
= tcg_temp_new_i64();
12428 TCGv_i64 fp2
= tcg_temp_new_i64();
12430 gen_load_fpr64(ctx
, fp0
, fs
);
12431 gen_load_fpr64(ctx
, fp1
, ft
);
12432 gen_load_fpr64(ctx
, fp2
, fr
);
12433 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12434 tcg_temp_free_i64(fp0
);
12435 tcg_temp_free_i64(fp1
);
12436 gen_store_fpr64(ctx
, fp2
, fd
);
12437 tcg_temp_free_i64(fp2
);
12443 TCGv_i64 fp0
= tcg_temp_new_i64();
12444 TCGv_i64 fp1
= tcg_temp_new_i64();
12445 TCGv_i64 fp2
= tcg_temp_new_i64();
12447 gen_load_fpr64(ctx
, fp0
, fs
);
12448 gen_load_fpr64(ctx
, fp1
, ft
);
12449 gen_load_fpr64(ctx
, fp2
, fr
);
12450 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12451 tcg_temp_free_i64(fp0
);
12452 tcg_temp_free_i64(fp1
);
12453 gen_store_fpr64(ctx
, fp2
, fd
);
12454 tcg_temp_free_i64(fp2
);
12460 TCGv_i32 fp0
= tcg_temp_new_i32();
12461 TCGv_i32 fp1
= tcg_temp_new_i32();
12462 TCGv_i32 fp2
= tcg_temp_new_i32();
12464 gen_load_fpr32(ctx
, fp0
, fs
);
12465 gen_load_fpr32(ctx
, fp1
, ft
);
12466 gen_load_fpr32(ctx
, fp2
, fr
);
12467 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12468 tcg_temp_free_i32(fp0
);
12469 tcg_temp_free_i32(fp1
);
12470 gen_store_fpr32(ctx
, fp2
, fd
);
12471 tcg_temp_free_i32(fp2
);
12476 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12478 TCGv_i64 fp0
= tcg_temp_new_i64();
12479 TCGv_i64 fp1
= tcg_temp_new_i64();
12480 TCGv_i64 fp2
= tcg_temp_new_i64();
12482 gen_load_fpr64(ctx
, fp0
, fs
);
12483 gen_load_fpr64(ctx
, fp1
, ft
);
12484 gen_load_fpr64(ctx
, fp2
, fr
);
12485 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12486 tcg_temp_free_i64(fp0
);
12487 tcg_temp_free_i64(fp1
);
12488 gen_store_fpr64(ctx
, fp2
, fd
);
12489 tcg_temp_free_i64(fp2
);
12495 TCGv_i64 fp0
= tcg_temp_new_i64();
12496 TCGv_i64 fp1
= tcg_temp_new_i64();
12497 TCGv_i64 fp2
= tcg_temp_new_i64();
12499 gen_load_fpr64(ctx
, fp0
, fs
);
12500 gen_load_fpr64(ctx
, fp1
, ft
);
12501 gen_load_fpr64(ctx
, fp2
, fr
);
12502 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12503 tcg_temp_free_i64(fp0
);
12504 tcg_temp_free_i64(fp1
);
12505 gen_store_fpr64(ctx
, fp2
, fd
);
12506 tcg_temp_free_i64(fp2
);
12512 TCGv_i32 fp0
= tcg_temp_new_i32();
12513 TCGv_i32 fp1
= tcg_temp_new_i32();
12514 TCGv_i32 fp2
= tcg_temp_new_i32();
12516 gen_load_fpr32(ctx
, fp0
, fs
);
12517 gen_load_fpr32(ctx
, fp1
, ft
);
12518 gen_load_fpr32(ctx
, fp2
, fr
);
12519 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12520 tcg_temp_free_i32(fp0
);
12521 tcg_temp_free_i32(fp1
);
12522 gen_store_fpr32(ctx
, fp2
, fd
);
12523 tcg_temp_free_i32(fp2
);
12528 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12530 TCGv_i64 fp0
= tcg_temp_new_i64();
12531 TCGv_i64 fp1
= tcg_temp_new_i64();
12532 TCGv_i64 fp2
= tcg_temp_new_i64();
12534 gen_load_fpr64(ctx
, fp0
, fs
);
12535 gen_load_fpr64(ctx
, fp1
, ft
);
12536 gen_load_fpr64(ctx
, fp2
, fr
);
12537 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12538 tcg_temp_free_i64(fp0
);
12539 tcg_temp_free_i64(fp1
);
12540 gen_store_fpr64(ctx
, fp2
, fd
);
12541 tcg_temp_free_i64(fp2
);
12547 TCGv_i64 fp0
= tcg_temp_new_i64();
12548 TCGv_i64 fp1
= tcg_temp_new_i64();
12549 TCGv_i64 fp2
= tcg_temp_new_i64();
12551 gen_load_fpr64(ctx
, fp0
, fs
);
12552 gen_load_fpr64(ctx
, fp1
, ft
);
12553 gen_load_fpr64(ctx
, fp2
, fr
);
12554 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12555 tcg_temp_free_i64(fp0
);
12556 tcg_temp_free_i64(fp1
);
12557 gen_store_fpr64(ctx
, fp2
, fd
);
12558 tcg_temp_free_i64(fp2
);
12562 MIPS_INVAL("flt3_arith");
12563 generate_exception_end(ctx
, EXCP_RI
);
12568 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12572 #if !defined(CONFIG_USER_ONLY)
12573 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12574 Therefore only check the ISA in system mode. */
12575 check_insn(ctx
, ISA_MIPS32R2
);
12577 t0
= tcg_temp_new();
12581 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12582 gen_store_gpr(t0
, rt
);
12585 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12586 gen_store_gpr(t0
, rt
);
12589 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12592 gen_helper_rdhwr_cc(t0
, cpu_env
);
12593 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12596 gen_store_gpr(t0
, rt
);
12597 /* Break the TB to be able to take timer interrupts immediately
12598 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12599 we break completely out of translated code. */
12600 gen_save_pc(ctx
->base
.pc_next
+ 4);
12601 ctx
->base
.is_jmp
= DISAS_EXIT
;
12604 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12605 gen_store_gpr(t0
, rt
);
12608 check_insn(ctx
, ISA_MIPS32R6
);
12610 /* Performance counter registers are not implemented other than
12611 * control register 0.
12613 generate_exception(ctx
, EXCP_RI
);
12615 gen_helper_rdhwr_performance(t0
, cpu_env
);
12616 gen_store_gpr(t0
, rt
);
12619 check_insn(ctx
, ISA_MIPS32R6
);
12620 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12621 gen_store_gpr(t0
, rt
);
12624 #if defined(CONFIG_USER_ONLY)
12625 tcg_gen_ld_tl(t0
, cpu_env
,
12626 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12627 gen_store_gpr(t0
, rt
);
12630 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12631 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12632 tcg_gen_ld_tl(t0
, cpu_env
,
12633 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12634 gen_store_gpr(t0
, rt
);
12636 generate_exception_end(ctx
, EXCP_RI
);
12640 default: /* Invalid */
12641 MIPS_INVAL("rdhwr");
12642 generate_exception_end(ctx
, EXCP_RI
);
12648 static inline void clear_branch_hflags(DisasContext
*ctx
)
12650 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12651 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12652 save_cpu_state(ctx
, 0);
12654 /* it is not safe to save ctx->hflags as hflags may be changed
12655 in execution time by the instruction in delay / forbidden slot. */
12656 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12660 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12662 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12663 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12664 /* Branches completion */
12665 clear_branch_hflags(ctx
);
12666 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12667 /* FIXME: Need to clear can_do_io. */
12668 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12669 case MIPS_HFLAG_FBNSLOT
:
12670 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12673 /* unconditional branch */
12674 if (proc_hflags
& MIPS_HFLAG_BX
) {
12675 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12677 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12679 case MIPS_HFLAG_BL
:
12680 /* blikely taken case */
12681 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12683 case MIPS_HFLAG_BC
:
12684 /* Conditional branch */
12686 TCGLabel
*l1
= gen_new_label();
12688 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12689 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12691 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12694 case MIPS_HFLAG_BR
:
12695 /* unconditional branch to register */
12696 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12697 TCGv t0
= tcg_temp_new();
12698 TCGv_i32 t1
= tcg_temp_new_i32();
12700 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12701 tcg_gen_trunc_tl_i32(t1
, t0
);
12703 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12704 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12705 tcg_gen_or_i32(hflags
, hflags
, t1
);
12706 tcg_temp_free_i32(t1
);
12708 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12710 tcg_gen_mov_tl(cpu_PC
, btarget
);
12712 if (ctx
->base
.singlestep_enabled
) {
12713 save_cpu_state(ctx
, 0);
12714 gen_helper_raise_exception_debug(cpu_env
);
12716 tcg_gen_lookup_and_goto_ptr();
12719 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12725 /* Compact Branches */
12726 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12727 int rs
, int rt
, int32_t offset
)
12729 int bcond_compute
= 0;
12730 TCGv t0
= tcg_temp_new();
12731 TCGv t1
= tcg_temp_new();
12732 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12734 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12735 #ifdef MIPS_DEBUG_DISAS
12736 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12737 "\n", ctx
->base
.pc_next
);
12739 generate_exception_end(ctx
, EXCP_RI
);
12743 /* Load needed operands and calculate btarget */
12745 /* compact branch */
12746 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12747 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12748 gen_load_gpr(t0
, rs
);
12749 gen_load_gpr(t1
, rt
);
12751 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12752 if (rs
<= rt
&& rs
== 0) {
12753 /* OPC_BEQZALC, OPC_BNEZALC */
12754 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12757 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12758 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12759 gen_load_gpr(t0
, rs
);
12760 gen_load_gpr(t1
, rt
);
12762 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12764 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12765 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12766 if (rs
== 0 || rs
== rt
) {
12767 /* OPC_BLEZALC, OPC_BGEZALC */
12768 /* OPC_BGTZALC, OPC_BLTZALC */
12769 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12771 gen_load_gpr(t0
, rs
);
12772 gen_load_gpr(t1
, rt
);
12774 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12778 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12783 /* OPC_BEQZC, OPC_BNEZC */
12784 gen_load_gpr(t0
, rs
);
12786 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12788 /* OPC_JIC, OPC_JIALC */
12789 TCGv tbase
= tcg_temp_new();
12790 TCGv toffset
= tcg_temp_new();
12792 gen_load_gpr(tbase
, rt
);
12793 tcg_gen_movi_tl(toffset
, offset
);
12794 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12795 tcg_temp_free(tbase
);
12796 tcg_temp_free(toffset
);
12800 MIPS_INVAL("Compact branch/jump");
12801 generate_exception_end(ctx
, EXCP_RI
);
12805 if (bcond_compute
== 0) {
12806 /* Uncoditional compact branch */
12809 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12812 ctx
->hflags
|= MIPS_HFLAG_BR
;
12815 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12818 ctx
->hflags
|= MIPS_HFLAG_B
;
12821 MIPS_INVAL("Compact branch/jump");
12822 generate_exception_end(ctx
, EXCP_RI
);
12826 /* Generating branch here as compact branches don't have delay slot */
12827 gen_branch(ctx
, 4);
12829 /* Conditional compact branch */
12830 TCGLabel
*fs
= gen_new_label();
12831 save_cpu_state(ctx
, 0);
12834 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12835 if (rs
== 0 && rt
!= 0) {
12837 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12838 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12840 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12843 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12846 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12847 if (rs
== 0 && rt
!= 0) {
12849 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12850 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12852 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12855 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12858 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12859 if (rs
== 0 && rt
!= 0) {
12861 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12862 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12864 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12867 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12870 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12871 if (rs
== 0 && rt
!= 0) {
12873 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12874 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12876 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12879 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12882 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12883 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12885 /* OPC_BOVC, OPC_BNVC */
12886 TCGv t2
= tcg_temp_new();
12887 TCGv t3
= tcg_temp_new();
12888 TCGv t4
= tcg_temp_new();
12889 TCGv input_overflow
= tcg_temp_new();
12891 gen_load_gpr(t0
, rs
);
12892 gen_load_gpr(t1
, rt
);
12893 tcg_gen_ext32s_tl(t2
, t0
);
12894 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12895 tcg_gen_ext32s_tl(t3
, t1
);
12896 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12897 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12899 tcg_gen_add_tl(t4
, t2
, t3
);
12900 tcg_gen_ext32s_tl(t4
, t4
);
12901 tcg_gen_xor_tl(t2
, t2
, t3
);
12902 tcg_gen_xor_tl(t3
, t4
, t3
);
12903 tcg_gen_andc_tl(t2
, t3
, t2
);
12904 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12905 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12906 if (opc
== OPC_BOVC
) {
12908 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12911 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12913 tcg_temp_free(input_overflow
);
12917 } else if (rs
< rt
&& rs
== 0) {
12918 /* OPC_BEQZALC, OPC_BNEZALC */
12919 if (opc
== OPC_BEQZALC
) {
12921 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12924 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12927 /* OPC_BEQC, OPC_BNEC */
12928 if (opc
== OPC_BEQC
) {
12930 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12933 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12938 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12941 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12944 MIPS_INVAL("Compact conditional branch/jump");
12945 generate_exception_end(ctx
, EXCP_RI
);
12949 /* Generating branch here as compact branches don't have delay slot */
12950 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12953 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12961 /* ISA extensions (ASEs) */
12962 /* MIPS16 extension to MIPS32 */
12964 /* MIPS16 major opcodes */
12966 M16_OPC_ADDIUSP
= 0x00,
12967 M16_OPC_ADDIUPC
= 0x01,
12969 M16_OPC_JAL
= 0x03,
12970 M16_OPC_BEQZ
= 0x04,
12971 M16_OPC_BNEQZ
= 0x05,
12972 M16_OPC_SHIFT
= 0x06,
12974 M16_OPC_RRIA
= 0x08,
12975 M16_OPC_ADDIU8
= 0x09,
12976 M16_OPC_SLTI
= 0x0a,
12977 M16_OPC_SLTIU
= 0x0b,
12980 M16_OPC_CMPI
= 0x0e,
12984 M16_OPC_LWSP
= 0x12,
12986 M16_OPC_LBU
= 0x14,
12987 M16_OPC_LHU
= 0x15,
12988 M16_OPC_LWPC
= 0x16,
12989 M16_OPC_LWU
= 0x17,
12992 M16_OPC_SWSP
= 0x1a,
12994 M16_OPC_RRR
= 0x1c,
12996 M16_OPC_EXTEND
= 0x1e,
13000 /* I8 funct field */
13019 /* RR funct field */
13053 /* I64 funct field */
13061 I64_DADDIUPC
= 0x6,
13065 /* RR ry field for CNVT */
13067 RR_RY_CNVT_ZEB
= 0x0,
13068 RR_RY_CNVT_ZEH
= 0x1,
13069 RR_RY_CNVT_ZEW
= 0x2,
13070 RR_RY_CNVT_SEB
= 0x4,
13071 RR_RY_CNVT_SEH
= 0x5,
13072 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field to the corresponding MIPS32
 * GPR number.  MIPS16 encodings can only name 8 registers: $16, $17
 * and $2..$7.
 *
 * `r` must be in the range 0..7 (it comes from a 3-bit opcode field).
 * The table is never modified, so declare it const.
 */
static int xlat (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
13082 static void gen_mips16_save (DisasContext
*ctx
,
13083 int xsregs
, int aregs
,
13084 int do_ra
, int do_s0
, int do_s1
,
13087 TCGv t0
= tcg_temp_new();
13088 TCGv t1
= tcg_temp_new();
13089 TCGv t2
= tcg_temp_new();
13119 generate_exception_end(ctx
, EXCP_RI
);
13125 gen_base_offset_addr(ctx
, t0
, 29, 12);
13126 gen_load_gpr(t1
, 7);
13127 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13130 gen_base_offset_addr(ctx
, t0
, 29, 8);
13131 gen_load_gpr(t1
, 6);
13132 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13135 gen_base_offset_addr(ctx
, t0
, 29, 4);
13136 gen_load_gpr(t1
, 5);
13137 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13140 gen_base_offset_addr(ctx
, t0
, 29, 0);
13141 gen_load_gpr(t1
, 4);
13142 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13145 gen_load_gpr(t0
, 29);
13147 #define DECR_AND_STORE(reg) do { \
13148 tcg_gen_movi_tl(t2, -4); \
13149 gen_op_addr_add(ctx, t0, t0, t2); \
13150 gen_load_gpr(t1, reg); \
13151 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13155 DECR_AND_STORE(31);
13160 DECR_AND_STORE(30);
13163 DECR_AND_STORE(23);
13166 DECR_AND_STORE(22);
13169 DECR_AND_STORE(21);
13172 DECR_AND_STORE(20);
13175 DECR_AND_STORE(19);
13178 DECR_AND_STORE(18);
13182 DECR_AND_STORE(17);
13185 DECR_AND_STORE(16);
13215 generate_exception_end(ctx
, EXCP_RI
);
13231 #undef DECR_AND_STORE
13233 tcg_gen_movi_tl(t2
, -framesize
);
13234 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13240 static void gen_mips16_restore (DisasContext
*ctx
,
13241 int xsregs
, int aregs
,
13242 int do_ra
, int do_s0
, int do_s1
,
13246 TCGv t0
= tcg_temp_new();
13247 TCGv t1
= tcg_temp_new();
13248 TCGv t2
= tcg_temp_new();
13250 tcg_gen_movi_tl(t2
, framesize
);
13251 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13253 #define DECR_AND_LOAD(reg) do { \
13254 tcg_gen_movi_tl(t2, -4); \
13255 gen_op_addr_add(ctx, t0, t0, t2); \
13256 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13257 gen_store_gpr(t1, reg); \
13321 generate_exception_end(ctx
, EXCP_RI
);
13337 #undef DECR_AND_LOAD
13339 tcg_gen_movi_tl(t2
, framesize
);
13340 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13346 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13347 int is_64_bit
, int extended
)
13351 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13352 generate_exception_end(ctx
, EXCP_RI
);
13356 t0
= tcg_temp_new();
13358 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13359 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13361 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13367 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13370 TCGv_i32 t0
= tcg_const_i32(op
);
13371 TCGv t1
= tcg_temp_new();
13372 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13373 gen_helper_cache(cpu_env
, t1
, t0
);
13376 #if defined(TARGET_MIPS64)
13377 static void decode_i64_mips16 (DisasContext
*ctx
,
13378 int ry
, int funct
, int16_t offset
,
13383 check_insn(ctx
, ISA_MIPS3
);
13384 check_mips_64(ctx
);
13385 offset
= extended
? offset
: offset
<< 3;
13386 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13389 check_insn(ctx
, ISA_MIPS3
);
13390 check_mips_64(ctx
);
13391 offset
= extended
? offset
: offset
<< 3;
13392 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13395 check_insn(ctx
, ISA_MIPS3
);
13396 check_mips_64(ctx
);
13397 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13398 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13401 check_insn(ctx
, ISA_MIPS3
);
13402 check_mips_64(ctx
);
13403 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13404 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13407 check_insn(ctx
, ISA_MIPS3
);
13408 check_mips_64(ctx
);
13409 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13410 generate_exception_end(ctx
, EXCP_RI
);
13412 offset
= extended
? offset
: offset
<< 3;
13413 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13417 check_insn(ctx
, ISA_MIPS3
);
13418 check_mips_64(ctx
);
13419 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13420 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13423 check_insn(ctx
, ISA_MIPS3
);
13424 check_mips_64(ctx
);
13425 offset
= extended
? offset
: offset
<< 2;
13426 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13429 check_insn(ctx
, ISA_MIPS3
);
13430 check_mips_64(ctx
);
13431 offset
= extended
? offset
: offset
<< 2;
13432 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13438 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13440 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13441 int op
, rx
, ry
, funct
, sa
;
13442 int16_t imm
, offset
;
13444 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13445 op
= (ctx
->opcode
>> 11) & 0x1f;
13446 sa
= (ctx
->opcode
>> 22) & 0x1f;
13447 funct
= (ctx
->opcode
>> 8) & 0x7;
13448 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13449 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13450 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13451 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13452 | (ctx
->opcode
& 0x1f));
13454 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13457 case M16_OPC_ADDIUSP
:
13458 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13460 case M16_OPC_ADDIUPC
:
13461 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13464 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13465 /* No delay slot, so just process as a normal instruction */
13468 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13469 /* No delay slot, so just process as a normal instruction */
13471 case M16_OPC_BNEQZ
:
13472 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13473 /* No delay slot, so just process as a normal instruction */
13475 case M16_OPC_SHIFT
:
13476 switch (ctx
->opcode
& 0x3) {
13478 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13481 #if defined(TARGET_MIPS64)
13482 check_mips_64(ctx
);
13483 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13485 generate_exception_end(ctx
, EXCP_RI
);
13489 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13492 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13496 #if defined(TARGET_MIPS64)
13498 check_insn(ctx
, ISA_MIPS3
);
13499 check_mips_64(ctx
);
13500 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13504 imm
= ctx
->opcode
& 0xf;
13505 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13506 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13507 imm
= (int16_t) (imm
<< 1) >> 1;
13508 if ((ctx
->opcode
>> 4) & 0x1) {
13509 #if defined(TARGET_MIPS64)
13510 check_mips_64(ctx
);
13511 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13513 generate_exception_end(ctx
, EXCP_RI
);
13516 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13519 case M16_OPC_ADDIU8
:
13520 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13523 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13525 case M16_OPC_SLTIU
:
13526 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13531 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13534 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13537 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13540 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13543 check_insn(ctx
, ISA_MIPS32
);
13545 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13546 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13547 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13548 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13549 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13550 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13551 | (ctx
->opcode
& 0xf)) << 3;
13553 if (ctx
->opcode
& (1 << 7)) {
13554 gen_mips16_save(ctx
, xsregs
, aregs
,
13555 do_ra
, do_s0
, do_s1
,
13558 gen_mips16_restore(ctx
, xsregs
, aregs
,
13559 do_ra
, do_s0
, do_s1
,
13565 generate_exception_end(ctx
, EXCP_RI
);
13570 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13573 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13575 #if defined(TARGET_MIPS64)
13577 check_insn(ctx
, ISA_MIPS3
);
13578 check_mips_64(ctx
);
13579 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13583 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13586 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13589 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13592 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13595 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13598 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13601 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13603 #if defined(TARGET_MIPS64)
13605 check_insn(ctx
, ISA_MIPS3
);
13606 check_mips_64(ctx
);
13607 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13611 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13614 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13617 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13620 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13622 #if defined(TARGET_MIPS64)
13624 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13628 generate_exception_end(ctx
, EXCP_RI
);
13635 static inline bool is_uhi(int sdbbp_code
)
13637 #ifdef CONFIG_USER_ONLY
13640 return semihosting_enabled() && sdbbp_code
== 1;
13644 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13648 int op
, cnvt_op
, op1
, offset
;
13652 op
= (ctx
->opcode
>> 11) & 0x1f;
13653 sa
= (ctx
->opcode
>> 2) & 0x7;
13654 sa
= sa
== 0 ? 8 : sa
;
13655 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13656 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13657 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13658 op1
= offset
= ctx
->opcode
& 0x1f;
13663 case M16_OPC_ADDIUSP
:
13665 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13667 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13670 case M16_OPC_ADDIUPC
:
13671 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13674 offset
= (ctx
->opcode
& 0x7ff) << 1;
13675 offset
= (int16_t)(offset
<< 4) >> 4;
13676 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13677 /* No delay slot, so just process as a normal instruction */
13680 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13681 offset
= (((ctx
->opcode
& 0x1f) << 21)
13682 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13684 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13685 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13689 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13690 ((int8_t)ctx
->opcode
) << 1, 0);
13691 /* No delay slot, so just process as a normal instruction */
13693 case M16_OPC_BNEQZ
:
13694 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13695 ((int8_t)ctx
->opcode
) << 1, 0);
13696 /* No delay slot, so just process as a normal instruction */
13698 case M16_OPC_SHIFT
:
13699 switch (ctx
->opcode
& 0x3) {
13701 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13704 #if defined(TARGET_MIPS64)
13705 check_insn(ctx
, ISA_MIPS3
);
13706 check_mips_64(ctx
);
13707 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13709 generate_exception_end(ctx
, EXCP_RI
);
13713 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13716 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13720 #if defined(TARGET_MIPS64)
13722 check_insn(ctx
, ISA_MIPS3
);
13723 check_mips_64(ctx
);
13724 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13729 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13731 if ((ctx
->opcode
>> 4) & 1) {
13732 #if defined(TARGET_MIPS64)
13733 check_insn(ctx
, ISA_MIPS3
);
13734 check_mips_64(ctx
);
13735 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13737 generate_exception_end(ctx
, EXCP_RI
);
13740 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13744 case M16_OPC_ADDIU8
:
13746 int16_t imm
= (int8_t) ctx
->opcode
;
13748 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13753 int16_t imm
= (uint8_t) ctx
->opcode
;
13754 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13757 case M16_OPC_SLTIU
:
13759 int16_t imm
= (uint8_t) ctx
->opcode
;
13760 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13767 funct
= (ctx
->opcode
>> 8) & 0x7;
13770 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13771 ((int8_t)ctx
->opcode
) << 1, 0);
13774 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13775 ((int8_t)ctx
->opcode
) << 1, 0);
13778 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13781 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13782 ((int8_t)ctx
->opcode
) << 3);
13785 check_insn(ctx
, ISA_MIPS32
);
13787 int do_ra
= ctx
->opcode
& (1 << 6);
13788 int do_s0
= ctx
->opcode
& (1 << 5);
13789 int do_s1
= ctx
->opcode
& (1 << 4);
13790 int framesize
= ctx
->opcode
& 0xf;
13792 if (framesize
== 0) {
13795 framesize
= framesize
<< 3;
13798 if (ctx
->opcode
& (1 << 7)) {
13799 gen_mips16_save(ctx
, 0, 0,
13800 do_ra
, do_s0
, do_s1
, framesize
);
13802 gen_mips16_restore(ctx
, 0, 0,
13803 do_ra
, do_s0
, do_s1
, framesize
);
13809 int rz
= xlat(ctx
->opcode
& 0x7);
13811 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13812 ((ctx
->opcode
>> 5) & 0x7);
13813 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13817 reg32
= ctx
->opcode
& 0x1f;
13818 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13821 generate_exception_end(ctx
, EXCP_RI
);
13828 int16_t imm
= (uint8_t) ctx
->opcode
;
13830 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13835 int16_t imm
= (uint8_t) ctx
->opcode
;
13836 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13839 #if defined(TARGET_MIPS64)
13841 check_insn(ctx
, ISA_MIPS3
);
13842 check_mips_64(ctx
);
13843 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13847 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13850 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13853 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13856 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13859 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13862 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13865 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13867 #if defined (TARGET_MIPS64)
13869 check_insn(ctx
, ISA_MIPS3
);
13870 check_mips_64(ctx
);
13871 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13875 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13878 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13881 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13884 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13888 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13891 switch (ctx
->opcode
& 0x3) {
13893 mips32_op
= OPC_ADDU
;
13896 mips32_op
= OPC_SUBU
;
13898 #if defined(TARGET_MIPS64)
13900 mips32_op
= OPC_DADDU
;
13901 check_insn(ctx
, ISA_MIPS3
);
13902 check_mips_64(ctx
);
13905 mips32_op
= OPC_DSUBU
;
13906 check_insn(ctx
, ISA_MIPS3
);
13907 check_mips_64(ctx
);
13911 generate_exception_end(ctx
, EXCP_RI
);
13915 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13924 int nd
= (ctx
->opcode
>> 7) & 0x1;
13925 int link
= (ctx
->opcode
>> 6) & 0x1;
13926 int ra
= (ctx
->opcode
>> 5) & 0x1;
13929 check_insn(ctx
, ISA_MIPS32
);
13938 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13943 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13944 gen_helper_do_semihosting(cpu_env
);
13946 /* XXX: not clear which exception should be raised
13947 * when in debug mode...
13949 check_insn(ctx
, ISA_MIPS32
);
13950 generate_exception_end(ctx
, EXCP_DBp
);
13954 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13957 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13960 generate_exception_end(ctx
, EXCP_BREAK
);
13963 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13966 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13969 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13971 #if defined (TARGET_MIPS64)
13973 check_insn(ctx
, ISA_MIPS3
);
13974 check_mips_64(ctx
);
13975 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13979 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13982 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13985 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13988 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13991 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13994 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13997 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14000 check_insn(ctx
, ISA_MIPS32
);
14002 case RR_RY_CNVT_ZEB
:
14003 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14005 case RR_RY_CNVT_ZEH
:
14006 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14008 case RR_RY_CNVT_SEB
:
14009 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14011 case RR_RY_CNVT_SEH
:
14012 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14014 #if defined (TARGET_MIPS64)
14015 case RR_RY_CNVT_ZEW
:
14016 check_insn(ctx
, ISA_MIPS64
);
14017 check_mips_64(ctx
);
14018 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14020 case RR_RY_CNVT_SEW
:
14021 check_insn(ctx
, ISA_MIPS64
);
14022 check_mips_64(ctx
);
14023 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14027 generate_exception_end(ctx
, EXCP_RI
);
14032 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14034 #if defined (TARGET_MIPS64)
14036 check_insn(ctx
, ISA_MIPS3
);
14037 check_mips_64(ctx
);
14038 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14041 check_insn(ctx
, ISA_MIPS3
);
14042 check_mips_64(ctx
);
14043 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14046 check_insn(ctx
, ISA_MIPS3
);
14047 check_mips_64(ctx
);
14048 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14051 check_insn(ctx
, ISA_MIPS3
);
14052 check_mips_64(ctx
);
14053 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14057 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14060 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14063 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14066 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14068 #if defined (TARGET_MIPS64)
14070 check_insn(ctx
, ISA_MIPS3
);
14071 check_mips_64(ctx
);
14072 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14075 check_insn(ctx
, ISA_MIPS3
);
14076 check_mips_64(ctx
);
14077 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14080 check_insn(ctx
, ISA_MIPS3
);
14081 check_mips_64(ctx
);
14082 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14085 check_insn(ctx
, ISA_MIPS3
);
14086 check_mips_64(ctx
);
14087 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14091 generate_exception_end(ctx
, EXCP_RI
);
14095 case M16_OPC_EXTEND
:
14096 decode_extended_mips16_opc(env
, ctx
);
14099 #if defined(TARGET_MIPS64)
14101 funct
= (ctx
->opcode
>> 8) & 0x7;
14102 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14106 generate_exception_end(ctx
, EXCP_RI
);
14113 /* microMIPS extension to MIPS32/MIPS64 */
14116 * microMIPS32/microMIPS64 major opcodes
14118 * 1. MIPS Architecture for Programmers Volume II-B:
14119 * The microMIPS32 Instruction Set (Revision 3.05)
14121 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14123 * 2. MIPS Architecture For Programmers Volume II-A:
14124 * The MIPS64 Instruction Set (Revision 3.51)
14154 POOL32S
= 0x16, /* MIPS64 */
14155 DADDIU32
= 0x17, /* MIPS64 */
14184 /* 0x29 is reserved */
14197 /* 0x31 is reserved */
14210 SD32
= 0x36, /* MIPS64 */
14211 LD32
= 0x37, /* MIPS64 */
14213 /* 0x39 is reserved */
14229 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14251 /* POOL32A encoding of minor opcode field */
14254 /* These opcodes are distinguished only by bits 9..6; those bits are
14255 * what are recorded below. */
14292 /* The following can be distinguished by their lower 6 bits. */
14302 /* POOL32AXF encoding of minor opcode field extension */
14305 * 1. MIPS Architecture for Programmers Volume II-B:
14306 * The microMIPS32 Instruction Set (Revision 3.05)
14308 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14310 * 2. MIPS Architecture for Programmers VolumeIV-e:
14311 * The MIPS DSP Application-Specific Extension
14312 * to the microMIPS32 Architecture (Revision 2.34)
14314 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14329 /* begin of microMIPS32 DSP */
14331 /* bits 13..12 for 0x01 */
14337 /* bits 13..12 for 0x2a */
14343 /* bits 13..12 for 0x32 */
14347 /* end of microMIPS32 DSP */
14349 /* bits 15..12 for 0x2c */
14366 /* bits 15..12 for 0x34 */
14374 /* bits 15..12 for 0x3c */
14376 JR
= 0x0, /* alias */
14384 /* bits 15..12 for 0x05 */
14388 /* bits 15..12 for 0x0d */
14400 /* bits 15..12 for 0x15 */
14406 /* bits 15..12 for 0x1d */
14410 /* bits 15..12 for 0x2d */
14415 /* bits 15..12 for 0x35 */
14422 /* POOL32B encoding of minor opcode field (bits 15..12) */
14438 /* POOL32C encoding of minor opcode field (bits 15..12) */
14459 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14472 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14485 /* POOL32F encoding of minor opcode field (bits 5..0) */
14488 /* These are the bit 7..6 values */
14497 /* These are the bit 8..6 values */
14522 MOVZ_FMT_05
= 0x05,
14556 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14563 /* POOL32Fxf encoding of minor opcode extension field */
14601 /* POOL32I encoding of minor opcode field (bits 25..21) */
14631 /* These overlap and are distinguished by bit16 of the instruction */
14640 /* POOL16A encoding of minor opcode field */
14647 /* POOL16B encoding of minor opcode field */
14654 /* POOL16C encoding of minor opcode field */
14674 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14698 /* POOL16D encoding of minor opcode field */
14705 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit microMIPS register encoding to the architectural GPR number.
 * Encodings 0 and 1 select $16/$17 (s0/s1); 2..7 select $2..$7 (v0..a3).
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14719 /* Used for 16-bit store instructions. */
/* Used for 16-bit store instructions.  Like mmreg(), but encoding 0 selects
 * $0 (zero) instead of $16, matching the microMIPS store-register encoding.
 */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Field extraction helpers for 16-bit microMIPS instruction formats. */
#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate: extract `width` bits starting at `start` and
 * sign-extend by shifting up to bit 31 and arithmetically back down. */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32 - width)))                \
               << (32 - width))                                         \
     >> (32 - width))

/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14742 static void gen_addiur1sp(DisasContext
*ctx
)
14744 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14746 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14749 static void gen_addiur2(DisasContext
*ctx
)
14751 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14752 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14753 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14755 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14758 static void gen_addiusp(DisasContext
*ctx
)
14760 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14763 if (encoded
<= 1) {
14764 decoded
= 256 + encoded
;
14765 } else if (encoded
<= 255) {
14767 } else if (encoded
<= 509) {
14768 decoded
= encoded
- 512;
14770 decoded
= encoded
- 768;
14773 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14776 static void gen_addius5(DisasContext
*ctx
)
14778 int imm
= SIMM(ctx
->opcode
, 1, 4);
14779 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14781 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14784 static void gen_andi16(DisasContext
*ctx
)
14786 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14787 31, 32, 63, 64, 255, 32768, 65535 };
14788 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14789 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14790 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14792 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14795 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14796 int base
, int16_t offset
)
14801 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14802 generate_exception_end(ctx
, EXCP_RI
);
14806 t0
= tcg_temp_new();
14808 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14810 t1
= tcg_const_tl(reglist
);
14811 t2
= tcg_const_i32(ctx
->mem_idx
);
14813 save_cpu_state(ctx
, 1);
14816 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14819 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14821 #ifdef TARGET_MIPS64
14823 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14826 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14832 tcg_temp_free_i32(t2
);
14836 static void gen_pool16c_insn(DisasContext
*ctx
)
14838 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14839 int rs
= mmreg(ctx
->opcode
& 0x7);
14841 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14846 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14852 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14858 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14864 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14871 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14872 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14874 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14883 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14884 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14886 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14893 int reg
= ctx
->opcode
& 0x1f;
14895 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14901 int reg
= ctx
->opcode
& 0x1f;
14902 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14903 /* Let normal delay slot handling in our caller take us
14904 to the branch target. */
14909 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14910 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14914 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14915 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14919 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14923 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14926 generate_exception_end(ctx
, EXCP_BREAK
);
14929 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14930 gen_helper_do_semihosting(cpu_env
);
14932 /* XXX: not clear which exception should be raised
14933 * when in debug mode...
14935 check_insn(ctx
, ISA_MIPS32
);
14936 generate_exception_end(ctx
, EXCP_DBp
);
14939 case JRADDIUSP
+ 0:
14940 case JRADDIUSP
+ 1:
14942 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14943 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14944 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14945 /* Let normal delay slot handling in our caller take us
14946 to the branch target. */
14950 generate_exception_end(ctx
, EXCP_RI
);
14955 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14958 int rd
, rs
, re
, rt
;
14959 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14960 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14961 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14962 rd
= rd_enc
[enc_dest
];
14963 re
= re_enc
[enc_dest
];
14964 rs
= rs_rt_enc
[enc_rs
];
14965 rt
= rs_rt_enc
[enc_rt
];
14967 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14969 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14972 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14974 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14978 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14980 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14981 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14983 switch (ctx
->opcode
& 0xf) {
14985 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14988 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14992 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14993 int offset
= extract32(ctx
->opcode
, 4, 4);
14994 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14997 case R6_JRC16
: /* JRCADDIUSP */
14998 if ((ctx
->opcode
>> 4) & 1) {
15000 int imm
= extract32(ctx
->opcode
, 5, 5);
15001 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15002 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15005 rs
= extract32(ctx
->opcode
, 5, 5);
15006 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15018 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15019 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15020 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15021 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15025 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15028 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15032 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15033 int offset
= extract32(ctx
->opcode
, 4, 4);
15034 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15037 case JALRC16
: /* BREAK16, SDBBP16 */
15038 switch (ctx
->opcode
& 0x3f) {
15040 case JALRC16
+ 0x20:
15042 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15047 generate_exception(ctx
, EXCP_BREAK
);
15051 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15052 gen_helper_do_semihosting(cpu_env
);
15054 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15055 generate_exception(ctx
, EXCP_RI
);
15057 generate_exception(ctx
, EXCP_DBp
);
15064 generate_exception(ctx
, EXCP_RI
);
15069 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
15071 TCGv t0
= tcg_temp_new();
15072 TCGv t1
= tcg_temp_new();
15074 gen_load_gpr(t0
, base
);
15077 gen_load_gpr(t1
, index
);
15078 tcg_gen_shli_tl(t1
, t1
, 2);
15079 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15082 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15083 gen_store_gpr(t1
, rd
);
15089 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
15090 int base
, int16_t offset
)
15094 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15095 generate_exception_end(ctx
, EXCP_RI
);
15099 t0
= tcg_temp_new();
15100 t1
= tcg_temp_new();
15102 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15107 generate_exception_end(ctx
, EXCP_RI
);
15110 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15111 gen_store_gpr(t1
, rd
);
15112 tcg_gen_movi_tl(t1
, 4);
15113 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15114 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15115 gen_store_gpr(t1
, rd
+1);
15118 gen_load_gpr(t1
, rd
);
15119 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15120 tcg_gen_movi_tl(t1
, 4);
15121 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15122 gen_load_gpr(t1
, rd
+1);
15123 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15125 #ifdef TARGET_MIPS64
15128 generate_exception_end(ctx
, EXCP_RI
);
15131 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15132 gen_store_gpr(t1
, rd
);
15133 tcg_gen_movi_tl(t1
, 8);
15134 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15135 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15136 gen_store_gpr(t1
, rd
+1);
15139 gen_load_gpr(t1
, rd
);
15140 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15141 tcg_gen_movi_tl(t1
, 8);
15142 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15143 gen_load_gpr(t1
, rd
+1);
15144 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15152 static void gen_sync(int stype
)
15154 TCGBar tcg_mo
= TCG_BAR_SC
;
15157 case 0x4: /* SYNC_WMB */
15158 tcg_mo
|= TCG_MO_ST_ST
;
15160 case 0x10: /* SYNC_MB */
15161 tcg_mo
|= TCG_MO_ALL
;
15163 case 0x11: /* SYNC_ACQUIRE */
15164 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15166 case 0x12: /* SYNC_RELEASE */
15167 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15169 case 0x13: /* SYNC_RMB */
15170 tcg_mo
|= TCG_MO_LD_LD
;
15173 tcg_mo
|= TCG_MO_ALL
;
15177 tcg_gen_mb(tcg_mo
);
15180 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15182 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15183 int minor
= (ctx
->opcode
>> 12) & 0xf;
15184 uint32_t mips32_op
;
15186 switch (extension
) {
15188 mips32_op
= OPC_TEQ
;
15191 mips32_op
= OPC_TGE
;
15194 mips32_op
= OPC_TGEU
;
15197 mips32_op
= OPC_TLT
;
15200 mips32_op
= OPC_TLTU
;
15203 mips32_op
= OPC_TNE
;
15205 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15207 #ifndef CONFIG_USER_ONLY
15210 check_cp0_enabled(ctx
);
15212 /* Treat as NOP. */
15215 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15219 check_cp0_enabled(ctx
);
15221 TCGv t0
= tcg_temp_new();
15223 gen_load_gpr(t0
, rt
);
15224 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15230 switch (minor
& 3) {
15232 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15235 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15238 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15241 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15244 goto pool32axf_invalid
;
15248 switch (minor
& 3) {
15250 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15253 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15256 goto pool32axf_invalid
;
15262 check_insn(ctx
, ISA_MIPS32R6
);
15263 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15266 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15269 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15272 mips32_op
= OPC_CLO
;
15275 mips32_op
= OPC_CLZ
;
15277 check_insn(ctx
, ISA_MIPS32
);
15278 gen_cl(ctx
, mips32_op
, rt
, rs
);
15281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15282 gen_rdhwr(ctx
, rt
, rs
, 0);
15285 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15289 mips32_op
= OPC_MULT
;
15292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15293 mips32_op
= OPC_MULTU
;
15296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15297 mips32_op
= OPC_DIV
;
15300 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15301 mips32_op
= OPC_DIVU
;
15304 check_insn(ctx
, ISA_MIPS32
);
15305 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15308 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15309 mips32_op
= OPC_MADD
;
15312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15313 mips32_op
= OPC_MADDU
;
15316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15317 mips32_op
= OPC_MSUB
;
15320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15321 mips32_op
= OPC_MSUBU
;
15323 check_insn(ctx
, ISA_MIPS32
);
15324 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15327 goto pool32axf_invalid
;
15338 generate_exception_err(ctx
, EXCP_CpU
, 2);
15341 goto pool32axf_invalid
;
15346 case JALR
: /* JALRC */
15347 case JALR_HB
: /* JALRC_HB */
15348 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15349 /* JALRC, JALRC_HB */
15350 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15352 /* JALR, JALR_HB */
15353 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15354 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15360 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15361 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15364 goto pool32axf_invalid
;
15370 check_cp0_enabled(ctx
);
15371 check_insn(ctx
, ISA_MIPS32R2
);
15372 gen_load_srsgpr(rs
, rt
);
15375 check_cp0_enabled(ctx
);
15376 check_insn(ctx
, ISA_MIPS32R2
);
15377 gen_store_srsgpr(rs
, rt
);
15380 goto pool32axf_invalid
;
15383 #ifndef CONFIG_USER_ONLY
15387 mips32_op
= OPC_TLBP
;
15390 mips32_op
= OPC_TLBR
;
15393 mips32_op
= OPC_TLBWI
;
15396 mips32_op
= OPC_TLBWR
;
15399 mips32_op
= OPC_TLBINV
;
15402 mips32_op
= OPC_TLBINVF
;
15405 mips32_op
= OPC_WAIT
;
15408 mips32_op
= OPC_DERET
;
15411 mips32_op
= OPC_ERET
;
15413 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15416 goto pool32axf_invalid
;
15422 check_cp0_enabled(ctx
);
15424 TCGv t0
= tcg_temp_new();
15426 save_cpu_state(ctx
, 1);
15427 gen_helper_di(t0
, cpu_env
);
15428 gen_store_gpr(t0
, rs
);
15429 /* Stop translation as we may have switched the execution mode */
15430 ctx
->base
.is_jmp
= DISAS_STOP
;
15435 check_cp0_enabled(ctx
);
15437 TCGv t0
= tcg_temp_new();
15439 save_cpu_state(ctx
, 1);
15440 gen_helper_ei(t0
, cpu_env
);
15441 gen_store_gpr(t0
, rs
);
15442 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15443 of translated code to check for pending interrupts. */
15444 gen_save_pc(ctx
->base
.pc_next
+ 4);
15445 ctx
->base
.is_jmp
= DISAS_EXIT
;
15450 goto pool32axf_invalid
;
15457 gen_sync(extract32(ctx
->opcode
, 16, 5));
15460 generate_exception_end(ctx
, EXCP_SYSCALL
);
15463 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15464 gen_helper_do_semihosting(cpu_env
);
15466 check_insn(ctx
, ISA_MIPS32
);
15467 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15468 generate_exception_end(ctx
, EXCP_RI
);
15470 generate_exception_end(ctx
, EXCP_DBp
);
15475 goto pool32axf_invalid
;
15479 switch (minor
& 3) {
15481 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15484 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15487 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15490 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15493 goto pool32axf_invalid
;
15497 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15500 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15503 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15506 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15509 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15512 goto pool32axf_invalid
;
15517 MIPS_INVAL("pool32axf");
15518 generate_exception_end(ctx
, EXCP_RI
);
15523 /* Values for microMIPS fmt field. Variable-width, depending on which
15524 formats the instruction supports. */
15543 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15545 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15546 uint32_t mips32_op
;
15548 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15549 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15550 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15552 switch (extension
) {
15553 case FLOAT_1BIT_FMT(CFC1
, 0):
15554 mips32_op
= OPC_CFC1
;
15556 case FLOAT_1BIT_FMT(CTC1
, 0):
15557 mips32_op
= OPC_CTC1
;
15559 case FLOAT_1BIT_FMT(MFC1
, 0):
15560 mips32_op
= OPC_MFC1
;
15562 case FLOAT_1BIT_FMT(MTC1
, 0):
15563 mips32_op
= OPC_MTC1
;
15565 case FLOAT_1BIT_FMT(MFHC1
, 0):
15566 mips32_op
= OPC_MFHC1
;
15568 case FLOAT_1BIT_FMT(MTHC1
, 0):
15569 mips32_op
= OPC_MTHC1
;
15571 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15574 /* Reciprocal square root */
15575 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15576 mips32_op
= OPC_RSQRT_S
;
15578 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15579 mips32_op
= OPC_RSQRT_D
;
15583 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15584 mips32_op
= OPC_SQRT_S
;
15586 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15587 mips32_op
= OPC_SQRT_D
;
15591 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15592 mips32_op
= OPC_RECIP_S
;
15594 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15595 mips32_op
= OPC_RECIP_D
;
15599 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15600 mips32_op
= OPC_FLOOR_L_S
;
15602 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15603 mips32_op
= OPC_FLOOR_L_D
;
15605 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15606 mips32_op
= OPC_FLOOR_W_S
;
15608 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15609 mips32_op
= OPC_FLOOR_W_D
;
15613 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15614 mips32_op
= OPC_CEIL_L_S
;
15616 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15617 mips32_op
= OPC_CEIL_L_D
;
15619 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15620 mips32_op
= OPC_CEIL_W_S
;
15622 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15623 mips32_op
= OPC_CEIL_W_D
;
15627 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15628 mips32_op
= OPC_TRUNC_L_S
;
15630 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15631 mips32_op
= OPC_TRUNC_L_D
;
15633 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15634 mips32_op
= OPC_TRUNC_W_S
;
15636 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15637 mips32_op
= OPC_TRUNC_W_D
;
15641 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15642 mips32_op
= OPC_ROUND_L_S
;
15644 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15645 mips32_op
= OPC_ROUND_L_D
;
15647 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15648 mips32_op
= OPC_ROUND_W_S
;
15650 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15651 mips32_op
= OPC_ROUND_W_D
;
15654 /* Integer to floating-point conversion */
15655 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15656 mips32_op
= OPC_CVT_L_S
;
15658 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15659 mips32_op
= OPC_CVT_L_D
;
15661 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15662 mips32_op
= OPC_CVT_W_S
;
15664 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15665 mips32_op
= OPC_CVT_W_D
;
15668 /* Paired-foo conversions */
15669 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15670 mips32_op
= OPC_CVT_S_PL
;
15672 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15673 mips32_op
= OPC_CVT_S_PU
;
15675 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15676 mips32_op
= OPC_CVT_PW_PS
;
15678 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15679 mips32_op
= OPC_CVT_PS_PW
;
15682 /* Floating-point moves */
15683 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15684 mips32_op
= OPC_MOV_S
;
15686 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15687 mips32_op
= OPC_MOV_D
;
15689 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15690 mips32_op
= OPC_MOV_PS
;
15693 /* Absolute value */
15694 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15695 mips32_op
= OPC_ABS_S
;
15697 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15698 mips32_op
= OPC_ABS_D
;
15700 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15701 mips32_op
= OPC_ABS_PS
;
15705 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15706 mips32_op
= OPC_NEG_S
;
15708 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15709 mips32_op
= OPC_NEG_D
;
15711 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15712 mips32_op
= OPC_NEG_PS
;
15715 /* Reciprocal square root step */
15716 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15717 mips32_op
= OPC_RSQRT1_S
;
15719 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15720 mips32_op
= OPC_RSQRT1_D
;
15722 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15723 mips32_op
= OPC_RSQRT1_PS
;
15726 /* Reciprocal step */
15727 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15728 mips32_op
= OPC_RECIP1_S
;
15730 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15731 mips32_op
= OPC_RECIP1_S
;
15733 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15734 mips32_op
= OPC_RECIP1_PS
;
15737 /* Conversions from double */
15738 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15739 mips32_op
= OPC_CVT_D_S
;
15741 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15742 mips32_op
= OPC_CVT_D_W
;
15744 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15745 mips32_op
= OPC_CVT_D_L
;
15748 /* Conversions from single */
15749 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15750 mips32_op
= OPC_CVT_S_D
;
15752 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15753 mips32_op
= OPC_CVT_S_W
;
15755 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15756 mips32_op
= OPC_CVT_S_L
;
15758 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15761 /* Conditional moves on floating-point codes */
15762 case COND_FLOAT_MOV(MOVT
, 0):
15763 case COND_FLOAT_MOV(MOVT
, 1):
15764 case COND_FLOAT_MOV(MOVT
, 2):
15765 case COND_FLOAT_MOV(MOVT
, 3):
15766 case COND_FLOAT_MOV(MOVT
, 4):
15767 case COND_FLOAT_MOV(MOVT
, 5):
15768 case COND_FLOAT_MOV(MOVT
, 6):
15769 case COND_FLOAT_MOV(MOVT
, 7):
15770 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15771 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15773 case COND_FLOAT_MOV(MOVF
, 0):
15774 case COND_FLOAT_MOV(MOVF
, 1):
15775 case COND_FLOAT_MOV(MOVF
, 2):
15776 case COND_FLOAT_MOV(MOVF
, 3):
15777 case COND_FLOAT_MOV(MOVF
, 4):
15778 case COND_FLOAT_MOV(MOVF
, 5):
15779 case COND_FLOAT_MOV(MOVF
, 6):
15780 case COND_FLOAT_MOV(MOVF
, 7):
15781 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15782 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15785 MIPS_INVAL("pool32fxf");
15786 generate_exception_end(ctx
, EXCP_RI
);
15791 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15795 int rt
, rs
, rd
, rr
;
15797 uint32_t op
, minor
, minor2
, mips32_op
;
15798 uint32_t cond
, fmt
, cc
;
15800 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15801 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15803 rt
= (ctx
->opcode
>> 21) & 0x1f;
15804 rs
= (ctx
->opcode
>> 16) & 0x1f;
15805 rd
= (ctx
->opcode
>> 11) & 0x1f;
15806 rr
= (ctx
->opcode
>> 6) & 0x1f;
15807 imm
= (int16_t) ctx
->opcode
;
15809 op
= (ctx
->opcode
>> 26) & 0x3f;
15812 minor
= ctx
->opcode
& 0x3f;
15815 minor
= (ctx
->opcode
>> 6) & 0xf;
15818 mips32_op
= OPC_SLL
;
15821 mips32_op
= OPC_SRA
;
15824 mips32_op
= OPC_SRL
;
15827 mips32_op
= OPC_ROTR
;
15829 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15832 check_insn(ctx
, ISA_MIPS32R6
);
15833 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15836 check_insn(ctx
, ISA_MIPS32R6
);
15837 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15840 check_insn(ctx
, ISA_MIPS32R6
);
15841 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15844 goto pool32a_invalid
;
15848 minor
= (ctx
->opcode
>> 6) & 0xf;
15852 mips32_op
= OPC_ADD
;
15855 mips32_op
= OPC_ADDU
;
15858 mips32_op
= OPC_SUB
;
15861 mips32_op
= OPC_SUBU
;
15864 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15865 mips32_op
= OPC_MUL
;
15867 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15871 mips32_op
= OPC_SLLV
;
15874 mips32_op
= OPC_SRLV
;
15877 mips32_op
= OPC_SRAV
;
15880 mips32_op
= OPC_ROTRV
;
15882 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15884 /* Logical operations */
15886 mips32_op
= OPC_AND
;
15889 mips32_op
= OPC_OR
;
15892 mips32_op
= OPC_NOR
;
15895 mips32_op
= OPC_XOR
;
15897 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15899 /* Set less than */
15901 mips32_op
= OPC_SLT
;
15904 mips32_op
= OPC_SLTU
;
15906 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15909 goto pool32a_invalid
;
15913 minor
= (ctx
->opcode
>> 6) & 0xf;
15915 /* Conditional moves */
15916 case MOVN
: /* MUL */
15917 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15919 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15922 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15925 case MOVZ
: /* MUH */
15926 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15928 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15931 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15935 check_insn(ctx
, ISA_MIPS32R6
);
15936 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15939 check_insn(ctx
, ISA_MIPS32R6
);
15940 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15942 case LWXS
: /* DIV */
15943 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15945 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15948 gen_ldxs(ctx
, rs
, rt
, rd
);
15952 check_insn(ctx
, ISA_MIPS32R6
);
15953 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15956 check_insn(ctx
, ISA_MIPS32R6
);
15957 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15960 check_insn(ctx
, ISA_MIPS32R6
);
15961 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15964 goto pool32a_invalid
;
15968 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15971 check_insn(ctx
, ISA_MIPS32R6
);
15972 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15973 extract32(ctx
->opcode
, 9, 2));
15976 check_insn(ctx
, ISA_MIPS32R6
);
15977 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15980 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15983 gen_pool32axf(env
, ctx
, rt
, rs
);
15986 generate_exception_end(ctx
, EXCP_BREAK
);
15989 check_insn(ctx
, ISA_MIPS32R6
);
15990 generate_exception_end(ctx
, EXCP_RI
);
15994 MIPS_INVAL("pool32a");
15995 generate_exception_end(ctx
, EXCP_RI
);
16000 minor
= (ctx
->opcode
>> 12) & 0xf;
16003 check_cp0_enabled(ctx
);
16004 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16005 gen_cache_operation(ctx
, rt
, rs
, imm
);
16010 /* COP2: Not implemented. */
16011 generate_exception_err(ctx
, EXCP_CpU
, 2);
16013 #ifdef TARGET_MIPS64
16016 check_insn(ctx
, ISA_MIPS3
);
16017 check_mips_64(ctx
);
16022 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16024 #ifdef TARGET_MIPS64
16027 check_insn(ctx
, ISA_MIPS3
);
16028 check_mips_64(ctx
);
16033 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16036 MIPS_INVAL("pool32b");
16037 generate_exception_end(ctx
, EXCP_RI
);
16042 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16043 minor
= ctx
->opcode
& 0x3f;
16044 check_cp1_enabled(ctx
);
16047 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16048 mips32_op
= OPC_ALNV_PS
;
16051 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16052 mips32_op
= OPC_MADD_S
;
16055 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16056 mips32_op
= OPC_MADD_D
;
16059 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16060 mips32_op
= OPC_MADD_PS
;
16063 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16064 mips32_op
= OPC_MSUB_S
;
16067 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16068 mips32_op
= OPC_MSUB_D
;
16071 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16072 mips32_op
= OPC_MSUB_PS
;
16075 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16076 mips32_op
= OPC_NMADD_S
;
16079 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16080 mips32_op
= OPC_NMADD_D
;
16083 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16084 mips32_op
= OPC_NMADD_PS
;
16087 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16088 mips32_op
= OPC_NMSUB_S
;
16091 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16092 mips32_op
= OPC_NMSUB_D
;
16095 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16096 mips32_op
= OPC_NMSUB_PS
;
16098 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16100 case CABS_COND_FMT
:
16101 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16102 cond
= (ctx
->opcode
>> 6) & 0xf;
16103 cc
= (ctx
->opcode
>> 13) & 0x7;
16104 fmt
= (ctx
->opcode
>> 10) & 0x3;
16107 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16110 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16113 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16116 goto pool32f_invalid
;
16120 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16121 cond
= (ctx
->opcode
>> 6) & 0xf;
16122 cc
= (ctx
->opcode
>> 13) & 0x7;
16123 fmt
= (ctx
->opcode
>> 10) & 0x3;
16126 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16129 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16132 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16135 goto pool32f_invalid
;
16139 check_insn(ctx
, ISA_MIPS32R6
);
16140 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16143 check_insn(ctx
, ISA_MIPS32R6
);
16144 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16147 gen_pool32fxf(ctx
, rt
, rs
);
16151 switch ((ctx
->opcode
>> 6) & 0x7) {
16153 mips32_op
= OPC_PLL_PS
;
16156 mips32_op
= OPC_PLU_PS
;
16159 mips32_op
= OPC_PUL_PS
;
16162 mips32_op
= OPC_PUU_PS
;
16165 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16166 mips32_op
= OPC_CVT_PS_S
;
16168 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16171 goto pool32f_invalid
;
16175 check_insn(ctx
, ISA_MIPS32R6
);
16176 switch ((ctx
->opcode
>> 9) & 0x3) {
16178 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16181 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16184 goto pool32f_invalid
;
16189 switch ((ctx
->opcode
>> 6) & 0x7) {
16191 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16192 mips32_op
= OPC_LWXC1
;
16195 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16196 mips32_op
= OPC_SWXC1
;
16199 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16200 mips32_op
= OPC_LDXC1
;
16203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16204 mips32_op
= OPC_SDXC1
;
16207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16208 mips32_op
= OPC_LUXC1
;
16211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16212 mips32_op
= OPC_SUXC1
;
16214 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16217 goto pool32f_invalid
;
16221 check_insn(ctx
, ISA_MIPS32R6
);
16222 switch ((ctx
->opcode
>> 9) & 0x3) {
16224 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16227 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16230 goto pool32f_invalid
;
16235 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16236 fmt
= (ctx
->opcode
>> 9) & 0x3;
16237 switch ((ctx
->opcode
>> 6) & 0x7) {
16241 mips32_op
= OPC_RSQRT2_S
;
16244 mips32_op
= OPC_RSQRT2_D
;
16247 mips32_op
= OPC_RSQRT2_PS
;
16250 goto pool32f_invalid
;
16256 mips32_op
= OPC_RECIP2_S
;
16259 mips32_op
= OPC_RECIP2_D
;
16262 mips32_op
= OPC_RECIP2_PS
;
16265 goto pool32f_invalid
;
16269 mips32_op
= OPC_ADDR_PS
;
16272 mips32_op
= OPC_MULR_PS
;
16274 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16277 goto pool32f_invalid
;
16281 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16282 cc
= (ctx
->opcode
>> 13) & 0x7;
16283 fmt
= (ctx
->opcode
>> 9) & 0x3;
16284 switch ((ctx
->opcode
>> 6) & 0x7) {
16285 case MOVF_FMT
: /* RINT_FMT */
16286 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16290 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16293 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16296 goto pool32f_invalid
;
16302 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16305 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16309 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16312 goto pool32f_invalid
;
16316 case MOVT_FMT
: /* CLASS_FMT */
16317 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16321 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16324 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16327 goto pool32f_invalid
;
16333 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16336 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16340 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16343 goto pool32f_invalid
;
16348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16351 goto pool32f_invalid
;
16354 #define FINSN_3ARG_SDPS(prfx) \
16355 switch ((ctx->opcode >> 8) & 0x3) { \
16357 mips32_op = OPC_##prfx##_S; \
16360 mips32_op = OPC_##prfx##_D; \
16362 case FMT_SDPS_PS: \
16364 mips32_op = OPC_##prfx##_PS; \
16367 goto pool32f_invalid; \
16370 check_insn(ctx
, ISA_MIPS32R6
);
16371 switch ((ctx
->opcode
>> 9) & 0x3) {
16373 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16376 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16379 goto pool32f_invalid
;
16383 check_insn(ctx
, ISA_MIPS32R6
);
16384 switch ((ctx
->opcode
>> 9) & 0x3) {
16386 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16389 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16392 goto pool32f_invalid
;
16396 /* regular FP ops */
16397 switch ((ctx
->opcode
>> 6) & 0x3) {
16399 FINSN_3ARG_SDPS(ADD
);
16402 FINSN_3ARG_SDPS(SUB
);
16405 FINSN_3ARG_SDPS(MUL
);
16408 fmt
= (ctx
->opcode
>> 8) & 0x3;
16410 mips32_op
= OPC_DIV_D
;
16411 } else if (fmt
== 0) {
16412 mips32_op
= OPC_DIV_S
;
16414 goto pool32f_invalid
;
16418 goto pool32f_invalid
;
16423 switch ((ctx
->opcode
>> 6) & 0x7) {
16424 case MOVN_FMT
: /* SELEQZ_FMT */
16425 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16427 switch ((ctx
->opcode
>> 9) & 0x3) {
16429 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16432 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16435 goto pool32f_invalid
;
16439 FINSN_3ARG_SDPS(MOVN
);
16443 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16444 FINSN_3ARG_SDPS(MOVN
);
16446 case MOVZ_FMT
: /* SELNEZ_FMT */
16447 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16449 switch ((ctx
->opcode
>> 9) & 0x3) {
16451 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16454 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16457 goto pool32f_invalid
;
16461 FINSN_3ARG_SDPS(MOVZ
);
16465 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16466 FINSN_3ARG_SDPS(MOVZ
);
16469 check_insn(ctx
, ISA_MIPS32R6
);
16470 switch ((ctx
->opcode
>> 9) & 0x3) {
16472 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16475 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16478 goto pool32f_invalid
;
16482 check_insn(ctx
, ISA_MIPS32R6
);
16483 switch ((ctx
->opcode
>> 9) & 0x3) {
16485 mips32_op
= OPC_MADDF_S
;
16488 mips32_op
= OPC_MADDF_D
;
16491 goto pool32f_invalid
;
16495 check_insn(ctx
, ISA_MIPS32R6
);
16496 switch ((ctx
->opcode
>> 9) & 0x3) {
16498 mips32_op
= OPC_MSUBF_S
;
16501 mips32_op
= OPC_MSUBF_D
;
16504 goto pool32f_invalid
;
16508 goto pool32f_invalid
;
16512 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16516 MIPS_INVAL("pool32f");
16517 generate_exception_end(ctx
, EXCP_RI
);
16521 generate_exception_err(ctx
, EXCP_CpU
, 1);
16525 minor
= (ctx
->opcode
>> 21) & 0x1f;
16528 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16529 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16532 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16533 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16534 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16537 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16538 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16539 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16542 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16543 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16546 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16547 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16548 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16552 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16553 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16556 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16557 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16560 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16561 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16565 case TLTI
: /* BC1EQZC */
16566 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16568 check_cp1_enabled(ctx
);
16569 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16572 mips32_op
= OPC_TLTI
;
16576 case TGEI
: /* BC1NEZC */
16577 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16579 check_cp1_enabled(ctx
);
16580 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16583 mips32_op
= OPC_TGEI
;
16588 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16589 mips32_op
= OPC_TLTIU
;
16592 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16593 mips32_op
= OPC_TGEIU
;
16595 case TNEI
: /* SYNCI */
16596 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16598 /* Break the TB to be able to sync copied instructions
16600 ctx
->base
.is_jmp
= DISAS_STOP
;
16603 mips32_op
= OPC_TNEI
;
16608 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16609 mips32_op
= OPC_TEQI
;
16611 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16616 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16617 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16618 4, rs
, 0, imm
<< 1, 0);
16619 /* Compact branches don't have a delay slot, so just let
16620 the normal delay slot handling take us to the branch
16624 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16625 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16628 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16629 /* Break the TB to be able to sync copied instructions
16631 ctx
->base
.is_jmp
= DISAS_STOP
;
16635 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16636 /* COP2: Not implemented. */
16637 generate_exception_err(ctx
, EXCP_CpU
, 2);
16640 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16641 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16644 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16645 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16648 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16649 mips32_op
= OPC_BC1FANY4
;
16652 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16653 mips32_op
= OPC_BC1TANY4
;
16656 check_insn(ctx
, ASE_MIPS3D
);
16659 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16660 check_cp1_enabled(ctx
);
16661 gen_compute_branch1(ctx
, mips32_op
,
16662 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16664 generate_exception_err(ctx
, EXCP_CpU
, 1);
16669 /* MIPS DSP: not implemented */
16672 MIPS_INVAL("pool32i");
16673 generate_exception_end(ctx
, EXCP_RI
);
16678 minor
= (ctx
->opcode
>> 12) & 0xf;
16679 offset
= sextract32(ctx
->opcode
, 0,
16680 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16684 mips32_op
= OPC_LWL
;
16687 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16688 mips32_op
= OPC_SWL
;
16691 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16692 mips32_op
= OPC_LWR
;
16695 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16696 mips32_op
= OPC_SWR
;
16698 #if defined(TARGET_MIPS64)
16700 check_insn(ctx
, ISA_MIPS3
);
16701 check_mips_64(ctx
);
16702 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16703 mips32_op
= OPC_LDL
;
16706 check_insn(ctx
, ISA_MIPS3
);
16707 check_mips_64(ctx
);
16708 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16709 mips32_op
= OPC_SDL
;
16712 check_insn(ctx
, ISA_MIPS3
);
16713 check_mips_64(ctx
);
16714 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16715 mips32_op
= OPC_LDR
;
16718 check_insn(ctx
, ISA_MIPS3
);
16719 check_mips_64(ctx
);
16720 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16721 mips32_op
= OPC_SDR
;
16724 check_insn(ctx
, ISA_MIPS3
);
16725 check_mips_64(ctx
);
16726 mips32_op
= OPC_LWU
;
16729 check_insn(ctx
, ISA_MIPS3
);
16730 check_mips_64(ctx
);
16731 mips32_op
= OPC_LLD
;
16735 mips32_op
= OPC_LL
;
16738 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16741 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16744 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16746 #if defined(TARGET_MIPS64)
16748 check_insn(ctx
, ISA_MIPS3
);
16749 check_mips_64(ctx
);
16750 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16755 MIPS_INVAL("pool32c ld-eva");
16756 generate_exception_end(ctx
, EXCP_RI
);
16759 check_cp0_enabled(ctx
);
16761 minor2
= (ctx
->opcode
>> 9) & 0x7;
16762 offset
= sextract32(ctx
->opcode
, 0, 9);
16765 mips32_op
= OPC_LBUE
;
16768 mips32_op
= OPC_LHUE
;
16771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16772 mips32_op
= OPC_LWLE
;
16775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16776 mips32_op
= OPC_LWRE
;
16779 mips32_op
= OPC_LBE
;
16782 mips32_op
= OPC_LHE
;
16785 mips32_op
= OPC_LLE
;
16788 mips32_op
= OPC_LWE
;
16794 MIPS_INVAL("pool32c st-eva");
16795 generate_exception_end(ctx
, EXCP_RI
);
16798 check_cp0_enabled(ctx
);
16800 minor2
= (ctx
->opcode
>> 9) & 0x7;
16801 offset
= sextract32(ctx
->opcode
, 0, 9);
16804 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16805 mips32_op
= OPC_SWLE
;
16808 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16809 mips32_op
= OPC_SWRE
;
16812 /* Treat as no-op */
16813 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16814 /* hint codes 24-31 are reserved and signal RI */
16815 generate_exception(ctx
, EXCP_RI
);
16819 /* Treat as no-op */
16820 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16821 gen_cache_operation(ctx
, rt
, rs
, offset
);
16825 mips32_op
= OPC_SBE
;
16828 mips32_op
= OPC_SHE
;
16831 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16834 mips32_op
= OPC_SWE
;
16839 /* Treat as no-op */
16840 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16841 /* hint codes 24-31 are reserved and signal RI */
16842 generate_exception(ctx
, EXCP_RI
);
16846 MIPS_INVAL("pool32c");
16847 generate_exception_end(ctx
, EXCP_RI
);
16851 case ADDI32
: /* AUI, LUI */
16852 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16854 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16857 mips32_op
= OPC_ADDI
;
16862 mips32_op
= OPC_ADDIU
;
16864 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16867 /* Logical operations */
16869 mips32_op
= OPC_ORI
;
16872 mips32_op
= OPC_XORI
;
16875 mips32_op
= OPC_ANDI
;
16877 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16880 /* Set less than immediate */
16882 mips32_op
= OPC_SLTI
;
16885 mips32_op
= OPC_SLTIU
;
16887 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16890 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16891 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16892 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16893 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16895 case JALS32
: /* BOVC, BEQC, BEQZALC */
16896 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16899 mips32_op
= OPC_BOVC
;
16900 } else if (rs
< rt
&& rs
== 0) {
16902 mips32_op
= OPC_BEQZALC
;
16905 mips32_op
= OPC_BEQC
;
16907 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16910 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16911 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16912 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16915 case BEQ32
: /* BC */
16916 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16918 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16919 sextract32(ctx
->opcode
<< 1, 0, 27));
16922 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16925 case BNE32
: /* BALC */
16926 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16928 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16929 sextract32(ctx
->opcode
<< 1, 0, 27));
16932 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16935 case J32
: /* BGTZC, BLTZC, BLTC */
16936 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16937 if (rs
== 0 && rt
!= 0) {
16939 mips32_op
= OPC_BGTZC
;
16940 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16942 mips32_op
= OPC_BLTZC
;
16945 mips32_op
= OPC_BLTC
;
16947 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16950 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16951 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16954 case JAL32
: /* BLEZC, BGEZC, BGEC */
16955 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16956 if (rs
== 0 && rt
!= 0) {
16958 mips32_op
= OPC_BLEZC
;
16959 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16961 mips32_op
= OPC_BGEZC
;
16964 mips32_op
= OPC_BGEC
;
16966 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16969 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16970 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16971 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16974 /* Floating point (COP1) */
16976 mips32_op
= OPC_LWC1
;
16979 mips32_op
= OPC_LDC1
;
16982 mips32_op
= OPC_SWC1
;
16985 mips32_op
= OPC_SDC1
;
16987 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16989 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16990 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16991 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16992 switch ((ctx
->opcode
>> 16) & 0x1f) {
17001 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17004 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17007 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17017 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17020 generate_exception(ctx
, EXCP_RI
);
17025 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17026 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17028 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17031 case BNVC
: /* BNEC, BNEZALC */
17032 check_insn(ctx
, ISA_MIPS32R6
);
17035 mips32_op
= OPC_BNVC
;
17036 } else if (rs
< rt
&& rs
== 0) {
17038 mips32_op
= OPC_BNEZALC
;
17041 mips32_op
= OPC_BNEC
;
17043 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17045 case R6_BNEZC
: /* JIALC */
17046 check_insn(ctx
, ISA_MIPS32R6
);
17049 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17050 sextract32(ctx
->opcode
<< 1, 0, 22));
17053 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17056 case R6_BEQZC
: /* JIC */
17057 check_insn(ctx
, ISA_MIPS32R6
);
17060 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17061 sextract32(ctx
->opcode
<< 1, 0, 22));
17064 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17067 case BLEZALC
: /* BGEZALC, BGEUC */
17068 check_insn(ctx
, ISA_MIPS32R6
);
17069 if (rs
== 0 && rt
!= 0) {
17071 mips32_op
= OPC_BLEZALC
;
17072 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17074 mips32_op
= OPC_BGEZALC
;
17077 mips32_op
= OPC_BGEUC
;
17079 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17081 case BGTZALC
: /* BLTZALC, BLTUC */
17082 check_insn(ctx
, ISA_MIPS32R6
);
17083 if (rs
== 0 && rt
!= 0) {
17085 mips32_op
= OPC_BGTZALC
;
17086 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17088 mips32_op
= OPC_BLTZALC
;
17091 mips32_op
= OPC_BLTUC
;
17093 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17095 /* Loads and stores */
17097 mips32_op
= OPC_LB
;
17100 mips32_op
= OPC_LBU
;
17103 mips32_op
= OPC_LH
;
17106 mips32_op
= OPC_LHU
;
17109 mips32_op
= OPC_LW
;
17111 #ifdef TARGET_MIPS64
17113 check_insn(ctx
, ISA_MIPS3
);
17114 check_mips_64(ctx
);
17115 mips32_op
= OPC_LD
;
17118 check_insn(ctx
, ISA_MIPS3
);
17119 check_mips_64(ctx
);
17120 mips32_op
= OPC_SD
;
17124 mips32_op
= OPC_SB
;
17127 mips32_op
= OPC_SH
;
17130 mips32_op
= OPC_SW
;
17133 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17136 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17139 generate_exception_end(ctx
, EXCP_RI
);
17144 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
17148 /* make sure instructions are on a halfword boundary */
17149 if (ctx
->base
.pc_next
& 0x1) {
17150 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17151 generate_exception_end(ctx
, EXCP_AdEL
);
17155 op
= (ctx
->opcode
>> 10) & 0x3f;
17156 /* Enforce properly-sized instructions in a delay slot */
17157 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17158 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17160 /* POOL32A, POOL32B, POOL32I, POOL32C */
17162 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17164 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17166 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17168 /* LB32, LH32, LWC132, LDC132, LW32 */
17169 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17170 generate_exception_end(ctx
, EXCP_RI
);
17175 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17177 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17179 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17180 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17181 generate_exception_end(ctx
, EXCP_RI
);
17191 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17192 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17193 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17196 switch (ctx
->opcode
& 0x1) {
17204 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17205 /* In the Release 6 the register number location in
17206 * the instruction encoding has changed.
17208 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17210 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17216 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17217 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17218 int amount
= (ctx
->opcode
>> 1) & 0x7;
17220 amount
= amount
== 0 ? 8 : amount
;
17222 switch (ctx
->opcode
& 0x1) {
17231 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17235 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17236 gen_pool16c_r6_insn(ctx
);
17238 gen_pool16c_insn(ctx
);
17243 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17244 int rb
= 28; /* GP */
17245 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17247 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17251 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17252 if (ctx
->opcode
& 1) {
17253 generate_exception_end(ctx
, EXCP_RI
);
17256 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17257 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17258 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17259 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17264 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17265 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17266 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17267 offset
= (offset
== 0xf ? -1 : offset
);
17269 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17274 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17275 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17276 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17278 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17283 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17284 int rb
= 29; /* SP */
17285 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17287 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17292 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17293 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17294 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17296 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17301 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17302 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17303 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17305 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17310 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17311 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17312 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17314 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17319 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17320 int rb
= 29; /* SP */
17321 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17323 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17328 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17329 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17330 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17332 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17337 int rd
= uMIPS_RD5(ctx
->opcode
);
17338 int rs
= uMIPS_RS5(ctx
->opcode
);
17340 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17347 switch (ctx
->opcode
& 0x1) {
17357 switch (ctx
->opcode
& 0x1) {
17362 gen_addiur1sp(ctx
);
17366 case B16
: /* BC16 */
17367 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17368 sextract32(ctx
->opcode
, 0, 10) << 1,
17369 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17371 case BNEZ16
: /* BNEZC16 */
17372 case BEQZ16
: /* BEQZC16 */
17373 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17374 mmreg(uMIPS_RD(ctx
->opcode
)),
17375 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17376 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17381 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17382 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17384 imm
= (imm
== 0x7f ? -1 : imm
);
17385 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17391 generate_exception_end(ctx
, EXCP_RI
);
17394 decode_micromips32_opc(env
, ctx
);
17407 /* MAJOR, P16, and P32 pools opcodes */
17411 NM_MOVE_BALC
= 0x02,
17419 NM_P16_SHIFT
= 0x0c,
17437 NM_P_LS_U12
= 0x21,
17447 NM_P16_ADDU
= 0x2c,
17461 NM_MOVEPREV
= 0x3f,
17464 /* POOL32A instruction pool */
17466 NM_POOL32A0
= 0x00,
17467 NM_SPECIAL2
= 0x01,
17470 NM_POOL32A5
= 0x05,
17471 NM_POOL32A7
= 0x07,
17474 /* P.GP.W instruction pool */
17476 NM_ADDIUGP_W
= 0x00,
17481 /* P48I instruction pool */
17485 NM_ADDIUGP48
= 0x02,
17486 NM_ADDIUPC48
= 0x03,
17491 /* P.U12 instruction pool */
17500 NM_ADDIUNEG
= 0x08,
17507 /* POOL32F instruction pool */
17509 NM_POOL32F_0
= 0x00,
17510 NM_POOL32F_3
= 0x03,
17511 NM_POOL32F_5
= 0x05,
17514 /* POOL32S instruction pool */
17516 NM_POOL32S_0
= 0x00,
17517 NM_POOL32S_4
= 0x04,
17520 /* P.LUI instruction pool */
17526 /* P.GP.BH instruction pool */
17531 NM_ADDIUGP_B
= 0x03,
17534 NM_P_GP_CP1
= 0x06,
17537 /* P.LS.U12 instruction pool */
17542 NM_P_PREFU12
= 0x03,
17555 /* P.LS.S9 instruction pool */
17561 NM_P_LS_UAWM
= 0x05,
17564 /* P.BAL instruction pool */
17570 /* P.J instruction pool */
17573 NM_JALRC_HB
= 0x01,
17574 NM_P_BALRSC
= 0x08,
17577 /* P.BR1 instruction pool */
17585 /* P.BR2 instruction pool */
17592 /* P.BRI instruction pool */
17604 /* P16.SHIFT instruction pool */
17610 /* POOL16C instruction pool */
17612 NM_POOL16C_0
= 0x00,
17616 /* P16.A1 instruction pool */
17618 NM_ADDIUR1SP
= 0x01,
17621 /* P16.A2 instruction pool */
17624 NM_P_ADDIURS5
= 0x01,
17627 /* P16.ADDU instruction pool */
17633 /* P16.SR instruction pool */
17636 NM_RESTORE_JRC16
= 0x01,
17639 /* P16.4X4 instruction pool */
17645 /* P16.LB instruction pool */
17652 /* P16.LH instruction pool */
17659 /* P.RI instruction pool */
17662 NM_P_SYSCALL
= 0x01,
17667 /* POOL32A0 instruction pool */
17702 NM_D_E_MT_VPE
= 0x56,
17710 /* CRC32 instruction pool */
17720 /* POOL32A5 instruction pool */
17722 NM_CMP_EQ_PH
= 0x00,
17723 NM_CMP_LT_PH
= 0x08,
17724 NM_CMP_LE_PH
= 0x10,
17725 NM_CMPGU_EQ_QB
= 0x18,
17726 NM_CMPGU_LT_QB
= 0x20,
17727 NM_CMPGU_LE_QB
= 0x28,
17728 NM_CMPGDU_EQ_QB
= 0x30,
17729 NM_CMPGDU_LT_QB
= 0x38,
17730 NM_CMPGDU_LE_QB
= 0x40,
17731 NM_CMPU_EQ_QB
= 0x48,
17732 NM_CMPU_LT_QB
= 0x50,
17733 NM_CMPU_LE_QB
= 0x58,
17734 NM_ADDQ_S_W
= 0x60,
17735 NM_SUBQ_S_W
= 0x68,
17739 NM_ADDQ_S_PH
= 0x01,
17740 NM_ADDQH_R_PH
= 0x09,
17741 NM_ADDQH_R_W
= 0x11,
17742 NM_ADDU_S_QB
= 0x19,
17743 NM_ADDU_S_PH
= 0x21,
17744 NM_ADDUH_R_QB
= 0x29,
17745 NM_SHRAV_R_PH
= 0x31,
17746 NM_SHRAV_R_QB
= 0x39,
17747 NM_SUBQ_S_PH
= 0x41,
17748 NM_SUBQH_R_PH
= 0x49,
17749 NM_SUBQH_R_W
= 0x51,
17750 NM_SUBU_S_QB
= 0x59,
17751 NM_SUBU_S_PH
= 0x61,
17752 NM_SUBUH_R_QB
= 0x69,
17753 NM_SHLLV_S_PH
= 0x71,
17754 NM_PRECR_SRA_R_PH_W
= 0x79,
17756 NM_MULEU_S_PH_QBL
= 0x12,
17757 NM_MULEU_S_PH_QBR
= 0x1a,
17758 NM_MULQ_RS_PH
= 0x22,
17759 NM_MULQ_S_PH
= 0x2a,
17760 NM_MULQ_RS_W
= 0x32,
17761 NM_MULQ_S_W
= 0x3a,
17764 NM_SHRAV_R_W
= 0x5a,
17765 NM_SHRLV_PH
= 0x62,
17766 NM_SHRLV_QB
= 0x6a,
17767 NM_SHLLV_QB
= 0x72,
17768 NM_SHLLV_S_W
= 0x7a,
17772 NM_MULEQ_S_W_PHL
= 0x04,
17773 NM_MULEQ_S_W_PHR
= 0x0c,
17775 NM_MUL_S_PH
= 0x05,
17776 NM_PRECR_QB_PH
= 0x0d,
17777 NM_PRECRQ_QB_PH
= 0x15,
17778 NM_PRECRQ_PH_W
= 0x1d,
17779 NM_PRECRQ_RS_PH_W
= 0x25,
17780 NM_PRECRQU_S_QB_PH
= 0x2d,
17781 NM_PACKRL_PH
= 0x35,
17785 NM_SHRA_R_W
= 0x5e,
17786 NM_SHRA_R_PH
= 0x66,
17787 NM_SHLL_S_PH
= 0x76,
17788 NM_SHLL_S_W
= 0x7e,
17793 /* POOL32A7 instruction pool */
17798 NM_POOL32AXF
= 0x07,
17801 /* P.SR instruction pool */
17807 /* P.SHIFT instruction pool */
17815 /* P.ROTX instruction pool */
17820 /* P.INS instruction pool */
17825 /* P.EXT instruction pool */
17830 /* POOL32F_0 (fmt) instruction pool */
17835 NM_SELEQZ_S
= 0x07,
17836 NM_SELEQZ_D
= 0x47,
17840 NM_SELNEZ_S
= 0x0f,
17841 NM_SELNEZ_D
= 0x4f,
17856 /* POOL32F_3 instruction pool */
17860 NM_MINA_FMT
= 0x04,
17861 NM_MAXA_FMT
= 0x05,
17862 NM_POOL32FXF
= 0x07,
17865 /* POOL32F_5 instruction pool */
17867 NM_CMP_CONDN_S
= 0x00,
17868 NM_CMP_CONDN_D
= 0x02,
17871 /* P.GP.LH instruction pool */
17877 /* P.GP.SH instruction pool */
17882 /* P.GP.CP1 instruction pool */
17890 /* P.LS.S0 instruction pool */
17907 NM_P_PREFS9
= 0x03,
17913 /* P.LS.S1 instruction pool */
17915 NM_ASET_ACLR
= 0x02,
17923 /* P.LS.E0 instruction pool */
17939 /* P.PREFE instruction pool */
17945 /* P.LLE instruction pool */
17951 /* P.SCE instruction pool */
17957 /* P.LS.WM instruction pool */
17963 /* P.LS.UAWM instruction pool */
17969 /* P.BR3A instruction pool */
17975 NM_BPOSGE32C
= 0x04,
17978 /* P16.RI instruction pool */
17980 NM_P16_SYSCALL
= 0x01,
17985 /* POOL16C_0 instruction pool */
17987 NM_POOL16C_00
= 0x00,
17990 /* P16.JRC instruction pool */
17996 /* P.SYSCALL instruction pool */
18002 /* P.TRAP instruction pool */
18008 /* P.CMOVE instruction pool */
18014 /* POOL32Axf instruction pool */
18016 NM_POOL32AXF_1
= 0x01,
18017 NM_POOL32AXF_2
= 0x02,
18018 NM_POOL32AXF_4
= 0x04,
18019 NM_POOL32AXF_5
= 0x05,
18020 NM_POOL32AXF_7
= 0x07,
18023 /* POOL32Axf_1 instruction pool */
18025 NM_POOL32AXF_1_0
= 0x00,
18026 NM_POOL32AXF_1_1
= 0x01,
18027 NM_POOL32AXF_1_3
= 0x03,
18028 NM_POOL32AXF_1_4
= 0x04,
18029 NM_POOL32AXF_1_5
= 0x05,
18030 NM_POOL32AXF_1_7
= 0x07,
18033 /* POOL32Axf_2 instruction pool */
18035 NM_POOL32AXF_2_0_7
= 0x00,
18036 NM_POOL32AXF_2_8_15
= 0x01,
18037 NM_POOL32AXF_2_16_23
= 0x02,
18038 NM_POOL32AXF_2_24_31
= 0x03,
18041 /* POOL32Axf_7 instruction pool */
18043 NM_SHRA_R_QB
= 0x0,
18048 /* POOL32Axf_1_0 instruction pool */
18056 /* POOL32Axf_1_1 instruction pool */
18062 /* POOL32Axf_1_3 instruction pool */
18070 /* POOL32Axf_1_4 instruction pool */
18076 /* POOL32Axf_1_5 instruction pool */
18078 NM_MAQ_S_W_PHR
= 0x0,
18079 NM_MAQ_S_W_PHL
= 0x1,
18080 NM_MAQ_SA_W_PHR
= 0x2,
18081 NM_MAQ_SA_W_PHL
= 0x3,
18084 /* POOL32Axf_1_7 instruction pool */
18088 NM_EXTR_RS_W
= 0x2,
18092 /* POOL32Axf_2_0_7 instruction pool */
18095 NM_DPAQ_S_W_PH
= 0x1,
18097 NM_DPSQ_S_W_PH
= 0x3,
18104 /* POOL32Axf_2_8_15 instruction pool */
18106 NM_DPAX_W_PH
= 0x0,
18107 NM_DPAQ_SA_L_W
= 0x1,
18108 NM_DPSX_W_PH
= 0x2,
18109 NM_DPSQ_SA_L_W
= 0x3,
18112 NM_EXTRV_R_W
= 0x7,
18115 /* POOL32Axf_2_16_23 instruction pool */
18117 NM_DPAU_H_QBL
= 0x0,
18118 NM_DPAQX_S_W_PH
= 0x1,
18119 NM_DPSU_H_QBL
= 0x2,
18120 NM_DPSQX_S_W_PH
= 0x3,
18123 NM_MULSA_W_PH
= 0x6,
18124 NM_EXTRV_RS_W
= 0x7,
18127 /* POOL32Axf_2_24_31 instruction pool */
18129 NM_DPAU_H_QBR
= 0x0,
18130 NM_DPAQX_SA_W_PH
= 0x1,
18131 NM_DPSU_H_QBR
= 0x2,
18132 NM_DPSQX_SA_W_PH
= 0x3,
18135 NM_MULSAQ_S_W_PH
= 0x6,
18136 NM_EXTRV_S_H
= 0x7,
18139 /* POOL32Axf_{4, 5} instruction pool */
18158 /* nanoMIPS DSP instructions */
18159 NM_ABSQ_S_QB
= 0x00,
18160 NM_ABSQ_S_PH
= 0x08,
18161 NM_ABSQ_S_W
= 0x10,
18162 NM_PRECEQ_W_PHL
= 0x28,
18163 NM_PRECEQ_W_PHR
= 0x30,
18164 NM_PRECEQU_PH_QBL
= 0x38,
18165 NM_PRECEQU_PH_QBR
= 0x48,
18166 NM_PRECEU_PH_QBL
= 0x58,
18167 NM_PRECEU_PH_QBR
= 0x68,
18168 NM_PRECEQU_PH_QBLA
= 0x39,
18169 NM_PRECEQU_PH_QBRA
= 0x49,
18170 NM_PRECEU_PH_QBLA
= 0x59,
18171 NM_PRECEU_PH_QBRA
= 0x69,
18172 NM_REPLV_PH
= 0x01,
18173 NM_REPLV_QB
= 0x09,
18176 NM_RADDU_W_QB
= 0x78,
18182 /* PP.SR instruction pool */
18186 NM_RESTORE_JRC
= 0x03,
18189 /* P.SR.F instruction pool */
18192 NM_RESTOREF
= 0x01,
18195 /* P16.SYSCALL instruction pool */
18197 NM_SYSCALL16
= 0x00,
18198 NM_HYPCALL16
= 0x01,
18201 /* POOL16C_00 instruction pool */
18209 /* PP.LSX and PP.LSXS instruction pool */
18247 /* ERETx instruction pool */
18253 /* POOL32FxF_{0, 1} insturction pool */
18262 NM_CVT_S_PL
= 0x84,
18263 NM_CVT_S_PU
= 0xa4,
18265 NM_CVT_L_S
= 0x004,
18266 NM_CVT_L_D
= 0x104,
18267 NM_CVT_W_S
= 0x024,
18268 NM_CVT_W_D
= 0x124,
18270 NM_RSQRT_S
= 0x008,
18271 NM_RSQRT_D
= 0x108,
18276 NM_RECIP_S
= 0x048,
18277 NM_RECIP_D
= 0x148,
18279 NM_FLOOR_L_S
= 0x00c,
18280 NM_FLOOR_L_D
= 0x10c,
18282 NM_FLOOR_W_S
= 0x02c,
18283 NM_FLOOR_W_D
= 0x12c,
18285 NM_CEIL_L_S
= 0x04c,
18286 NM_CEIL_L_D
= 0x14c,
18287 NM_CEIL_W_S
= 0x06c,
18288 NM_CEIL_W_D
= 0x16c,
18289 NM_TRUNC_L_S
= 0x08c,
18290 NM_TRUNC_L_D
= 0x18c,
18291 NM_TRUNC_W_S
= 0x0ac,
18292 NM_TRUNC_W_D
= 0x1ac,
18293 NM_ROUND_L_S
= 0x0cc,
18294 NM_ROUND_L_D
= 0x1cc,
18295 NM_ROUND_W_S
= 0x0ec,
18296 NM_ROUND_W_D
= 0x1ec,
18304 NM_CVT_D_S
= 0x04d,
18305 NM_CVT_D_W
= 0x0cd,
18306 NM_CVT_D_L
= 0x14d,
18307 NM_CVT_S_D
= 0x06d,
18308 NM_CVT_S_W
= 0x0ed,
18309 NM_CVT_S_L
= 0x16d,
18312 /* P.LL instruction pool */
18318 /* P.SC instruction pool */
18324 /* P.DVP instruction pool */
18333 * nanoMIPS decoding engine
18338 /* extraction utilities */
18340 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18341 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18342 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18343 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18344 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18345 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'):
 * expand a 3-bit register encoding to a full GPR number.
 */
static inline int decode_gpr_gpr3(int r)
{
    /* Encodings 0..3 select s0..s3 (16..19); 4..7 select a0..a3 (4..7). */
    static const int gpr3_map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'):
 * expand a 3-bit store-source register encoding to a full GPR number.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    /* Like the gpr3 mapping, except encoding 0 selects $zero (0), not s0. */
    static const int gpr3_src_store_map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_src_store_map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'):
 * expand a 4-bit register encoding to a full GPR number.
 */
static inline int decode_gpr_gpr4(int r)
{
    /* Encodings 0..3 -> t0..t3 (8..11), 4..7 -> a0..a3, 8..15 -> s0..s7. */
    static const int gpr4_map[] = {
        8, 9, 10, 11, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_map[r & 0xf];
}
18372 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
18373 static inline int decode_gpr_gpr4_zero(int r
)
18375 static const int map
[] = { 8, 9, 10, 0, 4, 5, 6, 7,
18376 16, 17, 18, 19, 20, 21, 22, 23 };
18378 return map
[r
& 0xf];
18382 /* extraction utilities */
18384 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18385 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18386 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18387 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18388 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18389 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18392 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18394 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18397 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18398 uint8_t gp
, uint16_t u
)
18401 TCGv va
= tcg_temp_new();
18402 TCGv t0
= tcg_temp_new();
18404 while (counter
!= count
) {
18405 bool use_gp
= gp
&& (counter
== count
- 1);
18406 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18407 int this_offset
= -((counter
+ 1) << 2);
18408 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18409 gen_load_gpr(t0
, this_rt
);
18410 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18411 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18415 /* adjust stack pointer */
18416 gen_adjust_sp(ctx
, -u
);
18422 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18423 uint8_t gp
, uint16_t u
)
18426 TCGv va
= tcg_temp_new();
18427 TCGv t0
= tcg_temp_new();
18429 while (counter
!= count
) {
18430 bool use_gp
= gp
&& (counter
== count
- 1);
18431 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18432 int this_offset
= u
- ((counter
+ 1) << 2);
18433 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18434 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18435 ctx
->default_tcg_memop_mask
);
18436 tcg_gen_ext32s_tl(t0
, t0
);
18437 gen_store_gpr(t0
, this_rt
);
18441 /* adjust stack pointer */
18442 gen_adjust_sp(ctx
, u
);
18448 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18450 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18451 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
18453 switch (extract32(ctx
->opcode
, 2, 2)) {
18455 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18458 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18461 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18464 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18469 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18471 int rt
= extract32(ctx
->opcode
, 21, 5);
18472 int rs
= extract32(ctx
->opcode
, 16, 5);
18473 int rd
= extract32(ctx
->opcode
, 11, 5);
18475 switch (extract32(ctx
->opcode
, 3, 7)) {
18477 switch (extract32(ctx
->opcode
, 10, 1)) {
18480 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18484 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18490 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18494 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18497 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18500 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18503 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18506 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18509 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18512 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18515 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18519 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18522 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18525 switch (extract32(ctx
->opcode
, 10, 1)) {
18527 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18530 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18535 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18538 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18541 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18544 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18547 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18552 #ifndef CONFIG_USER_ONLY
18553 TCGv t0
= tcg_temp_new();
18554 switch (extract32(ctx
->opcode
, 10, 1)) {
18557 check_cp0_enabled(ctx
);
18558 gen_helper_dvp(t0
, cpu_env
);
18559 gen_store_gpr(t0
, rt
);
18564 check_cp0_enabled(ctx
);
18565 gen_helper_evp(t0
, cpu_env
);
18566 gen_store_gpr(t0
, rt
);
18573 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18578 TCGv t0
= tcg_temp_new();
18579 TCGv t1
= tcg_temp_new();
18580 TCGv t2
= tcg_temp_new();
18582 gen_load_gpr(t1
, rs
);
18583 gen_load_gpr(t2
, rt
);
18584 tcg_gen_add_tl(t0
, t1
, t2
);
18585 tcg_gen_ext32s_tl(t0
, t0
);
18586 tcg_gen_xor_tl(t1
, t1
, t2
);
18587 tcg_gen_xor_tl(t2
, t0
, t2
);
18588 tcg_gen_andc_tl(t1
, t2
, t1
);
18590 /* operands of same sign, result different sign */
18591 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18592 gen_store_gpr(t0
, rd
);
18600 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18603 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18606 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18609 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18612 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18615 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18618 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18621 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18623 #ifndef CONFIG_USER_ONLY
18625 check_cp0_enabled(ctx
);
18627 /* Treat as NOP. */
18630 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18633 check_cp0_enabled(ctx
);
18635 TCGv t0
= tcg_temp_new();
18637 gen_load_gpr(t0
, rt
);
18638 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18642 case NM_D_E_MT_VPE
:
18644 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18645 TCGv t0
= tcg_temp_new();
18652 gen_helper_dmt(t0
);
18653 gen_store_gpr(t0
, rt
);
18654 } else if (rs
== 0) {
18657 gen_helper_dvpe(t0
, cpu_env
);
18658 gen_store_gpr(t0
, rt
);
18660 generate_exception_end(ctx
, EXCP_RI
);
18667 gen_helper_emt(t0
);
18668 gen_store_gpr(t0
, rt
);
18669 } else if (rs
== 0) {
18672 gen_helper_evpe(t0
, cpu_env
);
18673 gen_store_gpr(t0
, rt
);
18675 generate_exception_end(ctx
, EXCP_RI
);
18686 TCGv t0
= tcg_temp_new();
18687 TCGv t1
= tcg_temp_new();
18689 gen_load_gpr(t0
, rt
);
18690 gen_load_gpr(t1
, rs
);
18691 gen_helper_fork(t0
, t1
);
18698 check_cp0_enabled(ctx
);
18700 /* Treat as NOP. */
18703 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18704 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18708 check_cp0_enabled(ctx
);
18709 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18710 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18715 TCGv t0
= tcg_temp_new();
18717 gen_load_gpr(t0
, rs
);
18718 gen_helper_yield(t0
, cpu_env
, t0
);
18719 gen_store_gpr(t0
, rt
);
18725 generate_exception_end(ctx
, EXCP_RI
);
18731 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18732 int ret
, int v1
, int v2
)
18738 t0
= tcg_temp_new_i32();
18740 v0_t
= tcg_temp_new();
18741 v1_t
= tcg_temp_new();
18743 tcg_gen_movi_i32(t0
, v2
>> 3);
18745 gen_load_gpr(v0_t
, ret
);
18746 gen_load_gpr(v1_t
, v1
);
18749 case NM_MAQ_S_W_PHR
:
18751 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18753 case NM_MAQ_S_W_PHL
:
18755 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18757 case NM_MAQ_SA_W_PHR
:
18759 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18761 case NM_MAQ_SA_W_PHL
:
18763 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18766 generate_exception_end(ctx
, EXCP_RI
);
18770 tcg_temp_free_i32(t0
);
18772 tcg_temp_free(v0_t
);
18773 tcg_temp_free(v1_t
);
18777 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18778 int ret
, int v1
, int v2
)
18781 TCGv t0
= tcg_temp_new();
18782 TCGv t1
= tcg_temp_new();
18783 TCGv v0_t
= tcg_temp_new();
18785 gen_load_gpr(v0_t
, v1
);
18788 case NM_POOL32AXF_1_0
:
18790 switch (extract32(ctx
->opcode
, 12, 2)) {
18792 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18795 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18798 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18801 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18805 case NM_POOL32AXF_1_1
:
18807 switch (extract32(ctx
->opcode
, 12, 2)) {
18809 tcg_gen_movi_tl(t0
, v2
);
18810 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18813 tcg_gen_movi_tl(t0
, v2
>> 3);
18814 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18817 generate_exception_end(ctx
, EXCP_RI
);
18821 case NM_POOL32AXF_1_3
:
18823 imm
= extract32(ctx
->opcode
, 14, 7);
18824 switch (extract32(ctx
->opcode
, 12, 2)) {
18826 tcg_gen_movi_tl(t0
, imm
);
18827 gen_helper_rddsp(t0
, t0
, cpu_env
);
18828 gen_store_gpr(t0
, ret
);
18831 gen_load_gpr(t0
, ret
);
18832 tcg_gen_movi_tl(t1
, imm
);
18833 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18836 tcg_gen_movi_tl(t0
, v2
>> 3);
18837 tcg_gen_movi_tl(t1
, v1
);
18838 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18839 gen_store_gpr(t0
, ret
);
18842 tcg_gen_movi_tl(t0
, v2
>> 3);
18843 tcg_gen_movi_tl(t1
, v1
);
18844 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18845 gen_store_gpr(t0
, ret
);
18849 case NM_POOL32AXF_1_4
:
18851 tcg_gen_movi_tl(t0
, v2
>> 2);
18852 switch (extract32(ctx
->opcode
, 12, 1)) {
18854 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18855 gen_store_gpr(t0
, ret
);
18858 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18859 gen_store_gpr(t0
, ret
);
18863 case NM_POOL32AXF_1_5
:
18864 opc
= extract32(ctx
->opcode
, 12, 2);
18865 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18867 case NM_POOL32AXF_1_7
:
18869 tcg_gen_movi_tl(t0
, v2
>> 3);
18870 tcg_gen_movi_tl(t1
, v1
);
18871 switch (extract32(ctx
->opcode
, 12, 2)) {
18873 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18874 gen_store_gpr(t0
, ret
);
18877 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18878 gen_store_gpr(t0
, ret
);
18881 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18882 gen_store_gpr(t0
, ret
);
18885 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18886 gen_store_gpr(t0
, ret
);
18891 generate_exception_end(ctx
, EXCP_RI
);
18897 tcg_temp_free(v0_t
);
18900 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18901 TCGv v0
, TCGv v1
, int rd
)
18905 t0
= tcg_temp_new_i32();
18907 tcg_gen_movi_i32(t0
, rd
>> 3);
18910 case NM_POOL32AXF_2_0_7
:
18911 switch (extract32(ctx
->opcode
, 9, 3)) {
18914 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18916 case NM_DPAQ_S_W_PH
:
18918 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18922 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18924 case NM_DPSQ_S_W_PH
:
18926 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18929 generate_exception_end(ctx
, EXCP_RI
);
18933 case NM_POOL32AXF_2_8_15
:
18934 switch (extract32(ctx
->opcode
, 9, 3)) {
18937 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18939 case NM_DPAQ_SA_L_W
:
18941 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18945 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18947 case NM_DPSQ_SA_L_W
:
18949 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18952 generate_exception_end(ctx
, EXCP_RI
);
18956 case NM_POOL32AXF_2_16_23
:
18957 switch (extract32(ctx
->opcode
, 9, 3)) {
18958 case NM_DPAU_H_QBL
:
18960 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18962 case NM_DPAQX_S_W_PH
:
18964 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18966 case NM_DPSU_H_QBL
:
18968 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18970 case NM_DPSQX_S_W_PH
:
18972 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18974 case NM_MULSA_W_PH
:
18976 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18979 generate_exception_end(ctx
, EXCP_RI
);
18983 case NM_POOL32AXF_2_24_31
:
18984 switch (extract32(ctx
->opcode
, 9, 3)) {
18985 case NM_DPAU_H_QBR
:
18987 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18989 case NM_DPAQX_SA_W_PH
:
18991 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18993 case NM_DPSU_H_QBR
:
18995 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18997 case NM_DPSQX_SA_W_PH
:
18999 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19001 case NM_MULSAQ_S_W_PH
:
19003 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19006 generate_exception_end(ctx
, EXCP_RI
);
19011 generate_exception_end(ctx
, EXCP_RI
);
19015 tcg_temp_free_i32(t0
);
19018 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19019 int rt
, int rs
, int rd
)
19022 TCGv t0
= tcg_temp_new();
19023 TCGv t1
= tcg_temp_new();
19024 TCGv v0_t
= tcg_temp_new();
19025 TCGv v1_t
= tcg_temp_new();
19027 gen_load_gpr(v0_t
, rt
);
19028 gen_load_gpr(v1_t
, rs
);
19031 case NM_POOL32AXF_2_0_7
:
19032 switch (extract32(ctx
->opcode
, 9, 3)) {
19034 case NM_DPAQ_S_W_PH
:
19036 case NM_DPSQ_S_W_PH
:
19037 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19042 gen_load_gpr(t0
, rs
);
19044 if (rd
!= 0 && rd
!= 2) {
19045 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19046 tcg_gen_ext32u_tl(t0
, t0
);
19047 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19048 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19050 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19056 int acc
= extract32(ctx
->opcode
, 14, 2);
19057 TCGv_i64 t2
= tcg_temp_new_i64();
19058 TCGv_i64 t3
= tcg_temp_new_i64();
19060 gen_load_gpr(t0
, rt
);
19061 gen_load_gpr(t1
, rs
);
19062 tcg_gen_ext_tl_i64(t2
, t0
);
19063 tcg_gen_ext_tl_i64(t3
, t1
);
19064 tcg_gen_mul_i64(t2
, t2
, t3
);
19065 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19066 tcg_gen_add_i64(t2
, t2
, t3
);
19067 tcg_temp_free_i64(t3
);
19068 gen_move_low32(cpu_LO
[acc
], t2
);
19069 gen_move_high32(cpu_HI
[acc
], t2
);
19070 tcg_temp_free_i64(t2
);
19076 int acc
= extract32(ctx
->opcode
, 14, 2);
19077 TCGv_i32 t2
= tcg_temp_new_i32();
19078 TCGv_i32 t3
= tcg_temp_new_i32();
19080 gen_load_gpr(t0
, rs
);
19081 gen_load_gpr(t1
, rt
);
19082 tcg_gen_trunc_tl_i32(t2
, t0
);
19083 tcg_gen_trunc_tl_i32(t3
, t1
);
19084 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19085 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19086 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19087 tcg_temp_free_i32(t2
);
19088 tcg_temp_free_i32(t3
);
19093 gen_load_gpr(v1_t
, rs
);
19094 tcg_gen_movi_tl(t0
, rd
>> 3);
19095 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19096 gen_store_gpr(t0
, ret
);
19100 case NM_POOL32AXF_2_8_15
:
19101 switch (extract32(ctx
->opcode
, 9, 3)) {
19103 case NM_DPAQ_SA_L_W
:
19105 case NM_DPSQ_SA_L_W
:
19106 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19111 int acc
= extract32(ctx
->opcode
, 14, 2);
19112 TCGv_i64 t2
= tcg_temp_new_i64();
19113 TCGv_i64 t3
= tcg_temp_new_i64();
19115 gen_load_gpr(t0
, rs
);
19116 gen_load_gpr(t1
, rt
);
19117 tcg_gen_ext32u_tl(t0
, t0
);
19118 tcg_gen_ext32u_tl(t1
, t1
);
19119 tcg_gen_extu_tl_i64(t2
, t0
);
19120 tcg_gen_extu_tl_i64(t3
, t1
);
19121 tcg_gen_mul_i64(t2
, t2
, t3
);
19122 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19123 tcg_gen_add_i64(t2
, t2
, t3
);
19124 tcg_temp_free_i64(t3
);
19125 gen_move_low32(cpu_LO
[acc
], t2
);
19126 gen_move_high32(cpu_HI
[acc
], t2
);
19127 tcg_temp_free_i64(t2
);
19133 int acc
= extract32(ctx
->opcode
, 14, 2);
19134 TCGv_i32 t2
= tcg_temp_new_i32();
19135 TCGv_i32 t3
= tcg_temp_new_i32();
19137 gen_load_gpr(t0
, rs
);
19138 gen_load_gpr(t1
, rt
);
19139 tcg_gen_trunc_tl_i32(t2
, t0
);
19140 tcg_gen_trunc_tl_i32(t3
, t1
);
19141 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19142 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19143 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19144 tcg_temp_free_i32(t2
);
19145 tcg_temp_free_i32(t3
);
19150 tcg_gen_movi_tl(t0
, rd
>> 3);
19151 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19152 gen_store_gpr(t0
, ret
);
19155 generate_exception_end(ctx
, EXCP_RI
);
19159 case NM_POOL32AXF_2_16_23
:
19160 switch (extract32(ctx
->opcode
, 9, 3)) {
19161 case NM_DPAU_H_QBL
:
19162 case NM_DPAQX_S_W_PH
:
19163 case NM_DPSU_H_QBL
:
19164 case NM_DPSQX_S_W_PH
:
19165 case NM_MULSA_W_PH
:
19166 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19170 tcg_gen_movi_tl(t0
, rd
>> 3);
19171 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19172 gen_store_gpr(t0
, ret
);
19177 int acc
= extract32(ctx
->opcode
, 14, 2);
19178 TCGv_i64 t2
= tcg_temp_new_i64();
19179 TCGv_i64 t3
= tcg_temp_new_i64();
19181 gen_load_gpr(t0
, rs
);
19182 gen_load_gpr(t1
, rt
);
19183 tcg_gen_ext_tl_i64(t2
, t0
);
19184 tcg_gen_ext_tl_i64(t3
, t1
);
19185 tcg_gen_mul_i64(t2
, t2
, t3
);
19186 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19187 tcg_gen_sub_i64(t2
, t3
, t2
);
19188 tcg_temp_free_i64(t3
);
19189 gen_move_low32(cpu_LO
[acc
], t2
);
19190 gen_move_high32(cpu_HI
[acc
], t2
);
19191 tcg_temp_free_i64(t2
);
19194 case NM_EXTRV_RS_W
:
19196 tcg_gen_movi_tl(t0
, rd
>> 3);
19197 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19198 gen_store_gpr(t0
, ret
);
19202 case NM_POOL32AXF_2_24_31
:
19203 switch (extract32(ctx
->opcode
, 9, 3)) {
19204 case NM_DPAU_H_QBR
:
19205 case NM_DPAQX_SA_W_PH
:
19206 case NM_DPSU_H_QBR
:
19207 case NM_DPSQX_SA_W_PH
:
19208 case NM_MULSAQ_S_W_PH
:
19209 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19213 tcg_gen_movi_tl(t0
, rd
>> 3);
19214 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19215 gen_store_gpr(t0
, ret
);
19220 int acc
= extract32(ctx
->opcode
, 14, 2);
19221 TCGv_i64 t2
= tcg_temp_new_i64();
19222 TCGv_i64 t3
= tcg_temp_new_i64();
19224 gen_load_gpr(t0
, rs
);
19225 gen_load_gpr(t1
, rt
);
19226 tcg_gen_ext32u_tl(t0
, t0
);
19227 tcg_gen_ext32u_tl(t1
, t1
);
19228 tcg_gen_extu_tl_i64(t2
, t0
);
19229 tcg_gen_extu_tl_i64(t3
, t1
);
19230 tcg_gen_mul_i64(t2
, t2
, t3
);
19231 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19232 tcg_gen_sub_i64(t2
, t3
, t2
);
19233 tcg_temp_free_i64(t3
);
19234 gen_move_low32(cpu_LO
[acc
], t2
);
19235 gen_move_high32(cpu_HI
[acc
], t2
);
19236 tcg_temp_free_i64(t2
);
19241 tcg_gen_movi_tl(t0
, rd
>> 3);
19242 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19243 gen_store_gpr(t0
, ret
);
19248 generate_exception_end(ctx
, EXCP_RI
);
19255 tcg_temp_free(v0_t
);
19256 tcg_temp_free(v1_t
);
19259 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19263 TCGv t0
= tcg_temp_new();
19264 TCGv v0_t
= tcg_temp_new();
19266 gen_load_gpr(v0_t
, rs
);
19271 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19272 gen_store_gpr(v0_t
, ret
);
19276 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19277 gen_store_gpr(v0_t
, ret
);
19281 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19282 gen_store_gpr(v0_t
, ret
);
19284 case NM_PRECEQ_W_PHL
:
19286 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19287 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19288 gen_store_gpr(v0_t
, ret
);
19290 case NM_PRECEQ_W_PHR
:
19292 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19293 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19294 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19295 gen_store_gpr(v0_t
, ret
);
19297 case NM_PRECEQU_PH_QBL
:
19299 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19300 gen_store_gpr(v0_t
, ret
);
19302 case NM_PRECEQU_PH_QBR
:
19304 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19305 gen_store_gpr(v0_t
, ret
);
19307 case NM_PRECEQU_PH_QBLA
:
19309 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19310 gen_store_gpr(v0_t
, ret
);
19312 case NM_PRECEQU_PH_QBRA
:
19314 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19315 gen_store_gpr(v0_t
, ret
);
19317 case NM_PRECEU_PH_QBL
:
19319 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19320 gen_store_gpr(v0_t
, ret
);
19322 case NM_PRECEU_PH_QBR
:
19324 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19325 gen_store_gpr(v0_t
, ret
);
19327 case NM_PRECEU_PH_QBLA
:
19329 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19330 gen_store_gpr(v0_t
, ret
);
19332 case NM_PRECEU_PH_QBRA
:
19334 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19335 gen_store_gpr(v0_t
, ret
);
19339 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19340 tcg_gen_shli_tl(t0
, v0_t
, 16);
19341 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19342 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19343 gen_store_gpr(v0_t
, ret
);
19347 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19348 tcg_gen_shli_tl(t0
, v0_t
, 8);
19349 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19350 tcg_gen_shli_tl(t0
, v0_t
, 16);
19351 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19352 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19353 gen_store_gpr(v0_t
, ret
);
19357 gen_helper_bitrev(v0_t
, v0_t
);
19358 gen_store_gpr(v0_t
, ret
);
19363 TCGv tv0
= tcg_temp_new();
19365 gen_load_gpr(tv0
, rt
);
19366 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19367 gen_store_gpr(v0_t
, ret
);
19368 tcg_temp_free(tv0
);
19371 case NM_RADDU_W_QB
:
19373 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19374 gen_store_gpr(v0_t
, ret
);
19377 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19381 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19385 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19388 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19391 generate_exception_end(ctx
, EXCP_RI
);
19395 tcg_temp_free(v0_t
);
19399 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19400 int rt
, int rs
, int rd
)
19402 TCGv t0
= tcg_temp_new();
19403 TCGv rs_t
= tcg_temp_new();
19405 gen_load_gpr(rs_t
, rs
);
19410 tcg_gen_movi_tl(t0
, rd
>> 2);
19411 switch (extract32(ctx
->opcode
, 12, 1)) {
19414 gen_helper_shra_qb(t0
, t0
, rs_t
);
19415 gen_store_gpr(t0
, rt
);
19419 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19420 gen_store_gpr(t0
, rt
);
19426 tcg_gen_movi_tl(t0
, rd
>> 1);
19427 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19428 gen_store_gpr(t0
, rt
);
19434 target_long result
;
19435 imm
= extract32(ctx
->opcode
, 13, 8);
19436 result
= (uint32_t)imm
<< 24 |
19437 (uint32_t)imm
<< 16 |
19438 (uint32_t)imm
<< 8 |
19440 result
= (int32_t)result
;
19441 tcg_gen_movi_tl(t0
, result
);
19442 gen_store_gpr(t0
, rt
);
19446 generate_exception_end(ctx
, EXCP_RI
);
19450 tcg_temp_free(rs_t
);
19454 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19456 int rt
= extract32(ctx
->opcode
, 21, 5);
19457 int rs
= extract32(ctx
->opcode
, 16, 5);
19458 int rd
= extract32(ctx
->opcode
, 11, 5);
19460 switch (extract32(ctx
->opcode
, 6, 3)) {
19461 case NM_POOL32AXF_1
:
19463 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19464 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19467 case NM_POOL32AXF_2
:
19469 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19470 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19473 case NM_POOL32AXF_4
:
19475 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19476 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19479 case NM_POOL32AXF_5
:
19480 switch (extract32(ctx
->opcode
, 9, 7)) {
19481 #ifndef CONFIG_USER_ONLY
19483 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19486 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19489 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19492 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19495 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19498 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19501 check_cp0_enabled(ctx
);
19503 TCGv t0
= tcg_temp_new();
19505 save_cpu_state(ctx
, 1);
19506 gen_helper_di(t0
, cpu_env
);
19507 gen_store_gpr(t0
, rt
);
19508 /* Stop translation as we may have switched the execution mode */
19509 ctx
->base
.is_jmp
= DISAS_STOP
;
19514 check_cp0_enabled(ctx
);
19516 TCGv t0
= tcg_temp_new();
19518 save_cpu_state(ctx
, 1);
19519 gen_helper_ei(t0
, cpu_env
);
19520 gen_store_gpr(t0
, rt
);
19521 /* Stop translation as we may have switched the execution mode */
19522 ctx
->base
.is_jmp
= DISAS_STOP
;
19527 gen_load_srsgpr(rs
, rt
);
19530 gen_store_srsgpr(rs
, rt
);
19533 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19536 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19539 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19543 generate_exception_end(ctx
, EXCP_RI
);
19547 case NM_POOL32AXF_7
:
19549 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19550 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19554 generate_exception_end(ctx
, EXCP_RI
);
19559 /* Immediate Value Compact Branches */
19560 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19561 int rt
, int32_t imm
, int32_t offset
)
19564 int bcond_compute
= 0;
19565 TCGv t0
= tcg_temp_new();
19566 TCGv t1
= tcg_temp_new();
19568 gen_load_gpr(t0
, rt
);
19569 tcg_gen_movi_tl(t1
, imm
);
19570 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19572 /* Load needed operands and calculate btarget */
19575 if (rt
== 0 && imm
== 0) {
19576 /* Unconditional branch */
19577 } else if (rt
== 0 && imm
!= 0) {
19582 cond
= TCG_COND_EQ
;
19588 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19589 generate_exception_end(ctx
, EXCP_RI
);
19591 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19592 /* Unconditional branch */
19593 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19597 tcg_gen_shri_tl(t0
, t0
, imm
);
19598 tcg_gen_andi_tl(t0
, t0
, 1);
19599 tcg_gen_movi_tl(t1
, 0);
19601 if (opc
== NM_BBEQZC
) {
19602 cond
= TCG_COND_EQ
;
19604 cond
= TCG_COND_NE
;
19609 if (rt
== 0 && imm
== 0) {
19612 } else if (rt
== 0 && imm
!= 0) {
19613 /* Unconditional branch */
19616 cond
= TCG_COND_NE
;
19620 if (rt
== 0 && imm
== 0) {
19621 /* Unconditional branch */
19624 cond
= TCG_COND_GE
;
19629 cond
= TCG_COND_LT
;
19632 if (rt
== 0 && imm
== 0) {
19633 /* Unconditional branch */
19636 cond
= TCG_COND_GEU
;
19641 cond
= TCG_COND_LTU
;
19644 MIPS_INVAL("Immediate Value Compact branch");
19645 generate_exception_end(ctx
, EXCP_RI
);
19649 if (bcond_compute
== 0) {
19650 /* Uncoditional compact branch */
19651 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19653 /* Conditional compact branch */
19654 TCGLabel
*fs
= gen_new_label();
19656 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19658 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19661 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19669 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19670 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19673 TCGv t0
= tcg_temp_new();
19674 TCGv t1
= tcg_temp_new();
19677 gen_load_gpr(t0
, rs
);
19681 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19684 /* calculate btarget */
19685 tcg_gen_shli_tl(t0
, t0
, 1);
19686 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19687 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19689 /* unconditional branch to register */
19690 tcg_gen_mov_tl(cpu_PC
, btarget
);
19691 tcg_gen_lookup_and_goto_ptr();
19697 /* nanoMIPS Branches */
19698 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19699 int rs
, int rt
, int32_t offset
)
19701 int bcond_compute
= 0;
19702 TCGv t0
= tcg_temp_new();
19703 TCGv t1
= tcg_temp_new();
19705 /* Load needed operands and calculate btarget */
19707 /* compact branch */
19710 gen_load_gpr(t0
, rs
);
19711 gen_load_gpr(t1
, rt
);
19713 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19717 if (rs
== 0 || rs
== rt
) {
19718 /* OPC_BLEZALC, OPC_BGEZALC */
19719 /* OPC_BGTZALC, OPC_BLTZALC */
19720 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19722 gen_load_gpr(t0
, rs
);
19723 gen_load_gpr(t1
, rt
);
19725 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19728 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19732 /* OPC_BEQZC, OPC_BNEZC */
19733 gen_load_gpr(t0
, rs
);
19735 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19737 /* OPC_JIC, OPC_JIALC */
19738 TCGv tbase
= tcg_temp_new();
19739 TCGv toffset
= tcg_temp_new();
19741 gen_load_gpr(tbase
, rt
);
19742 tcg_gen_movi_tl(toffset
, offset
);
19743 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19744 tcg_temp_free(tbase
);
19745 tcg_temp_free(toffset
);
19749 MIPS_INVAL("Compact branch/jump");
19750 generate_exception_end(ctx
, EXCP_RI
);
19754 if (bcond_compute
== 0) {
19755 /* Uncoditional compact branch */
19758 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19761 MIPS_INVAL("Compact branch/jump");
19762 generate_exception_end(ctx
, EXCP_RI
);
19766 /* Conditional compact branch */
19767 TCGLabel
*fs
= gen_new_label();
19771 if (rs
== 0 && rt
!= 0) {
19773 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19774 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19776 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19779 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19783 if (rs
== 0 && rt
!= 0) {
19785 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19786 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19788 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19791 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19795 if (rs
== 0 && rt
!= 0) {
19797 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19798 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19800 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19803 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19807 if (rs
== 0 && rt
!= 0) {
19809 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19810 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19812 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19815 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19819 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19822 MIPS_INVAL("Compact conditional branch/jump");
19823 generate_exception_end(ctx
, EXCP_RI
);
19827 /* Generating branch here as compact branches don't have delay slot */
19828 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19831 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19840 /* nanoMIPS CP1 Branches */
19841 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19842 int32_t ft
, int32_t offset
)
19844 target_ulong btarget
;
19845 TCGv_i64 t0
= tcg_temp_new_i64();
19847 gen_load_fpr64(ctx
, t0
, ft
);
19848 tcg_gen_andi_i64(t0
, t0
, 1);
19850 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19854 tcg_gen_xori_i64(t0
, t0
, 1);
19855 ctx
->hflags
|= MIPS_HFLAG_BC
;
19858 /* t0 already set */
19859 ctx
->hflags
|= MIPS_HFLAG_BC
;
19862 MIPS_INVAL("cp1 cond branch");
19863 generate_exception_end(ctx
, EXCP_RI
);
19867 tcg_gen_trunc_i64_tl(bcond
, t0
);
19869 ctx
->btarget
= btarget
;
19872 tcg_temp_free_i64(t0
);
19876 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19879 t0
= tcg_temp_new();
19880 t1
= tcg_temp_new();
19882 gen_load_gpr(t0
, rs
);
19883 gen_load_gpr(t1
, rt
);
19885 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19886 /* PP.LSXS instructions require shifting */
19887 switch (extract32(ctx
->opcode
, 7, 4)) {
19892 tcg_gen_shli_tl(t0
, t0
, 1);
19899 tcg_gen_shli_tl(t0
, t0
, 2);
19903 tcg_gen_shli_tl(t0
, t0
, 3);
19907 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19909 switch (extract32(ctx
->opcode
, 7, 4)) {
19911 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19913 gen_store_gpr(t0
, rd
);
19917 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19919 gen_store_gpr(t0
, rd
);
19923 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19925 gen_store_gpr(t0
, rd
);
19928 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19930 gen_store_gpr(t0
, rd
);
19934 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19936 gen_store_gpr(t0
, rd
);
19940 gen_load_gpr(t1
, rd
);
19941 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19947 gen_load_gpr(t1
, rd
);
19948 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19954 gen_load_gpr(t1
, rd
);
19955 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19959 /*case NM_LWC1XS:*/
19961 /*case NM_LDC1XS:*/
19963 /*case NM_SWC1XS:*/
19965 /*case NM_SDC1XS:*/
19966 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19967 check_cp1_enabled(ctx
);
19968 switch (extract32(ctx
->opcode
, 7, 4)) {
19970 /*case NM_LWC1XS:*/
19971 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19974 /*case NM_LDC1XS:*/
19975 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19978 /*case NM_SWC1XS:*/
19979 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19982 /*case NM_SDC1XS:*/
19983 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19987 generate_exception_err(ctx
, EXCP_CpU
, 1);
19991 generate_exception_end(ctx
, EXCP_RI
);
19999 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20003 rt
= extract32(ctx
->opcode
, 21, 5);
20004 rs
= extract32(ctx
->opcode
, 16, 5);
20005 rd
= extract32(ctx
->opcode
, 11, 5);
20007 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20008 generate_exception_end(ctx
, EXCP_RI
);
20011 check_cp1_enabled(ctx
);
20012 switch (extract32(ctx
->opcode
, 0, 3)) {
20014 switch (extract32(ctx
->opcode
, 3, 7)) {
20016 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20019 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20022 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20025 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20028 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20031 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20034 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20037 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20040 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20043 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20046 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20049 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20052 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20055 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20058 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20061 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20064 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20067 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20070 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20073 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20076 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20079 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20082 generate_exception_end(ctx
, EXCP_RI
);
20087 switch (extract32(ctx
->opcode
, 3, 3)) {
20089 switch (extract32(ctx
->opcode
, 9, 1)) {
20091 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20094 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20099 switch (extract32(ctx
->opcode
, 9, 1)) {
20101 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20104 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20109 switch (extract32(ctx
->opcode
, 9, 1)) {
20111 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20114 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20119 switch (extract32(ctx
->opcode
, 9, 1)) {
20121 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20124 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20129 switch (extract32(ctx
->opcode
, 6, 8)) {
20131 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20134 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20137 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20140 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20143 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20146 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20149 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20152 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20155 switch (extract32(ctx
->opcode
, 6, 9)) {
20157 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20160 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20163 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20166 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20169 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20172 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20175 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20178 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20181 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20184 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20187 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20190 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20193 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20196 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20199 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20202 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20205 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20208 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20211 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20214 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20217 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20220 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20223 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20226 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20229 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20232 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20235 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20238 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20241 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20244 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20247 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20250 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20253 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20256 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20259 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20262 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20265 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20268 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20271 generate_exception_end(ctx
, EXCP_RI
);
20280 switch (extract32(ctx
->opcode
, 3, 3)) {
20281 case NM_CMP_CONDN_S
:
20282 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20284 case NM_CMP_CONDN_D
:
20285 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20288 generate_exception_end(ctx
, EXCP_RI
);
20293 generate_exception_end(ctx
, EXCP_RI
);
20298 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20299 int rd
, int rs
, int rt
)
20302 TCGv t0
= tcg_temp_new();
20303 TCGv v1_t
= tcg_temp_new();
20304 TCGv v2_t
= tcg_temp_new();
20306 gen_load_gpr(v1_t
, rs
);
20307 gen_load_gpr(v2_t
, rt
);
20312 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20316 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20320 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20322 case NM_CMPU_EQ_QB
:
20324 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20326 case NM_CMPU_LT_QB
:
20328 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20330 case NM_CMPU_LE_QB
:
20332 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20334 case NM_CMPGU_EQ_QB
:
20336 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20337 gen_store_gpr(v1_t
, ret
);
20339 case NM_CMPGU_LT_QB
:
20341 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20342 gen_store_gpr(v1_t
, ret
);
20344 case NM_CMPGU_LE_QB
:
20346 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20347 gen_store_gpr(v1_t
, ret
);
20349 case NM_CMPGDU_EQ_QB
:
20351 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20352 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20353 gen_store_gpr(v1_t
, ret
);
20355 case NM_CMPGDU_LT_QB
:
20357 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20358 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20359 gen_store_gpr(v1_t
, ret
);
20361 case NM_CMPGDU_LE_QB
:
20363 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20364 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20365 gen_store_gpr(v1_t
, ret
);
20369 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20370 gen_store_gpr(v1_t
, ret
);
20374 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20375 gen_store_gpr(v1_t
, ret
);
20379 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20380 gen_store_gpr(v1_t
, ret
);
20384 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20385 gen_store_gpr(v1_t
, ret
);
20389 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20390 gen_store_gpr(v1_t
, ret
);
20394 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20395 gen_store_gpr(v1_t
, ret
);
20399 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20400 gen_store_gpr(v1_t
, ret
);
20404 switch (extract32(ctx
->opcode
, 10, 1)) {
20407 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20408 gen_store_gpr(v1_t
, ret
);
20412 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20413 gen_store_gpr(v1_t
, ret
);
20417 case NM_ADDQH_R_PH
:
20419 switch (extract32(ctx
->opcode
, 10, 1)) {
20422 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20423 gen_store_gpr(v1_t
, ret
);
20427 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20428 gen_store_gpr(v1_t
, ret
);
20434 switch (extract32(ctx
->opcode
, 10, 1)) {
20437 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20438 gen_store_gpr(v1_t
, ret
);
20442 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20443 gen_store_gpr(v1_t
, ret
);
20449 switch (extract32(ctx
->opcode
, 10, 1)) {
20452 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20453 gen_store_gpr(v1_t
, ret
);
20457 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20458 gen_store_gpr(v1_t
, ret
);
20464 switch (extract32(ctx
->opcode
, 10, 1)) {
20467 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20468 gen_store_gpr(v1_t
, ret
);
20472 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20473 gen_store_gpr(v1_t
, ret
);
20477 case NM_ADDUH_R_QB
:
20479 switch (extract32(ctx
->opcode
, 10, 1)) {
20482 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20483 gen_store_gpr(v1_t
, ret
);
20487 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20488 gen_store_gpr(v1_t
, ret
);
20492 case NM_SHRAV_R_PH
:
20494 switch (extract32(ctx
->opcode
, 10, 1)) {
20497 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20498 gen_store_gpr(v1_t
, ret
);
20502 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20503 gen_store_gpr(v1_t
, ret
);
20507 case NM_SHRAV_R_QB
:
20509 switch (extract32(ctx
->opcode
, 10, 1)) {
20512 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20513 gen_store_gpr(v1_t
, ret
);
20517 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20518 gen_store_gpr(v1_t
, ret
);
20524 switch (extract32(ctx
->opcode
, 10, 1)) {
20527 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20528 gen_store_gpr(v1_t
, ret
);
20532 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20533 gen_store_gpr(v1_t
, ret
);
20537 case NM_SUBQH_R_PH
:
20539 switch (extract32(ctx
->opcode
, 10, 1)) {
20542 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20543 gen_store_gpr(v1_t
, ret
);
20547 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20548 gen_store_gpr(v1_t
, ret
);
20554 switch (extract32(ctx
->opcode
, 10, 1)) {
20557 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20558 gen_store_gpr(v1_t
, ret
);
20562 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20563 gen_store_gpr(v1_t
, ret
);
20569 switch (extract32(ctx
->opcode
, 10, 1)) {
20572 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20573 gen_store_gpr(v1_t
, ret
);
20577 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20578 gen_store_gpr(v1_t
, ret
);
20584 switch (extract32(ctx
->opcode
, 10, 1)) {
20587 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20588 gen_store_gpr(v1_t
, ret
);
20592 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20593 gen_store_gpr(v1_t
, ret
);
20597 case NM_SUBUH_R_QB
:
20599 switch (extract32(ctx
->opcode
, 10, 1)) {
20602 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20603 gen_store_gpr(v1_t
, ret
);
20607 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20608 gen_store_gpr(v1_t
, ret
);
20612 case NM_SHLLV_S_PH
:
20614 switch (extract32(ctx
->opcode
, 10, 1)) {
20617 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20618 gen_store_gpr(v1_t
, ret
);
20622 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20623 gen_store_gpr(v1_t
, ret
);
20627 case NM_PRECR_SRA_R_PH_W
:
20629 switch (extract32(ctx
->opcode
, 10, 1)) {
20631 /* PRECR_SRA_PH_W */
20633 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20634 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20636 gen_store_gpr(v1_t
, rt
);
20637 tcg_temp_free_i32(sa_t
);
20641 /* PRECR_SRA_R_PH_W */
20643 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20644 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20646 gen_store_gpr(v1_t
, rt
);
20647 tcg_temp_free_i32(sa_t
);
20652 case NM_MULEU_S_PH_QBL
:
20654 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20655 gen_store_gpr(v1_t
, ret
);
20657 case NM_MULEU_S_PH_QBR
:
20659 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20660 gen_store_gpr(v1_t
, ret
);
20662 case NM_MULQ_RS_PH
:
20664 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20665 gen_store_gpr(v1_t
, ret
);
20669 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20670 gen_store_gpr(v1_t
, ret
);
20674 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20675 gen_store_gpr(v1_t
, ret
);
20679 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20680 gen_store_gpr(v1_t
, ret
);
20684 gen_load_gpr(t0
, rs
);
20686 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20688 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20692 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20693 gen_store_gpr(v1_t
, ret
);
20697 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20698 gen_store_gpr(v1_t
, ret
);
20702 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20703 gen_store_gpr(v1_t
, ret
);
20707 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20708 gen_store_gpr(v1_t
, ret
);
20712 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20713 gen_store_gpr(v1_t
, ret
);
20717 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20718 gen_store_gpr(v1_t
, ret
);
20723 TCGv tv0
= tcg_temp_new();
20724 TCGv tv1
= tcg_temp_new();
20725 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20727 tcg_gen_movi_tl(tv0
, rd
>> 3);
20728 tcg_gen_movi_tl(tv1
, imm
);
20729 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20732 case NM_MULEQ_S_W_PHL
:
20734 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20735 gen_store_gpr(v1_t
, ret
);
20737 case NM_MULEQ_S_W_PHR
:
20739 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20740 gen_store_gpr(v1_t
, ret
);
20744 switch (extract32(ctx
->opcode
, 10, 1)) {
20747 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20748 gen_store_gpr(v1_t
, ret
);
20752 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20753 gen_store_gpr(v1_t
, ret
);
20757 case NM_PRECR_QB_PH
:
20759 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20760 gen_store_gpr(v1_t
, ret
);
20762 case NM_PRECRQ_QB_PH
:
20764 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20765 gen_store_gpr(v1_t
, ret
);
20767 case NM_PRECRQ_PH_W
:
20769 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20770 gen_store_gpr(v1_t
, ret
);
20772 case NM_PRECRQ_RS_PH_W
:
20774 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20775 gen_store_gpr(v1_t
, ret
);
20777 case NM_PRECRQU_S_QB_PH
:
20779 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20780 gen_store_gpr(v1_t
, ret
);
20784 tcg_gen_movi_tl(t0
, rd
);
20785 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20786 gen_store_gpr(v1_t
, rt
);
20790 tcg_gen_movi_tl(t0
, rd
>> 1);
20791 switch (extract32(ctx
->opcode
, 10, 1)) {
20794 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20795 gen_store_gpr(v1_t
, rt
);
20799 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20800 gen_store_gpr(v1_t
, rt
);
20806 tcg_gen_movi_tl(t0
, rd
>> 1);
20807 switch (extract32(ctx
->opcode
, 10, 2)) {
20810 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20811 gen_store_gpr(v1_t
, rt
);
20815 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20816 gen_store_gpr(v1_t
, rt
);
20819 generate_exception_end(ctx
, EXCP_RI
);
20825 tcg_gen_movi_tl(t0
, rd
);
20826 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20827 gen_store_gpr(v1_t
, rt
);
20833 imm
= sextract32(ctx
->opcode
, 11, 11);
20834 imm
= (int16_t)(imm
<< 6) >> 6;
20836 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20841 generate_exception_end(ctx
, EXCP_RI
);
20846 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20854 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20855 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20857 rt
= extract32(ctx
->opcode
, 21, 5);
20858 rs
= extract32(ctx
->opcode
, 16, 5);
20859 rd
= extract32(ctx
->opcode
, 11, 5);
20861 op
= extract32(ctx
->opcode
, 26, 6);
20866 switch (extract32(ctx
->opcode
, 19, 2)) {
20869 generate_exception_end(ctx
, EXCP_RI
);
20872 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20873 generate_exception_end(ctx
, EXCP_SYSCALL
);
20875 generate_exception_end(ctx
, EXCP_RI
);
20879 generate_exception_end(ctx
, EXCP_BREAK
);
20882 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20883 gen_helper_do_semihosting(cpu_env
);
20885 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20886 generate_exception_end(ctx
, EXCP_RI
);
20888 generate_exception_end(ctx
, EXCP_DBp
);
20895 imm
= extract32(ctx
->opcode
, 0, 16);
20897 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20899 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20901 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20906 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20907 extract32(ctx
->opcode
, 1, 20) << 1;
20908 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20909 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20913 switch (ctx
->opcode
& 0x07) {
20915 gen_pool32a0_nanomips_insn(env
, ctx
);
20919 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20920 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20924 switch (extract32(ctx
->opcode
, 3, 3)) {
20926 gen_p_lsx(ctx
, rd
, rs
, rt
);
20929 /* In nanoMIPS, the shift field directly encodes the shift
20930 * amount, meaning that the supported shift values are in
20931 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20932 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20933 extract32(ctx
->opcode
, 9, 2) - 1);
20936 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20939 gen_pool32axf_nanomips_insn(env
, ctx
);
20942 generate_exception_end(ctx
, EXCP_RI
);
20947 generate_exception_end(ctx
, EXCP_RI
);
20952 switch (ctx
->opcode
& 0x03) {
20955 offset
= extract32(ctx
->opcode
, 0, 21);
20956 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20960 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20963 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20966 generate_exception_end(ctx
, EXCP_RI
);
20972 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20973 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20974 switch (extract32(ctx
->opcode
, 16, 5)) {
20978 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20984 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20985 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20991 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20997 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21000 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21007 t0
= tcg_temp_new();
21009 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21012 tcg_gen_movi_tl(t0
, addr
);
21013 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21021 t0
= tcg_temp_new();
21022 t1
= tcg_temp_new();
21024 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21027 tcg_gen_movi_tl(t0
, addr
);
21028 gen_load_gpr(t1
, rt
);
21030 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21037 generate_exception_end(ctx
, EXCP_RI
);
21043 switch (extract32(ctx
->opcode
, 12, 4)) {
21045 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21048 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21051 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21054 switch (extract32(ctx
->opcode
, 20, 1)) {
21056 switch (ctx
->opcode
& 3) {
21058 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21059 extract32(ctx
->opcode
, 2, 1),
21060 extract32(ctx
->opcode
, 3, 9) << 3);
21063 case NM_RESTORE_JRC
:
21064 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21065 extract32(ctx
->opcode
, 2, 1),
21066 extract32(ctx
->opcode
, 3, 9) << 3);
21067 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21068 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21072 generate_exception_end(ctx
, EXCP_RI
);
21077 generate_exception_end(ctx
, EXCP_RI
);
21082 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21085 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21089 TCGv t0
= tcg_temp_new();
21091 imm
= extract32(ctx
->opcode
, 0, 12);
21092 gen_load_gpr(t0
, rs
);
21093 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21094 gen_store_gpr(t0
, rt
);
21100 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21101 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21105 int shift
= extract32(ctx
->opcode
, 0, 5);
21106 switch (extract32(ctx
->opcode
, 5, 4)) {
21108 if (rt
== 0 && shift
== 0) {
21110 } else if (rt
== 0 && shift
== 3) {
21111 /* EHB - treat as NOP */
21112 } else if (rt
== 0 && shift
== 5) {
21113 /* PAUSE - treat as NOP */
21114 } else if (rt
== 0 && shift
== 6) {
21116 gen_sync(extract32(ctx
->opcode
, 16, 5));
21119 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21120 extract32(ctx
->opcode
, 0, 5));
21124 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21125 extract32(ctx
->opcode
, 0, 5));
21128 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21129 extract32(ctx
->opcode
, 0, 5));
21132 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21133 extract32(ctx
->opcode
, 0, 5));
21141 TCGv t0
= tcg_temp_new();
21142 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21143 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21145 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21147 gen_load_gpr(t0
, rs
);
21148 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21151 tcg_temp_free_i32(shift
);
21152 tcg_temp_free_i32(shiftx
);
21153 tcg_temp_free_i32(stripe
);
21157 switch (((ctx
->opcode
>> 10) & 2) |
21158 (extract32(ctx
->opcode
, 5, 1))) {
21161 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21162 extract32(ctx
->opcode
, 6, 5));
21165 generate_exception_end(ctx
, EXCP_RI
);
21170 switch (((ctx
->opcode
>> 10) & 2) |
21171 (extract32(ctx
->opcode
, 5, 1))) {
21174 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21175 extract32(ctx
->opcode
, 6, 5));
21178 generate_exception_end(ctx
, EXCP_RI
);
21183 generate_exception_end(ctx
, EXCP_RI
);
21188 gen_pool32f_nanomips_insn(ctx
);
21193 switch (extract32(ctx
->opcode
, 1, 1)) {
21196 tcg_gen_movi_tl(cpu_gpr
[rt
],
21197 sextract32(ctx
->opcode
, 0, 1) << 31 |
21198 extract32(ctx
->opcode
, 2, 10) << 21 |
21199 extract32(ctx
->opcode
, 12, 9) << 12);
21204 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21205 extract32(ctx
->opcode
, 2, 10) << 21 |
21206 extract32(ctx
->opcode
, 12, 9) << 12;
21208 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21209 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21216 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21218 switch (extract32(ctx
->opcode
, 18, 3)) {
21220 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21223 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21226 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21230 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21235 switch (ctx
->opcode
& 1) {
21237 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21240 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21246 switch (ctx
->opcode
& 1) {
21248 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21251 generate_exception_end(ctx
, EXCP_RI
);
21257 switch (ctx
->opcode
& 0x3) {
21259 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21262 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21265 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21268 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21273 generate_exception_end(ctx
, EXCP_RI
);
21280 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21282 switch (extract32(ctx
->opcode
, 12, 4)) {
21286 /* Break the TB to be able to sync copied instructions
21288 ctx
->base
.is_jmp
= DISAS_STOP
;
21291 /* Treat as NOP. */
21295 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21298 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21301 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21304 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21307 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21310 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21313 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21316 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21319 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21322 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21325 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21328 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21331 generate_exception_end(ctx
, EXCP_RI
);
21338 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21339 extract32(ctx
->opcode
, 0, 8);
21341 switch (extract32(ctx
->opcode
, 8, 3)) {
21343 switch (extract32(ctx
->opcode
, 11, 4)) {
21345 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21348 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21351 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21354 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21357 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21360 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21363 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21366 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21369 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21372 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21375 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21378 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21383 /* Break the TB to be able to sync copied instructions
21385 ctx
->base
.is_jmp
= DISAS_STOP
;
21388 /* Treat as NOP. */
21392 generate_exception_end(ctx
, EXCP_RI
);
21397 switch (extract32(ctx
->opcode
, 11, 4)) {
21402 TCGv t0
= tcg_temp_new();
21403 TCGv t1
= tcg_temp_new();
21405 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21407 switch (extract32(ctx
->opcode
, 11, 4)) {
21409 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21411 gen_store_gpr(t0
, rt
);
21414 gen_load_gpr(t1
, rt
);
21415 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21424 switch (ctx
->opcode
& 0x03) {
21426 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21430 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21435 switch (ctx
->opcode
& 0x03) {
21437 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21441 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21446 check_cp0_enabled(ctx
);
21447 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21448 gen_cache_operation(ctx
, rt
, rs
, s
);
21454 switch (extract32(ctx
->opcode
, 11, 4)) {
21457 check_cp0_enabled(ctx
);
21458 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21462 check_cp0_enabled(ctx
);
21463 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21467 check_cp0_enabled(ctx
);
21468 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21472 /* case NM_SYNCIE */
21474 check_cp0_enabled(ctx
);
21475 /* Break the TB to be able to sync copied instructions
21477 ctx
->base
.is_jmp
= DISAS_STOP
;
21479 /* case NM_PREFE */
21481 check_cp0_enabled(ctx
);
21482 /* Treat as NOP. */
21487 check_cp0_enabled(ctx
);
21488 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21492 check_cp0_enabled(ctx
);
21493 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21497 check_cp0_enabled(ctx
);
21498 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21501 check_nms_dl_il_sl_tl_l2c(ctx
);
21502 gen_cache_operation(ctx
, rt
, rs
, s
);
21506 check_cp0_enabled(ctx
);
21507 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21511 check_cp0_enabled(ctx
);
21512 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21515 switch (extract32(ctx
->opcode
, 2, 2)) {
21519 check_cp0_enabled(ctx
);
21520 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21525 check_cp0_enabled(ctx
);
21526 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21529 generate_exception_end(ctx
, EXCP_RI
);
21534 switch (extract32(ctx
->opcode
, 2, 2)) {
21538 check_cp0_enabled(ctx
);
21539 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21544 check_cp0_enabled(ctx
);
21545 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21548 generate_exception_end(ctx
, EXCP_RI
);
21558 int count
= extract32(ctx
->opcode
, 12, 3);
21561 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21562 extract32(ctx
->opcode
, 0, 8);
21563 TCGv va
= tcg_temp_new();
21564 TCGv t1
= tcg_temp_new();
21565 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21566 NM_P_LS_UAWM
? MO_UNALN
: 0;
21568 count
= (count
== 0) ? 8 : count
;
21569 while (counter
!= count
) {
21570 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21571 int this_offset
= offset
+ (counter
<< 2);
21573 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21575 switch (extract32(ctx
->opcode
, 11, 1)) {
21577 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21579 gen_store_gpr(t1
, this_rt
);
21580 if ((this_rt
== rs
) &&
21581 (counter
!= (count
- 1))) {
21582 /* UNPREDICTABLE */
21586 this_rt
= (rt
== 0) ? 0 : this_rt
;
21587 gen_load_gpr(t1
, this_rt
);
21588 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21599 generate_exception_end(ctx
, EXCP_RI
);
21607 TCGv t0
= tcg_temp_new();
21608 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21609 extract32(ctx
->opcode
, 1, 20) << 1;
21610 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21611 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21612 extract32(ctx
->opcode
, 21, 3));
21613 gen_load_gpr(t0
, rt
);
21614 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21615 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21621 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21622 extract32(ctx
->opcode
, 1, 24) << 1;
21624 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21626 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21629 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21634 switch (extract32(ctx
->opcode
, 12, 4)) {
21637 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21640 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21643 generate_exception_end(ctx
, EXCP_RI
);
21649 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21650 extract32(ctx
->opcode
, 1, 13) << 1;
21651 switch (extract32(ctx
->opcode
, 14, 2)) {
21654 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21657 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21658 extract32(ctx
->opcode
, 1, 13) << 1;
21659 check_cp1_enabled(ctx
);
21660 switch (extract32(ctx
->opcode
, 16, 5)) {
21662 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21665 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21670 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21671 extract32(ctx
->opcode
, 0, 1) << 13;
21673 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21678 generate_exception_end(ctx
, EXCP_RI
);
21684 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21686 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21690 if (rs
== rt
|| rt
== 0) {
21691 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21692 } else if (rs
== 0) {
21693 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21695 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21703 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21704 extract32(ctx
->opcode
, 1, 13) << 1;
21705 switch (extract32(ctx
->opcode
, 14, 2)) {
21708 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21711 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21713 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21715 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21719 if (rs
== 0 || rs
== rt
) {
21721 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21723 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21727 generate_exception_end(ctx
, EXCP_RI
);
21734 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21735 extract32(ctx
->opcode
, 1, 10) << 1;
21736 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21738 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21743 generate_exception_end(ctx
, EXCP_RI
);
21749 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21752 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21753 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21754 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21758 /* make sure instructions are on a halfword boundary */
21759 if (ctx
->base
.pc_next
& 0x1) {
21760 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21761 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21762 tcg_temp_free(tmp
);
21763 generate_exception_end(ctx
, EXCP_AdEL
);
21767 op
= extract32(ctx
->opcode
, 10, 6);
21770 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21773 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21774 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21777 switch (extract32(ctx
->opcode
, 3, 2)) {
21778 case NM_P16_SYSCALL
:
21779 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21780 generate_exception_end(ctx
, EXCP_SYSCALL
);
21782 generate_exception_end(ctx
, EXCP_RI
);
21786 generate_exception_end(ctx
, EXCP_BREAK
);
21789 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21790 gen_helper_do_semihosting(cpu_env
);
21792 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21793 generate_exception_end(ctx
, EXCP_RI
);
21795 generate_exception_end(ctx
, EXCP_DBp
);
21800 generate_exception_end(ctx
, EXCP_RI
);
21807 int shift
= extract32(ctx
->opcode
, 0, 3);
21809 shift
= (shift
== 0) ? 8 : shift
;
21811 switch (extract32(ctx
->opcode
, 3, 1)) {
21819 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21823 switch (ctx
->opcode
& 1) {
21825 gen_pool16c_nanomips_insn(ctx
);
21828 gen_ldxs(ctx
, rt
, rs
, rd
);
21833 switch (extract32(ctx
->opcode
, 6, 1)) {
21835 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21836 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21839 generate_exception_end(ctx
, EXCP_RI
);
21844 switch (extract32(ctx
->opcode
, 3, 1)) {
21846 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21847 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21849 case NM_P_ADDIURS5
:
21850 rt
= extract32(ctx
->opcode
, 5, 5);
21852 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21853 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21854 (extract32(ctx
->opcode
, 0, 3));
21855 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21861 switch (ctx
->opcode
& 0x1) {
21863 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21866 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21871 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21872 extract32(ctx
->opcode
, 5, 3);
21873 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21874 extract32(ctx
->opcode
, 0, 3);
21875 rt
= decode_gpr_gpr4(rt
);
21876 rs
= decode_gpr_gpr4(rs
);
21877 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21878 (extract32(ctx
->opcode
, 3, 1))) {
21881 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21885 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21888 generate_exception_end(ctx
, EXCP_RI
);
21894 int imm
= extract32(ctx
->opcode
, 0, 7);
21895 imm
= (imm
== 0x7f ? -1 : imm
);
21897 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21903 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21904 u
= (u
== 12) ? 0xff :
21905 (u
== 13) ? 0xffff : u
;
21906 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21910 offset
= extract32(ctx
->opcode
, 0, 2);
21911 switch (extract32(ctx
->opcode
, 2, 2)) {
21913 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21916 rt
= decode_gpr_gpr3_src_store(
21917 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21918 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21921 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21924 generate_exception_end(ctx
, EXCP_RI
);
21929 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21930 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21932 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21935 rt
= decode_gpr_gpr3_src_store(
21936 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21937 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21940 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21943 generate_exception_end(ctx
, EXCP_RI
);
21948 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21949 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21952 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21953 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21954 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21958 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21959 extract32(ctx
->opcode
, 5, 3);
21960 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21961 extract32(ctx
->opcode
, 0, 3);
21962 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21963 (extract32(ctx
->opcode
, 8, 1) << 2);
21964 rt
= decode_gpr_gpr4(rt
);
21965 rs
= decode_gpr_gpr4(rs
);
21966 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21970 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21971 extract32(ctx
->opcode
, 5, 3);
21972 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21973 extract32(ctx
->opcode
, 0, 3);
21974 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21975 (extract32(ctx
->opcode
, 8, 1) << 2);
21976 rt
= decode_gpr_gpr4_zero(rt
);
21977 rs
= decode_gpr_gpr4(rs
);
21978 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21981 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21982 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21985 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21986 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21987 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21990 rt
= decode_gpr_gpr3_src_store(
21991 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21992 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21993 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21994 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21997 rt
= decode_gpr_gpr3_src_store(
21998 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21999 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22000 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22003 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22004 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22005 (extract32(ctx
->opcode
, 1, 9) << 1));
22008 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22009 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22010 (extract32(ctx
->opcode
, 1, 9) << 1));
22013 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22014 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22015 (extract32(ctx
->opcode
, 1, 6) << 1));
22018 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22019 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22020 (extract32(ctx
->opcode
, 1, 6) << 1));
22023 switch (ctx
->opcode
& 0xf) {
22026 switch (extract32(ctx
->opcode
, 4, 1)) {
22028 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22029 extract32(ctx
->opcode
, 5, 5), 0, 0);
22032 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22033 extract32(ctx
->opcode
, 5, 5), 31, 0);
22040 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22041 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22042 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22043 extract32(ctx
->opcode
, 0, 4) << 1);
22050 int count
= extract32(ctx
->opcode
, 0, 4);
22051 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22053 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22054 switch (extract32(ctx
->opcode
, 8, 1)) {
22056 gen_save(ctx
, rt
, count
, 0, u
);
22058 case NM_RESTORE_JRC16
:
22059 gen_restore(ctx
, rt
, count
, 0, u
);
22060 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22069 static const int gpr2reg1
[] = {4, 5, 6, 7};
22070 static const int gpr2reg2
[] = {5, 6, 7, 8};
22072 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22073 extract32(ctx
->opcode
, 8, 1);
22074 int r1
= gpr2reg1
[rd2
];
22075 int r2
= gpr2reg2
[rd2
];
22076 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22077 extract32(ctx
->opcode
, 0, 3);
22078 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22079 extract32(ctx
->opcode
, 5, 3);
22080 TCGv t0
= tcg_temp_new();
22081 TCGv t1
= tcg_temp_new();
22082 if (op
== NM_MOVEP
) {
22085 rs
= decode_gpr_gpr4_zero(r3
);
22086 rt
= decode_gpr_gpr4_zero(r4
);
22088 rd
= decode_gpr_gpr4(r3
);
22089 re
= decode_gpr_gpr4(r4
);
22093 gen_load_gpr(t0
, rs
);
22094 gen_load_gpr(t1
, rt
);
22095 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22096 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22102 return decode_nanomips_32_48_opc(env
, ctx
);
22109 /* SmartMIPS extension to MIPS32 */
22111 #if defined(TARGET_MIPS64)
22113 /* MDMX extension to MIPS64 */
22117 /* MIPSDSP functions. */
22118 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22119 int rd
, int base
, int offset
)
22124 t0
= tcg_temp_new();
22127 gen_load_gpr(t0
, offset
);
22128 } else if (offset
== 0) {
22129 gen_load_gpr(t0
, base
);
22131 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22136 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22137 gen_store_gpr(t0
, rd
);
22140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22141 gen_store_gpr(t0
, rd
);
22144 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22145 gen_store_gpr(t0
, rd
);
22147 #if defined(TARGET_MIPS64)
22149 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22150 gen_store_gpr(t0
, rd
);
22157 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22158 int ret
, int v1
, int v2
)
22164 /* Treat as NOP. */
22168 v1_t
= tcg_temp_new();
22169 v2_t
= tcg_temp_new();
22171 gen_load_gpr(v1_t
, v1
);
22172 gen_load_gpr(v2_t
, v2
);
22175 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22176 case OPC_MULT_G_2E
:
22180 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22182 case OPC_ADDUH_R_QB
:
22183 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22186 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22188 case OPC_ADDQH_R_PH
:
22189 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22192 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22194 case OPC_ADDQH_R_W
:
22195 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22198 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22200 case OPC_SUBUH_R_QB
:
22201 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22204 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22206 case OPC_SUBQH_R_PH
:
22207 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22210 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22212 case OPC_SUBQH_R_W
:
22213 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22217 case OPC_ABSQ_S_PH_DSP
:
22219 case OPC_ABSQ_S_QB
:
22221 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22223 case OPC_ABSQ_S_PH
:
22225 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22229 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22231 case OPC_PRECEQ_W_PHL
:
22233 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22234 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22236 case OPC_PRECEQ_W_PHR
:
22238 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22239 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22240 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22242 case OPC_PRECEQU_PH_QBL
:
22244 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22246 case OPC_PRECEQU_PH_QBR
:
22248 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22250 case OPC_PRECEQU_PH_QBLA
:
22252 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22254 case OPC_PRECEQU_PH_QBRA
:
22256 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22258 case OPC_PRECEU_PH_QBL
:
22260 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22262 case OPC_PRECEU_PH_QBR
:
22264 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22266 case OPC_PRECEU_PH_QBLA
:
22268 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22270 case OPC_PRECEU_PH_QBRA
:
22272 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22276 case OPC_ADDU_QB_DSP
:
22280 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22282 case OPC_ADDQ_S_PH
:
22284 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22288 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22292 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22294 case OPC_ADDU_S_QB
:
22296 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22300 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22302 case OPC_ADDU_S_PH
:
22304 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22308 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22310 case OPC_SUBQ_S_PH
:
22312 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22316 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22320 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22322 case OPC_SUBU_S_QB
:
22324 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22328 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22330 case OPC_SUBU_S_PH
:
22332 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22336 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22340 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22344 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22346 case OPC_RADDU_W_QB
:
22348 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22352 case OPC_CMPU_EQ_QB_DSP
:
22354 case OPC_PRECR_QB_PH
:
22356 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22358 case OPC_PRECRQ_QB_PH
:
22360 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22362 case OPC_PRECR_SRA_PH_W
:
22365 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22366 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22368 tcg_temp_free_i32(sa_t
);
22371 case OPC_PRECR_SRA_R_PH_W
:
22374 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22375 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22377 tcg_temp_free_i32(sa_t
);
22380 case OPC_PRECRQ_PH_W
:
22382 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22384 case OPC_PRECRQ_RS_PH_W
:
22386 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22388 case OPC_PRECRQU_S_QB_PH
:
22390 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22394 #ifdef TARGET_MIPS64
22395 case OPC_ABSQ_S_QH_DSP
:
22397 case OPC_PRECEQ_L_PWL
:
22399 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22401 case OPC_PRECEQ_L_PWR
:
22403 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22405 case OPC_PRECEQ_PW_QHL
:
22407 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22409 case OPC_PRECEQ_PW_QHR
:
22411 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22413 case OPC_PRECEQ_PW_QHLA
:
22415 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22417 case OPC_PRECEQ_PW_QHRA
:
22419 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22421 case OPC_PRECEQU_QH_OBL
:
22423 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22425 case OPC_PRECEQU_QH_OBR
:
22427 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22429 case OPC_PRECEQU_QH_OBLA
:
22431 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22433 case OPC_PRECEQU_QH_OBRA
:
22435 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22437 case OPC_PRECEU_QH_OBL
:
22439 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22441 case OPC_PRECEU_QH_OBR
:
22443 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22445 case OPC_PRECEU_QH_OBLA
:
22447 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22449 case OPC_PRECEU_QH_OBRA
:
22451 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22453 case OPC_ABSQ_S_OB
:
22455 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22457 case OPC_ABSQ_S_PW
:
22459 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22461 case OPC_ABSQ_S_QH
:
22463 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22467 case OPC_ADDU_OB_DSP
:
22469 case OPC_RADDU_L_OB
:
22471 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22475 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22477 case OPC_SUBQ_S_PW
:
22479 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22483 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22485 case OPC_SUBQ_S_QH
:
22487 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22491 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22493 case OPC_SUBU_S_OB
:
22495 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22499 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22501 case OPC_SUBU_S_QH
:
22503 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22507 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22509 case OPC_SUBUH_R_OB
:
22511 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22515 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22517 case OPC_ADDQ_S_PW
:
22519 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22523 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22525 case OPC_ADDQ_S_QH
:
22527 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22531 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22533 case OPC_ADDU_S_OB
:
22535 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22539 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22541 case OPC_ADDU_S_QH
:
22543 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22547 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22549 case OPC_ADDUH_R_OB
:
22551 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22555 case OPC_CMPU_EQ_OB_DSP
:
22557 case OPC_PRECR_OB_QH
:
22559 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22561 case OPC_PRECR_SRA_QH_PW
:
22564 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22565 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22566 tcg_temp_free_i32(ret_t
);
22569 case OPC_PRECR_SRA_R_QH_PW
:
22572 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22573 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22574 tcg_temp_free_i32(sa_v
);
22577 case OPC_PRECRQ_OB_QH
:
22579 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22581 case OPC_PRECRQ_PW_L
:
22583 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22585 case OPC_PRECRQ_QH_PW
:
22587 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22589 case OPC_PRECRQ_RS_QH_PW
:
22591 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22593 case OPC_PRECRQU_S_OB_QH
:
22595 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22602 tcg_temp_free(v1_t
);
22603 tcg_temp_free(v2_t
);
22606 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22607 int ret
, int v1
, int v2
)
22615 /* Treat as NOP. */
22619 t0
= tcg_temp_new();
22620 v1_t
= tcg_temp_new();
22621 v2_t
= tcg_temp_new();
22623 tcg_gen_movi_tl(t0
, v1
);
22624 gen_load_gpr(v1_t
, v1
);
22625 gen_load_gpr(v2_t
, v2
);
22628 case OPC_SHLL_QB_DSP
:
22630 op2
= MASK_SHLL_QB(ctx
->opcode
);
22634 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22638 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22642 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22646 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22648 case OPC_SHLL_S_PH
:
22650 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22652 case OPC_SHLLV_S_PH
:
22654 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22658 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22660 case OPC_SHLLV_S_W
:
22662 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22666 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22670 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22674 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22678 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22682 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22684 case OPC_SHRA_R_QB
:
22686 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22690 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22692 case OPC_SHRAV_R_QB
:
22694 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22698 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22700 case OPC_SHRA_R_PH
:
22702 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22706 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22708 case OPC_SHRAV_R_PH
:
22710 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22714 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22716 case OPC_SHRAV_R_W
:
22718 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22720 default: /* Invalid */
22721 MIPS_INVAL("MASK SHLL.QB");
22722 generate_exception_end(ctx
, EXCP_RI
);
22727 #ifdef TARGET_MIPS64
22728 case OPC_SHLL_OB_DSP
:
22729 op2
= MASK_SHLL_OB(ctx
->opcode
);
22733 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22737 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22739 case OPC_SHLL_S_PW
:
22741 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22743 case OPC_SHLLV_S_PW
:
22745 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22749 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22753 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22757 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22761 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22763 case OPC_SHLL_S_QH
:
22765 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22767 case OPC_SHLLV_S_QH
:
22769 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22773 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22777 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22779 case OPC_SHRA_R_OB
:
22781 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22783 case OPC_SHRAV_R_OB
:
22785 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22789 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22793 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22795 case OPC_SHRA_R_PW
:
22797 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22799 case OPC_SHRAV_R_PW
:
22801 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22805 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22809 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22811 case OPC_SHRA_R_QH
:
22813 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22815 case OPC_SHRAV_R_QH
:
22817 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22821 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22825 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22829 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22833 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22835 default: /* Invalid */
22836 MIPS_INVAL("MASK SHLL.OB");
22837 generate_exception_end(ctx
, EXCP_RI
);
22845 tcg_temp_free(v1_t
);
22846 tcg_temp_free(v2_t
);
22849 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22850 int ret
, int v1
, int v2
, int check_ret
)
22856 if ((ret
== 0) && (check_ret
== 1)) {
22857 /* Treat as NOP. */
22861 t0
= tcg_temp_new_i32();
22862 v1_t
= tcg_temp_new();
22863 v2_t
= tcg_temp_new();
22865 tcg_gen_movi_i32(t0
, ret
);
22866 gen_load_gpr(v1_t
, v1
);
22867 gen_load_gpr(v2_t
, v2
);
22870 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22871 * the same mask and op1. */
22872 case OPC_MULT_G_2E
:
22876 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22879 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22882 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22884 case OPC_MULQ_RS_W
:
22885 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22889 case OPC_DPA_W_PH_DSP
:
22891 case OPC_DPAU_H_QBL
:
22893 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22895 case OPC_DPAU_H_QBR
:
22897 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22899 case OPC_DPSU_H_QBL
:
22901 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22903 case OPC_DPSU_H_QBR
:
22905 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22909 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22911 case OPC_DPAX_W_PH
:
22913 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22915 case OPC_DPAQ_S_W_PH
:
22917 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22919 case OPC_DPAQX_S_W_PH
:
22921 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22923 case OPC_DPAQX_SA_W_PH
:
22925 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22929 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22931 case OPC_DPSX_W_PH
:
22933 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22935 case OPC_DPSQ_S_W_PH
:
22937 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22939 case OPC_DPSQX_S_W_PH
:
22941 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22943 case OPC_DPSQX_SA_W_PH
:
22945 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22947 case OPC_MULSAQ_S_W_PH
:
22949 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22951 case OPC_DPAQ_SA_L_W
:
22953 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22955 case OPC_DPSQ_SA_L_W
:
22957 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22959 case OPC_MAQ_S_W_PHL
:
22961 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22963 case OPC_MAQ_S_W_PHR
:
22965 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22967 case OPC_MAQ_SA_W_PHL
:
22969 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22971 case OPC_MAQ_SA_W_PHR
:
22973 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22975 case OPC_MULSA_W_PH
:
22977 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22981 #ifdef TARGET_MIPS64
22982 case OPC_DPAQ_W_QH_DSP
:
22984 int ac
= ret
& 0x03;
22985 tcg_gen_movi_i32(t0
, ac
);
22990 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22994 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22998 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23002 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23006 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23008 case OPC_DPAQ_S_W_QH
:
23010 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23012 case OPC_DPAQ_SA_L_PW
:
23014 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23016 case OPC_DPAU_H_OBL
:
23018 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23020 case OPC_DPAU_H_OBR
:
23022 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23026 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23028 case OPC_DPSQ_S_W_QH
:
23030 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23032 case OPC_DPSQ_SA_L_PW
:
23034 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23036 case OPC_DPSU_H_OBL
:
23038 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23040 case OPC_DPSU_H_OBR
:
23042 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23044 case OPC_MAQ_S_L_PWL
:
23046 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23048 case OPC_MAQ_S_L_PWR
:
23050 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23052 case OPC_MAQ_S_W_QHLL
:
23054 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23056 case OPC_MAQ_SA_W_QHLL
:
23058 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23060 case OPC_MAQ_S_W_QHLR
:
23062 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23064 case OPC_MAQ_SA_W_QHLR
:
23066 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23068 case OPC_MAQ_S_W_QHRL
:
23070 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23072 case OPC_MAQ_SA_W_QHRL
:
23074 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23076 case OPC_MAQ_S_W_QHRR
:
23078 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23080 case OPC_MAQ_SA_W_QHRR
:
23082 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23084 case OPC_MULSAQ_S_L_PW
:
23086 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23088 case OPC_MULSAQ_S_W_QH
:
23090 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23096 case OPC_ADDU_QB_DSP
:
23098 case OPC_MULEU_S_PH_QBL
:
23100 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23102 case OPC_MULEU_S_PH_QBR
:
23104 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23106 case OPC_MULQ_RS_PH
:
23108 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23110 case OPC_MULEQ_S_W_PHL
:
23112 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23114 case OPC_MULEQ_S_W_PHR
:
23116 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23118 case OPC_MULQ_S_PH
:
23120 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23124 #ifdef TARGET_MIPS64
23125 case OPC_ADDU_OB_DSP
:
23127 case OPC_MULEQ_S_PW_QHL
:
23129 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23131 case OPC_MULEQ_S_PW_QHR
:
23133 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23135 case OPC_MULEU_S_QH_OBL
:
23137 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23139 case OPC_MULEU_S_QH_OBR
:
23141 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23143 case OPC_MULQ_RS_QH
:
23145 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23152 tcg_temp_free_i32(t0
);
23153 tcg_temp_free(v1_t
);
23154 tcg_temp_free(v2_t
);
23157 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23165 /* Treat as NOP. */
23169 t0
= tcg_temp_new();
23170 val_t
= tcg_temp_new();
23171 gen_load_gpr(val_t
, val
);
23174 case OPC_ABSQ_S_PH_DSP
:
23178 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23183 target_long result
;
23184 imm
= (ctx
->opcode
>> 16) & 0xFF;
23185 result
= (uint32_t)imm
<< 24 |
23186 (uint32_t)imm
<< 16 |
23187 (uint32_t)imm
<< 8 |
23189 result
= (int32_t)result
;
23190 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23195 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23196 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23197 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23198 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23199 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23200 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23205 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23206 imm
= (int16_t)(imm
<< 6) >> 6;
23207 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23208 (target_long
)((int32_t)imm
<< 16 | \
23214 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23215 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23216 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23217 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23221 #ifdef TARGET_MIPS64
23222 case OPC_ABSQ_S_QH_DSP
:
23229 imm
= (ctx
->opcode
>> 16) & 0xFF;
23230 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23231 temp
= (temp
<< 16) | temp
;
23232 temp
= (temp
<< 32) | temp
;
23233 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23241 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23242 imm
= (int16_t)(imm
<< 6) >> 6;
23243 temp
= ((target_long
)imm
<< 32) \
23244 | ((target_long
)imm
& 0xFFFFFFFF);
23245 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23253 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23254 imm
= (int16_t)(imm
<< 6) >> 6;
23256 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23257 ((uint64_t)(uint16_t)imm
<< 32) |
23258 ((uint64_t)(uint16_t)imm
<< 16) |
23259 (uint64_t)(uint16_t)imm
;
23260 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23265 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23266 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23267 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23268 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23269 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23270 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23271 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23275 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23276 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23277 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23281 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23282 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23283 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23284 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23285 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23292 tcg_temp_free(val_t
);
23295 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23296 uint32_t op1
, uint32_t op2
,
23297 int ret
, int v1
, int v2
, int check_ret
)
23303 if ((ret
== 0) && (check_ret
== 1)) {
23304 /* Treat as NOP. */
23308 t1
= tcg_temp_new();
23309 v1_t
= tcg_temp_new();
23310 v2_t
= tcg_temp_new();
23312 gen_load_gpr(v1_t
, v1
);
23313 gen_load_gpr(v2_t
, v2
);
23316 case OPC_CMPU_EQ_QB_DSP
:
23318 case OPC_CMPU_EQ_QB
:
23320 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23322 case OPC_CMPU_LT_QB
:
23324 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23326 case OPC_CMPU_LE_QB
:
23328 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23330 case OPC_CMPGU_EQ_QB
:
23332 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23334 case OPC_CMPGU_LT_QB
:
23336 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23338 case OPC_CMPGU_LE_QB
:
23340 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23342 case OPC_CMPGDU_EQ_QB
:
23344 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23345 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23346 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23347 tcg_gen_shli_tl(t1
, t1
, 24);
23348 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23350 case OPC_CMPGDU_LT_QB
:
23352 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23353 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23354 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23355 tcg_gen_shli_tl(t1
, t1
, 24);
23356 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23358 case OPC_CMPGDU_LE_QB
:
23360 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23361 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23362 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23363 tcg_gen_shli_tl(t1
, t1
, 24);
23364 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23366 case OPC_CMP_EQ_PH
:
23368 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23370 case OPC_CMP_LT_PH
:
23372 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23374 case OPC_CMP_LE_PH
:
23376 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23380 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23384 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23386 case OPC_PACKRL_PH
:
23388 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23392 #ifdef TARGET_MIPS64
23393 case OPC_CMPU_EQ_OB_DSP
:
23395 case OPC_CMP_EQ_PW
:
23397 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23399 case OPC_CMP_LT_PW
:
23401 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23403 case OPC_CMP_LE_PW
:
23405 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23407 case OPC_CMP_EQ_QH
:
23409 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23411 case OPC_CMP_LT_QH
:
23413 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23415 case OPC_CMP_LE_QH
:
23417 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23419 case OPC_CMPGDU_EQ_OB
:
23421 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23423 case OPC_CMPGDU_LT_OB
:
23425 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23427 case OPC_CMPGDU_LE_OB
:
23429 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23431 case OPC_CMPGU_EQ_OB
:
23433 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23435 case OPC_CMPGU_LT_OB
:
23437 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23439 case OPC_CMPGU_LE_OB
:
23441 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23443 case OPC_CMPU_EQ_OB
:
23445 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23447 case OPC_CMPU_LT_OB
:
23449 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23451 case OPC_CMPU_LE_OB
:
23453 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23455 case OPC_PACKRL_PW
:
23457 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23461 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23465 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23469 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23477 tcg_temp_free(v1_t
);
23478 tcg_temp_free(v2_t
);
23481 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23482 uint32_t op1
, int rt
, int rs
, int sa
)
23489 /* Treat as NOP. */
23493 t0
= tcg_temp_new();
23494 gen_load_gpr(t0
, rs
);
23497 case OPC_APPEND_DSP
:
23498 switch (MASK_APPEND(ctx
->opcode
)) {
23501 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23503 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23507 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23508 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23509 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23510 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23512 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23516 if (sa
!= 0 && sa
!= 2) {
23517 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23518 tcg_gen_ext32u_tl(t0
, t0
);
23519 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23520 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23522 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23524 default: /* Invalid */
23525 MIPS_INVAL("MASK APPEND");
23526 generate_exception_end(ctx
, EXCP_RI
);
23530 #ifdef TARGET_MIPS64
23531 case OPC_DAPPEND_DSP
:
23532 switch (MASK_DAPPEND(ctx
->opcode
)) {
23535 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23539 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23540 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23541 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23545 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23546 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23547 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23552 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23553 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23554 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23555 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23558 default: /* Invalid */
23559 MIPS_INVAL("MASK DAPPEND");
23560 generate_exception_end(ctx
, EXCP_RI
);
23569 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23570 int ret
, int v1
, int v2
, int check_ret
)
23579 if ((ret
== 0) && (check_ret
== 1)) {
23580 /* Treat as NOP. */
23584 t0
= tcg_temp_new();
23585 t1
= tcg_temp_new();
23586 v1_t
= tcg_temp_new();
23587 v2_t
= tcg_temp_new();
23589 gen_load_gpr(v1_t
, v1
);
23590 gen_load_gpr(v2_t
, v2
);
23593 case OPC_EXTR_W_DSP
:
23597 tcg_gen_movi_tl(t0
, v2
);
23598 tcg_gen_movi_tl(t1
, v1
);
23599 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23602 tcg_gen_movi_tl(t0
, v2
);
23603 tcg_gen_movi_tl(t1
, v1
);
23604 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23606 case OPC_EXTR_RS_W
:
23607 tcg_gen_movi_tl(t0
, v2
);
23608 tcg_gen_movi_tl(t1
, v1
);
23609 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23612 tcg_gen_movi_tl(t0
, v2
);
23613 tcg_gen_movi_tl(t1
, v1
);
23614 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23616 case OPC_EXTRV_S_H
:
23617 tcg_gen_movi_tl(t0
, v2
);
23618 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23621 tcg_gen_movi_tl(t0
, v2
);
23622 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23624 case OPC_EXTRV_R_W
:
23625 tcg_gen_movi_tl(t0
, v2
);
23626 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23628 case OPC_EXTRV_RS_W
:
23629 tcg_gen_movi_tl(t0
, v2
);
23630 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23633 tcg_gen_movi_tl(t0
, v2
);
23634 tcg_gen_movi_tl(t1
, v1
);
23635 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23638 tcg_gen_movi_tl(t0
, v2
);
23639 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23642 tcg_gen_movi_tl(t0
, v2
);
23643 tcg_gen_movi_tl(t1
, v1
);
23644 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23647 tcg_gen_movi_tl(t0
, v2
);
23648 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23651 imm
= (ctx
->opcode
>> 20) & 0x3F;
23652 tcg_gen_movi_tl(t0
, ret
);
23653 tcg_gen_movi_tl(t1
, imm
);
23654 gen_helper_shilo(t0
, t1
, cpu_env
);
23657 tcg_gen_movi_tl(t0
, ret
);
23658 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23661 tcg_gen_movi_tl(t0
, ret
);
23662 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23665 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23666 tcg_gen_movi_tl(t0
, imm
);
23667 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23670 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23671 tcg_gen_movi_tl(t0
, imm
);
23672 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23676 #ifdef TARGET_MIPS64
23677 case OPC_DEXTR_W_DSP
:
23681 tcg_gen_movi_tl(t0
, ret
);
23682 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23686 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23687 int ac
= (ctx
->opcode
>> 11) & 0x03;
23688 tcg_gen_movi_tl(t0
, shift
);
23689 tcg_gen_movi_tl(t1
, ac
);
23690 gen_helper_dshilo(t0
, t1
, cpu_env
);
23695 int ac
= (ctx
->opcode
>> 11) & 0x03;
23696 tcg_gen_movi_tl(t0
, ac
);
23697 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23701 tcg_gen_movi_tl(t0
, v2
);
23702 tcg_gen_movi_tl(t1
, v1
);
23704 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23707 tcg_gen_movi_tl(t0
, v2
);
23708 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23711 tcg_gen_movi_tl(t0
, v2
);
23712 tcg_gen_movi_tl(t1
, v1
);
23713 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23716 tcg_gen_movi_tl(t0
, v2
);
23717 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23720 tcg_gen_movi_tl(t0
, v2
);
23721 tcg_gen_movi_tl(t1
, v1
);
23722 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23724 case OPC_DEXTR_R_L
:
23725 tcg_gen_movi_tl(t0
, v2
);
23726 tcg_gen_movi_tl(t1
, v1
);
23727 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23729 case OPC_DEXTR_RS_L
:
23730 tcg_gen_movi_tl(t0
, v2
);
23731 tcg_gen_movi_tl(t1
, v1
);
23732 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23735 tcg_gen_movi_tl(t0
, v2
);
23736 tcg_gen_movi_tl(t1
, v1
);
23737 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23739 case OPC_DEXTR_R_W
:
23740 tcg_gen_movi_tl(t0
, v2
);
23741 tcg_gen_movi_tl(t1
, v1
);
23742 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23744 case OPC_DEXTR_RS_W
:
23745 tcg_gen_movi_tl(t0
, v2
);
23746 tcg_gen_movi_tl(t1
, v1
);
23747 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23749 case OPC_DEXTR_S_H
:
23750 tcg_gen_movi_tl(t0
, v2
);
23751 tcg_gen_movi_tl(t1
, v1
);
23752 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23754 case OPC_DEXTRV_S_H
:
23755 tcg_gen_movi_tl(t0
, v2
);
23756 tcg_gen_movi_tl(t1
, v1
);
23757 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23760 tcg_gen_movi_tl(t0
, v2
);
23761 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23763 case OPC_DEXTRV_R_L
:
23764 tcg_gen_movi_tl(t0
, v2
);
23765 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23767 case OPC_DEXTRV_RS_L
:
23768 tcg_gen_movi_tl(t0
, v2
);
23769 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23772 tcg_gen_movi_tl(t0
, v2
);
23773 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23775 case OPC_DEXTRV_R_W
:
23776 tcg_gen_movi_tl(t0
, v2
);
23777 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23779 case OPC_DEXTRV_RS_W
:
23780 tcg_gen_movi_tl(t0
, v2
);
23781 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23790 tcg_temp_free(v1_t
);
23791 tcg_temp_free(v2_t
);
23794 /* End MIPSDSP functions. */
23796 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23798 int rs
, rt
, rd
, sa
;
23801 rs
= (ctx
->opcode
>> 21) & 0x1f;
23802 rt
= (ctx
->opcode
>> 16) & 0x1f;
23803 rd
= (ctx
->opcode
>> 11) & 0x1f;
23804 sa
= (ctx
->opcode
>> 6) & 0x1f;
23806 op1
= MASK_SPECIAL(ctx
->opcode
);
23809 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23815 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23825 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23828 MIPS_INVAL("special_r6 muldiv");
23829 generate_exception_end(ctx
, EXCP_RI
);
23835 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23839 if (rt
== 0 && sa
== 1) {
23840 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23841 We need additionally to check other fields */
23842 gen_cl(ctx
, op1
, rd
, rs
);
23844 generate_exception_end(ctx
, EXCP_RI
);
23848 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23849 gen_helper_do_semihosting(cpu_env
);
23851 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23852 generate_exception_end(ctx
, EXCP_RI
);
23854 generate_exception_end(ctx
, EXCP_DBp
);
23858 #if defined(TARGET_MIPS64)
23860 check_mips_64(ctx
);
23861 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23865 if (rt
== 0 && sa
== 1) {
23866 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23867 We need additionally to check other fields */
23868 check_mips_64(ctx
);
23869 gen_cl(ctx
, op1
, rd
, rs
);
23871 generate_exception_end(ctx
, EXCP_RI
);
23879 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23889 check_mips_64(ctx
);
23890 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23893 MIPS_INVAL("special_r6 muldiv");
23894 generate_exception_end(ctx
, EXCP_RI
);
23899 default: /* Invalid */
23900 MIPS_INVAL("special_r6");
23901 generate_exception_end(ctx
, EXCP_RI
);
23906 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
23908 int rs
= extract32(ctx
->opcode
, 21, 5);
23909 int rt
= extract32(ctx
->opcode
, 16, 5);
23910 int rd
= extract32(ctx
->opcode
, 11, 5);
23911 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
23914 case OPC_MOVN
: /* Conditional move */
23916 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23918 case OPC_MFHI
: /* Move from HI/LO */
23920 gen_HILO(ctx
, op1
, 0, rd
);
23923 case OPC_MTLO
: /* Move to HI/LO */
23924 gen_HILO(ctx
, op1
, 0, rs
);
23928 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23932 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23934 #if defined(TARGET_MIPS64)
23939 check_insn_opc_user_only(ctx
, INSN_R5900
);
23940 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23944 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
23946 default: /* Invalid */
23947 MIPS_INVAL("special_tx79");
23948 generate_exception_end(ctx
, EXCP_RI
);
23953 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23955 int rs
, rt
, rd
, sa
;
23958 rs
= (ctx
->opcode
>> 21) & 0x1f;
23959 rt
= (ctx
->opcode
>> 16) & 0x1f;
23960 rd
= (ctx
->opcode
>> 11) & 0x1f;
23961 sa
= (ctx
->opcode
>> 6) & 0x1f;
23963 op1
= MASK_SPECIAL(ctx
->opcode
);
23965 case OPC_MOVN
: /* Conditional move */
23967 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23968 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
23969 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23971 case OPC_MFHI
: /* Move from HI/LO */
23973 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23976 case OPC_MTLO
: /* Move to HI/LO */
23977 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23980 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23981 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23982 check_cp1_enabled(ctx
);
23983 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23984 (ctx
->opcode
>> 16) & 1);
23986 generate_exception_err(ctx
, EXCP_CpU
, 1);
23992 check_insn(ctx
, INSN_VR54XX
);
23993 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23994 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23996 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24001 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24003 #if defined(TARGET_MIPS64)
24008 check_insn(ctx
, ISA_MIPS3
);
24009 check_mips_64(ctx
);
24010 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24014 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24017 #ifdef MIPS_STRICT_STANDARD
24018 MIPS_INVAL("SPIM");
24019 generate_exception_end(ctx
, EXCP_RI
);
24021 /* Implemented as RI exception for now. */
24022 MIPS_INVAL("spim (unofficial)");
24023 generate_exception_end(ctx
, EXCP_RI
);
24026 default: /* Invalid */
24027 MIPS_INVAL("special_legacy");
24028 generate_exception_end(ctx
, EXCP_RI
);
24033 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24035 int rs
, rt
, rd
, sa
;
24038 rs
= (ctx
->opcode
>> 21) & 0x1f;
24039 rt
= (ctx
->opcode
>> 16) & 0x1f;
24040 rd
= (ctx
->opcode
>> 11) & 0x1f;
24041 sa
= (ctx
->opcode
>> 6) & 0x1f;
24043 op1
= MASK_SPECIAL(ctx
->opcode
);
24045 case OPC_SLL
: /* Shift with immediate */
24046 if (sa
== 5 && rd
== 0 &&
24047 rs
== 0 && rt
== 0) { /* PAUSE */
24048 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24049 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24050 generate_exception_end(ctx
, EXCP_RI
);
24056 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24059 switch ((ctx
->opcode
>> 21) & 0x1f) {
24061 /* rotr is decoded as srl on non-R2 CPUs */
24062 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24067 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24070 generate_exception_end(ctx
, EXCP_RI
);
24078 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24080 case OPC_SLLV
: /* Shifts */
24082 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24085 switch ((ctx
->opcode
>> 6) & 0x1f) {
24087 /* rotrv is decoded as srlv on non-R2 CPUs */
24088 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24093 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24096 generate_exception_end(ctx
, EXCP_RI
);
24100 case OPC_SLT
: /* Set on less than */
24102 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24104 case OPC_AND
: /* Logic*/
24108 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24111 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24113 case OPC_TGE
: /* Traps */
24119 check_insn(ctx
, ISA_MIPS2
);
24120 gen_trap(ctx
, op1
, rs
, rt
, -1);
24122 case OPC_LSA
: /* OPC_PMON */
24123 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24124 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24125 decode_opc_special_r6(env
, ctx
);
24127 /* Pmon entry point, also R4010 selsl */
24128 #ifdef MIPS_STRICT_STANDARD
24129 MIPS_INVAL("PMON / selsl");
24130 generate_exception_end(ctx
, EXCP_RI
);
24132 gen_helper_0e0i(pmon
, sa
);
24137 generate_exception_end(ctx
, EXCP_SYSCALL
);
24140 generate_exception_end(ctx
, EXCP_BREAK
);
24143 check_insn(ctx
, ISA_MIPS2
);
24144 gen_sync(extract32(ctx
->opcode
, 6, 5));
24147 #if defined(TARGET_MIPS64)
24148 /* MIPS64 specific opcodes */
24153 check_insn(ctx
, ISA_MIPS3
);
24154 check_mips_64(ctx
);
24155 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24158 switch ((ctx
->opcode
>> 21) & 0x1f) {
24160 /* drotr is decoded as dsrl on non-R2 CPUs */
24161 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24166 check_insn(ctx
, ISA_MIPS3
);
24167 check_mips_64(ctx
);
24168 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24171 generate_exception_end(ctx
, EXCP_RI
);
24176 switch ((ctx
->opcode
>> 21) & 0x1f) {
24178 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24179 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24184 check_insn(ctx
, ISA_MIPS3
);
24185 check_mips_64(ctx
);
24186 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24189 generate_exception_end(ctx
, EXCP_RI
);
24197 check_insn(ctx
, ISA_MIPS3
);
24198 check_mips_64(ctx
);
24199 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24203 check_insn(ctx
, ISA_MIPS3
);
24204 check_mips_64(ctx
);
24205 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24208 switch ((ctx
->opcode
>> 6) & 0x1f) {
24210 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24211 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24216 check_insn(ctx
, ISA_MIPS3
);
24217 check_mips_64(ctx
);
24218 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24221 generate_exception_end(ctx
, EXCP_RI
);
24226 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24227 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24228 decode_opc_special_r6(env
, ctx
);
24233 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24234 decode_opc_special_r6(env
, ctx
);
24235 } else if (ctx
->insn_flags
& INSN_R5900
) {
24236 decode_opc_special_tx79(env
, ctx
);
24238 decode_opc_special_legacy(env
, ctx
);
24244 #if !defined(TARGET_MIPS64)
24246 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24247 #define MXU_APTN1_A 0
24248 #define MXU_APTN1_S 1
24250 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24251 #define MXU_APTN2_AA 0
24252 #define MXU_APTN2_AS 1
24253 #define MXU_APTN2_SA 2
24254 #define MXU_APTN2_SS 3
24256 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24257 #define MXU_EPTN2_AA 0
24258 #define MXU_EPTN2_AS 1
24259 #define MXU_EPTN2_SA 2
24260 #define MXU_EPTN2_SS 3
24262 /* MXU operand getting pattern 'optn2' */
24263 #define MXU_OPTN2_PTN0 0
24264 #define MXU_OPTN2_PTN1 1
24265 #define MXU_OPTN2_PTN2 2
24266 #define MXU_OPTN2_PTN3 3
24267 /* alternative naming scheme for 'optn2' */
24268 #define MXU_OPTN2_WW 0
24269 #define MXU_OPTN2_LW 1
24270 #define MXU_OPTN2_HW 2
24271 #define MXU_OPTN2_XW 3
24273 /* MXU operand getting pattern 'optn3' */
24274 #define MXU_OPTN3_PTN0 0
24275 #define MXU_OPTN3_PTN1 1
24276 #define MXU_OPTN3_PTN2 2
24277 #define MXU_OPTN3_PTN3 3
24278 #define MXU_OPTN3_PTN4 4
24279 #define MXU_OPTN3_PTN5 5
24280 #define MXU_OPTN3_PTN6 6
24281 #define MXU_OPTN3_PTN7 7
24285 * S32I2M XRa, rb - Register move from GRF to XRF
24287 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24292 t0
= tcg_temp_new();
24294 XRa
= extract32(ctx
->opcode
, 6, 5);
24295 Rb
= extract32(ctx
->opcode
, 16, 5);
24297 gen_load_gpr(t0
, Rb
);
24299 gen_store_mxu_gpr(t0
, XRa
);
24300 } else if (XRa
== 16) {
24301 gen_store_mxu_cr(t0
);
24308 * S32M2I XRa, rb - Register move from XRF to GRF
24310 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24315 t0
= tcg_temp_new();
24317 XRa
= extract32(ctx
->opcode
, 6, 5);
24318 Rb
= extract32(ctx
->opcode
, 16, 5);
24321 gen_load_mxu_gpr(t0
, XRa
);
24322 } else if (XRa
== 16) {
24323 gen_load_mxu_cr(t0
);
24326 gen_store_gpr(t0
, Rb
);
24332 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24334 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24337 uint32_t XRa
, Rb
, s8
, optn3
;
24339 t0
= tcg_temp_new();
24340 t1
= tcg_temp_new();
24342 XRa
= extract32(ctx
->opcode
, 6, 4);
24343 s8
= extract32(ctx
->opcode
, 10, 8);
24344 optn3
= extract32(ctx
->opcode
, 18, 3);
24345 Rb
= extract32(ctx
->opcode
, 21, 5);
24347 gen_load_gpr(t0
, Rb
);
24348 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24351 /* XRa[7:0] = tmp8 */
24352 case MXU_OPTN3_PTN0
:
24353 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24354 gen_load_mxu_gpr(t0
, XRa
);
24355 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24357 /* XRa[15:8] = tmp8 */
24358 case MXU_OPTN3_PTN1
:
24359 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24360 gen_load_mxu_gpr(t0
, XRa
);
24361 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24363 /* XRa[23:16] = tmp8 */
24364 case MXU_OPTN3_PTN2
:
24365 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24366 gen_load_mxu_gpr(t0
, XRa
);
24367 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24369 /* XRa[31:24] = tmp8 */
24370 case MXU_OPTN3_PTN3
:
24371 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24372 gen_load_mxu_gpr(t0
, XRa
);
24373 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24375 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24376 case MXU_OPTN3_PTN4
:
24377 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24378 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24380 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24381 case MXU_OPTN3_PTN5
:
24382 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24383 tcg_gen_shli_tl(t1
, t1
, 8);
24384 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24386 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24387 case MXU_OPTN3_PTN6
:
24388 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24389 tcg_gen_mov_tl(t0
, t1
);
24390 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24391 tcg_gen_shli_tl(t1
, t1
, 16);
24392 tcg_gen_or_tl(t0
, t0
, t1
);
24394 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24395 case MXU_OPTN3_PTN7
:
24396 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24397 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24398 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24402 gen_store_mxu_gpr(t0
, XRa
);
24409 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24411 static void gen_mxu_d16mul(DisasContext
*ctx
)
24413 TCGv t0
, t1
, t2
, t3
;
24414 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24416 t0
= tcg_temp_new();
24417 t1
= tcg_temp_new();
24418 t2
= tcg_temp_new();
24419 t3
= tcg_temp_new();
24421 XRa
= extract32(ctx
->opcode
, 6, 4);
24422 XRb
= extract32(ctx
->opcode
, 10, 4);
24423 XRc
= extract32(ctx
->opcode
, 14, 4);
24424 XRd
= extract32(ctx
->opcode
, 18, 4);
24425 optn2
= extract32(ctx
->opcode
, 22, 2);
24427 gen_load_mxu_gpr(t1
, XRb
);
24428 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24429 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24430 gen_load_mxu_gpr(t3
, XRc
);
24431 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24432 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24435 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24436 tcg_gen_mul_tl(t3
, t1
, t3
);
24437 tcg_gen_mul_tl(t2
, t0
, t2
);
24439 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24440 tcg_gen_mul_tl(t3
, t0
, t3
);
24441 tcg_gen_mul_tl(t2
, t0
, t2
);
24443 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24444 tcg_gen_mul_tl(t3
, t1
, t3
);
24445 tcg_gen_mul_tl(t2
, t1
, t2
);
24447 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24448 tcg_gen_mul_tl(t3
, t0
, t3
);
24449 tcg_gen_mul_tl(t2
, t1
, t2
);
24452 gen_store_mxu_gpr(t3
, XRa
);
24453 gen_store_mxu_gpr(t2
, XRd
);
24462 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24465 static void gen_mxu_d16mac(DisasContext
*ctx
)
24467 TCGv t0
, t1
, t2
, t3
;
24468 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24470 t0
= tcg_temp_new();
24471 t1
= tcg_temp_new();
24472 t2
= tcg_temp_new();
24473 t3
= tcg_temp_new();
24475 XRa
= extract32(ctx
->opcode
, 6, 4);
24476 XRb
= extract32(ctx
->opcode
, 10, 4);
24477 XRc
= extract32(ctx
->opcode
, 14, 4);
24478 XRd
= extract32(ctx
->opcode
, 18, 4);
24479 optn2
= extract32(ctx
->opcode
, 22, 2);
24480 aptn2
= extract32(ctx
->opcode
, 24, 2);
24482 gen_load_mxu_gpr(t1
, XRb
);
24483 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24484 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24486 gen_load_mxu_gpr(t3
, XRc
);
24487 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24488 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24491 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24492 tcg_gen_mul_tl(t3
, t1
, t3
);
24493 tcg_gen_mul_tl(t2
, t0
, t2
);
24495 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24496 tcg_gen_mul_tl(t3
, t0
, t3
);
24497 tcg_gen_mul_tl(t2
, t0
, t2
);
24499 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24500 tcg_gen_mul_tl(t3
, t1
, t3
);
24501 tcg_gen_mul_tl(t2
, t1
, t2
);
24503 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24504 tcg_gen_mul_tl(t3
, t0
, t3
);
24505 tcg_gen_mul_tl(t2
, t1
, t2
);
24508 gen_load_mxu_gpr(t0
, XRa
);
24509 gen_load_mxu_gpr(t1
, XRd
);
24513 tcg_gen_add_tl(t3
, t0
, t3
);
24514 tcg_gen_add_tl(t2
, t1
, t2
);
24517 tcg_gen_add_tl(t3
, t0
, t3
);
24518 tcg_gen_sub_tl(t2
, t1
, t2
);
24521 tcg_gen_sub_tl(t3
, t0
, t3
);
24522 tcg_gen_add_tl(t2
, t1
, t2
);
24525 tcg_gen_sub_tl(t3
, t0
, t3
);
24526 tcg_gen_sub_tl(t2
, t1
, t2
);
24529 gen_store_mxu_gpr(t3
, XRa
);
24530 gen_store_mxu_gpr(t2
, XRd
);
24539 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24540 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24542 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24544 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24545 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24547 t0
= tcg_temp_new();
24548 t1
= tcg_temp_new();
24549 t2
= tcg_temp_new();
24550 t3
= tcg_temp_new();
24551 t4
= tcg_temp_new();
24552 t5
= tcg_temp_new();
24553 t6
= tcg_temp_new();
24554 t7
= tcg_temp_new();
24556 XRa
= extract32(ctx
->opcode
, 6, 4);
24557 XRb
= extract32(ctx
->opcode
, 10, 4);
24558 XRc
= extract32(ctx
->opcode
, 14, 4);
24559 XRd
= extract32(ctx
->opcode
, 18, 4);
24560 sel
= extract32(ctx
->opcode
, 22, 2);
24562 gen_load_mxu_gpr(t3
, XRb
);
24563 gen_load_mxu_gpr(t7
, XRc
);
24567 tcg_gen_ext8s_tl(t0
, t3
);
24568 tcg_gen_shri_tl(t3
, t3
, 8);
24569 tcg_gen_ext8s_tl(t1
, t3
);
24570 tcg_gen_shri_tl(t3
, t3
, 8);
24571 tcg_gen_ext8s_tl(t2
, t3
);
24572 tcg_gen_shri_tl(t3
, t3
, 8);
24573 tcg_gen_ext8s_tl(t3
, t3
);
24576 tcg_gen_ext8u_tl(t0
, t3
);
24577 tcg_gen_shri_tl(t3
, t3
, 8);
24578 tcg_gen_ext8u_tl(t1
, t3
);
24579 tcg_gen_shri_tl(t3
, t3
, 8);
24580 tcg_gen_ext8u_tl(t2
, t3
);
24581 tcg_gen_shri_tl(t3
, t3
, 8);
24582 tcg_gen_ext8u_tl(t3
, t3
);
24585 tcg_gen_ext8u_tl(t4
, t7
);
24586 tcg_gen_shri_tl(t7
, t7
, 8);
24587 tcg_gen_ext8u_tl(t5
, t7
);
24588 tcg_gen_shri_tl(t7
, t7
, 8);
24589 tcg_gen_ext8u_tl(t6
, t7
);
24590 tcg_gen_shri_tl(t7
, t7
, 8);
24591 tcg_gen_ext8u_tl(t7
, t7
);
24593 tcg_gen_mul_tl(t0
, t0
, t4
);
24594 tcg_gen_mul_tl(t1
, t1
, t5
);
24595 tcg_gen_mul_tl(t2
, t2
, t6
);
24596 tcg_gen_mul_tl(t3
, t3
, t7
);
24598 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
24599 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
24600 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
24601 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
24603 tcg_gen_shli_tl(t1
, t1
, 16);
24604 tcg_gen_shli_tl(t3
, t3
, 16);
24606 tcg_gen_or_tl(t0
, t0
, t1
);
24607 tcg_gen_or_tl(t1
, t2
, t3
);
24609 gen_store_mxu_gpr(t0
, XRd
);
24610 gen_store_mxu_gpr(t1
, XRa
);
24623 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
24624 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
24626 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
24629 uint32_t XRa
, Rb
, s12
, sel
;
24631 t0
= tcg_temp_new();
24632 t1
= tcg_temp_new();
24634 XRa
= extract32(ctx
->opcode
, 6, 4);
24635 s12
= extract32(ctx
->opcode
, 10, 10);
24636 sel
= extract32(ctx
->opcode
, 20, 1);
24637 Rb
= extract32(ctx
->opcode
, 21, 5);
24639 gen_load_gpr(t0
, Rb
);
24641 tcg_gen_movi_tl(t1
, s12
);
24642 tcg_gen_shli_tl(t1
, t1
, 2);
24644 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
24646 tcg_gen_add_tl(t1
, t0
, t1
);
24647 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
24651 tcg_gen_bswap32_tl(t1
, t1
);
24653 gen_store_mxu_gpr(t1
, XRa
);
24661 * MXU instruction category: logic
24662 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24664 * S32NOR S32AND S32OR S32XOR
24668 * S32NOR XRa, XRb, XRc
24669 * Update XRa with the result of logical bitwise 'nor' operation
24670 * applied to the content of XRb and XRc.
24672 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24673 * +-----------+---------+-----+-------+-------+-------+-----------+
24674 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24675 * +-----------+---------+-----+-------+-------+-------+-----------+
24677 static void gen_mxu_S32NOR(DisasContext
*ctx
)
24679 uint32_t pad
, XRc
, XRb
, XRa
;
24681 pad
= extract32(ctx
->opcode
, 21, 5);
24682 XRc
= extract32(ctx
->opcode
, 14, 4);
24683 XRb
= extract32(ctx
->opcode
, 10, 4);
24684 XRa
= extract32(ctx
->opcode
, 6, 4);
24686 if (unlikely(pad
!= 0)) {
24687 /* opcode padding incorrect -> do nothing */
24688 } else if (unlikely(XRa
== 0)) {
24689 /* destination is zero register -> do nothing */
24690 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24691 /* both operands zero registers -> just set destination to all 1s */
24692 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
24693 } else if (unlikely(XRb
== 0)) {
24694 /* XRb zero register -> just set destination to the negation of XRc */
24695 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24696 } else if (unlikely(XRc
== 0)) {
24697 /* XRa zero register -> just set destination to the negation of XRb */
24698 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24699 } else if (unlikely(XRb
== XRc
)) {
24700 /* both operands same -> just set destination to the negation of XRb */
24701 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24703 /* the most general case */
24704 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24709 * S32AND XRa, XRb, XRc
24710 * Update XRa with the result of logical bitwise 'and' operation
24711 * applied to the content of XRb and XRc.
24713 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24714 * +-----------+---------+-----+-------+-------+-------+-----------+
24715 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24716 * +-----------+---------+-----+-------+-------+-------+-----------+
24718 static void gen_mxu_S32AND(DisasContext
*ctx
)
24720 uint32_t pad
, XRc
, XRb
, XRa
;
24722 pad
= extract32(ctx
->opcode
, 21, 5);
24723 XRc
= extract32(ctx
->opcode
, 14, 4);
24724 XRb
= extract32(ctx
->opcode
, 10, 4);
24725 XRa
= extract32(ctx
->opcode
, 6, 4);
24727 if (unlikely(pad
!= 0)) {
24728 /* opcode padding incorrect -> do nothing */
24729 } else if (unlikely(XRa
== 0)) {
24730 /* destination is zero register -> do nothing */
24731 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24732 /* one of operands zero register -> just set destination to all 0s */
24733 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24734 } else if (unlikely(XRb
== XRc
)) {
24735 /* both operands same -> just set destination to one of them */
24736 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24738 /* the most general case */
24739 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24744 * S32OR XRa, XRb, XRc
24745 * Update XRa with the result of logical bitwise 'or' operation
24746 * applied to the content of XRb and XRc.
24748 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24749 * +-----------+---------+-----+-------+-------+-------+-----------+
24750 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24751 * +-----------+---------+-----+-------+-------+-------+-----------+
24753 static void gen_mxu_S32OR(DisasContext
*ctx
)
24755 uint32_t pad
, XRc
, XRb
, XRa
;
24757 pad
= extract32(ctx
->opcode
, 21, 5);
24758 XRc
= extract32(ctx
->opcode
, 14, 4);
24759 XRb
= extract32(ctx
->opcode
, 10, 4);
24760 XRa
= extract32(ctx
->opcode
, 6, 4);
24762 if (unlikely(pad
!= 0)) {
24763 /* opcode padding incorrect -> do nothing */
24764 } else if (unlikely(XRa
== 0)) {
24765 /* destination is zero register -> do nothing */
24766 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24767 /* both operands zero registers -> just set destination to all 0s */
24768 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24769 } else if (unlikely(XRb
== 0)) {
24770 /* XRb zero register -> just set destination to the content of XRc */
24771 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24772 } else if (unlikely(XRc
== 0)) {
24773 /* XRc zero register -> just set destination to the content of XRb */
24774 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24775 } else if (unlikely(XRb
== XRc
)) {
24776 /* both operands same -> just set destination to one of them */
24777 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24779 /* the most general case */
24780 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24785 * S32XOR XRa, XRb, XRc
24786 * Update XRa with the result of logical bitwise 'xor' operation
24787 * applied to the content of XRb and XRc.
24789 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24790 * +-----------+---------+-----+-------+-------+-------+-----------+
24791 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24792 * +-----------+---------+-----+-------+-------+-------+-----------+
24794 static void gen_mxu_S32XOR(DisasContext
*ctx
)
24796 uint32_t pad
, XRc
, XRb
, XRa
;
24798 pad
= extract32(ctx
->opcode
, 21, 5);
24799 XRc
= extract32(ctx
->opcode
, 14, 4);
24800 XRb
= extract32(ctx
->opcode
, 10, 4);
24801 XRa
= extract32(ctx
->opcode
, 6, 4);
24803 if (unlikely(pad
!= 0)) {
24804 /* opcode padding incorrect -> do nothing */
24805 } else if (unlikely(XRa
== 0)) {
24806 /* destination is zero register -> do nothing */
24807 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24808 /* both operands zero registers -> just set destination to all 0s */
24809 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24810 } else if (unlikely(XRb
== 0)) {
24811 /* XRb zero register -> just set destination to the content of XRc */
24812 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24813 } else if (unlikely(XRc
== 0)) {
24814 /* XRc zero register -> just set destination to the content of XRb */
24815 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24816 } else if (unlikely(XRb
== XRc
)) {
24817 /* both operands same -> just set destination to all 0s */
24818 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24820 /* the most general case */
24821 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24827 * MXU instruction category max/min
24828 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24830 * S32MAX D16MAX Q8MAX
24831 * S32MIN D16MIN Q8MIN
24835 * S32MAX XRa, XRb, XRc
24836 * Update XRa with the maximum of signed 32-bit integers contained
24839 * S32MIN XRa, XRb, XRc
24840 * Update XRa with the minimum of signed 32-bit integers contained
24843 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24844 * +-----------+---------+-----+-------+-------+-------+-----------+
24845 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
24846 * +-----------+---------+-----+-------+-------+-------+-----------+
24848 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
24850 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
24852 pad
= extract32(ctx
->opcode
, 21, 5);
24853 opc
= extract32(ctx
->opcode
, 18, 3);
24854 XRc
= extract32(ctx
->opcode
, 14, 4);
24855 XRb
= extract32(ctx
->opcode
, 10, 4);
24856 XRa
= extract32(ctx
->opcode
, 6, 4);
24858 if (unlikely(pad
!= 0)) {
24859 /* opcode padding incorrect -> do nothing */
24860 } else if (unlikely(XRa
== 0)) {
24861 /* destination is zero register -> do nothing */
24862 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24863 /* both operands zero registers -> just set destination to zero */
24864 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24865 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24866 /* exactly one operand is zero register - find which one is not...*/
24867 uint32_t XRx
= XRb
? XRb
: XRc
;
24868 /* ...and do max/min operation with one operand 0 */
24869 if (opc
== OPC_MXU_S32MAX
) {
24870 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
24872 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
24874 } else if (unlikely(XRb
== XRc
)) {
24875 /* both operands same -> just set destination to one of them */
24876 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24878 /* the most general case */
24879 if (opc
== OPC_MXU_S32MAX
) {
24880 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
24883 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
24891 * Update XRa with the 16-bit-wise maximums of signed integers
24892 * contained in XRb and XRc.
24895 * Update XRa with the 16-bit-wise minimums of signed integers
24896 * contained in XRb and XRc.
24898 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24899 * +-----------+---------+-----+-------+-------+-------+-----------+
24900 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
24901 * +-----------+---------+-----+-------+-------+-------+-----------+
24903 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
24905 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
24907 pad
= extract32(ctx
->opcode
, 21, 5);
24908 opc
= extract32(ctx
->opcode
, 18, 3);
24909 XRc
= extract32(ctx
->opcode
, 14, 4);
24910 XRb
= extract32(ctx
->opcode
, 10, 4);
24911 XRa
= extract32(ctx
->opcode
, 6, 4);
24913 if (unlikely(pad
!= 0)) {
24914 /* opcode padding incorrect -> do nothing */
24915 } else if (unlikely(XRc
== 0)) {
24916 /* destination is zero register -> do nothing */
24917 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
24918 /* both operands zero registers -> just set destination to zero */
24919 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
24920 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
24921 /* exactly one operand is zero register - find which one is not...*/
24922 uint32_t XRx
= XRb
? XRb
: XRc
;
24923 /* ...and do half-word-wise max/min with one operand 0 */
24924 TCGv_i32 t0
= tcg_temp_new();
24925 TCGv_i32 t1
= tcg_const_i32(0);
24927 /* the left half-word first */
24928 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
24929 if (opc
== OPC_MXU_D16MAX
) {
24930 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24932 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24935 /* the right half-word */
24936 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
24937 /* move half-words to the leftmost position */
24938 tcg_gen_shli_i32(t0
, t0
, 16);
24939 /* t0 will be max/min of t0 and t1 */
24940 if (opc
== OPC_MXU_D16MAX
) {
24941 tcg_gen_smax_i32(t0
, t0
, t1
);
24943 tcg_gen_smin_i32(t0
, t0
, t1
);
24945 /* return resulting half-words to its original position */
24946 tcg_gen_shri_i32(t0
, t0
, 16);
24947 /* finaly update the destination */
24948 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
24952 } else if (unlikely(XRb
== XRc
)) {
24953 /* both operands same -> just set destination to one of them */
24954 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24956 /* the most general case */
24957 TCGv_i32 t0
= tcg_temp_new();
24958 TCGv_i32 t1
= tcg_temp_new();
24960 /* the left half-word first */
24961 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
24962 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
24963 if (opc
== OPC_MXU_D16MAX
) {
24964 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24966 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24969 /* the right half-word */
24970 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
24971 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
24972 /* move half-words to the leftmost position */
24973 tcg_gen_shli_i32(t0
, t0
, 16);
24974 tcg_gen_shli_i32(t1
, t1
, 16);
24975 /* t0 will be max/min of t0 and t1 */
24976 if (opc
== OPC_MXU_D16MAX
) {
24977 tcg_gen_smax_i32(t0
, t0
, t1
);
24979 tcg_gen_smin_i32(t0
, t0
, t1
);
24981 /* return resulting half-words to its original position */
24982 tcg_gen_shri_i32(t0
, t0
, 16);
24983 /* finaly update the destination */
24984 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
24993 * Update XRa with the 8-bit-wise maximums of signed integers
24994 * contained in XRb and XRc.
24997 * Update XRa with the 8-bit-wise minimums of signed integers
24998 * contained in XRb and XRc.
25000 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25001 * +-----------+---------+-----+-------+-------+-------+-----------+
25002 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25003 * +-----------+---------+-----+-------+-------+-------+-----------+
25005 static void gen_mxu_Q8MAX_Q8MIN(DisasContext
*ctx
)
25007 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25009 pad
= extract32(ctx
->opcode
, 21, 5);
25010 opc
= extract32(ctx
->opcode
, 18, 3);
25011 XRc
= extract32(ctx
->opcode
, 14, 4);
25012 XRb
= extract32(ctx
->opcode
, 10, 4);
25013 XRa
= extract32(ctx
->opcode
, 6, 4);
25015 if (unlikely(pad
!= 0)) {
25016 /* opcode padding incorrect -> do nothing */
25017 } else if (unlikely(XRa
== 0)) {
25018 /* destination is zero register -> do nothing */
25019 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25020 /* both operands zero registers -> just set destination to zero */
25021 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25022 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25023 /* exactly one operand is zero register - make it be the first...*/
25024 uint32_t XRx
= XRb
? XRb
: XRc
;
25025 /* ...and do byte-wise max/min with one operand 0 */
25026 TCGv_i32 t0
= tcg_temp_new();
25027 TCGv_i32 t1
= tcg_const_i32(0);
25030 /* the leftmost byte (byte 3) first */
25031 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF000000);
25032 if (opc
== OPC_MXU_Q8MAX
) {
25033 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25035 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25038 /* bytes 2, 1, 0 */
25039 for (i
= 2; i
>= 0; i
--) {
25040 /* extract the byte */
25041 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF << (8 * i
));
25042 /* move the byte to the leftmost position */
25043 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25044 /* t0 will be max/min of t0 and t1 */
25045 if (opc
== OPC_MXU_Q8MAX
) {
25046 tcg_gen_smax_i32(t0
, t0
, t1
);
25048 tcg_gen_smin_i32(t0
, t0
, t1
);
25050 /* return resulting byte to its original position */
25051 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25052 /* finaly update the destination */
25053 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25058 } else if (unlikely(XRb
== XRc
)) {
25059 /* both operands same -> just set destination to one of them */
25060 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25062 /* the most general case */
25063 TCGv_i32 t0
= tcg_temp_new();
25064 TCGv_i32 t1
= tcg_temp_new();
25067 /* the leftmost bytes (bytes 3) first */
25068 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF000000);
25069 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25070 if (opc
== OPC_MXU_Q8MAX
) {
25071 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25073 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25076 /* bytes 2, 1, 0 */
25077 for (i
= 2; i
>= 0; i
--) {
25078 /* extract corresponding bytes */
25079 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF << (8 * i
));
25080 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF << (8 * i
));
25081 /* move the bytes to the leftmost position */
25082 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25083 tcg_gen_shli_i32(t1
, t1
, 8 * (3 - i
));
25084 /* t0 will be max/min of t0 and t1 */
25085 if (opc
== OPC_MXU_Q8MAX
) {
25086 tcg_gen_smax_i32(t0
, t0
, t1
);
25088 tcg_gen_smin_i32(t0
, t0
, t1
);
25090 /* return resulting byte to its original position */
25091 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25092 /* finaly update the destination */
25093 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25103 * Decoding engine for MXU
25104 * =======================
25109 * Decode MXU pool00
25111 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25112 * +-----------+---------+-----+-------+-------+-------+-----------+
25113 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25114 * +-----------+---------+-----+-------+-------+-------+-----------+
25117 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25119 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25122 case OPC_MXU_S32MAX
:
25123 case OPC_MXU_S32MIN
:
25124 gen_mxu_S32MAX_S32MIN(ctx
);
25126 case OPC_MXU_D16MAX
:
25127 case OPC_MXU_D16MIN
:
25128 gen_mxu_D16MAX_D16MIN(ctx
);
25130 case OPC_MXU_Q8MAX
:
25131 case OPC_MXU_Q8MIN
:
25132 gen_mxu_Q8MAX_Q8MIN(ctx
);
25134 case OPC_MXU_Q8SLT
:
25135 /* TODO: Implement emulation of Q8SLT instruction. */
25136 MIPS_INVAL("OPC_MXU_Q8SLT");
25137 generate_exception_end(ctx
, EXCP_RI
);
25139 case OPC_MXU_Q8SLTU
:
25140 /* TODO: Implement emulation of Q8SLTU instruction. */
25141 MIPS_INVAL("OPC_MXU_Q8SLTU");
25142 generate_exception_end(ctx
, EXCP_RI
);
25145 MIPS_INVAL("decode_opc_mxu");
25146 generate_exception_end(ctx
, EXCP_RI
);
25153 * Decode MXU pool01
25155 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25156 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25157 * +-----------+---------+-----+-------+-------+-------+-----------+
25158 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25159 * +-----------+---------+-----+-------+-------+-------+-----------+
25162 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25163 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25164 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25165 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25168 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25170 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25173 case OPC_MXU_S32SLT
:
25174 /* TODO: Implement emulation of S32SLT instruction. */
25175 MIPS_INVAL("OPC_MXU_S32SLT");
25176 generate_exception_end(ctx
, EXCP_RI
);
25178 case OPC_MXU_D16SLT
:
25179 /* TODO: Implement emulation of D16SLT instruction. */
25180 MIPS_INVAL("OPC_MXU_D16SLT");
25181 generate_exception_end(ctx
, EXCP_RI
);
25183 case OPC_MXU_D16AVG
:
25184 /* TODO: Implement emulation of D16AVG instruction. */
25185 MIPS_INVAL("OPC_MXU_D16AVG");
25186 generate_exception_end(ctx
, EXCP_RI
);
25188 case OPC_MXU_D16AVGR
:
25189 /* TODO: Implement emulation of D16AVGR instruction. */
25190 MIPS_INVAL("OPC_MXU_D16AVGR");
25191 generate_exception_end(ctx
, EXCP_RI
);
25193 case OPC_MXU_Q8AVG
:
25194 /* TODO: Implement emulation of Q8AVG instruction. */
25195 MIPS_INVAL("OPC_MXU_Q8AVG");
25196 generate_exception_end(ctx
, EXCP_RI
);
25198 case OPC_MXU_Q8AVGR
:
25199 /* TODO: Implement emulation of Q8AVGR instruction. */
25200 MIPS_INVAL("OPC_MXU_Q8AVGR");
25201 generate_exception_end(ctx
, EXCP_RI
);
25203 case OPC_MXU_Q8ADD
:
25204 /* TODO: Implement emulation of Q8ADD instruction. */
25205 MIPS_INVAL("OPC_MXU_Q8ADD");
25206 generate_exception_end(ctx
, EXCP_RI
);
25209 MIPS_INVAL("decode_opc_mxu");
25210 generate_exception_end(ctx
, EXCP_RI
);
25217 * Decode MXU pool02
25219 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25220 * +-----------+---------+-----+-------+-------+-------+-----------+
25221 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25222 * +-----------+---------+-----+-------+-------+-------+-----------+
25225 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25227 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25230 case OPC_MXU_S32CPS
:
25231 /* TODO: Implement emulation of S32CPS instruction. */
25232 MIPS_INVAL("OPC_MXU_S32CPS");
25233 generate_exception_end(ctx
, EXCP_RI
);
25235 case OPC_MXU_D16CPS
:
25236 /* TODO: Implement emulation of D16CPS instruction. */
25237 MIPS_INVAL("OPC_MXU_D16CPS");
25238 generate_exception_end(ctx
, EXCP_RI
);
25240 case OPC_MXU_Q8ABD
:
25241 /* TODO: Implement emulation of Q8ABD instruction. */
25242 MIPS_INVAL("OPC_MXU_Q8ABD");
25243 generate_exception_end(ctx
, EXCP_RI
);
25245 case OPC_MXU_Q16SAT
:
25246 /* TODO: Implement emulation of Q16SAT instruction. */
25247 MIPS_INVAL("OPC_MXU_Q16SAT");
25248 generate_exception_end(ctx
, EXCP_RI
);
25251 MIPS_INVAL("decode_opc_mxu");
25252 generate_exception_end(ctx
, EXCP_RI
);
25259 * Decode MXU pool03
25262 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25263 * +-----------+---+---+-------+-------+-------+-------+-----------+
25264 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25265 * +-----------+---+---+-------+-------+-------+-------+-----------+
25268 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25269 * +-----------+---+---+-------+-------+-------+-------+-----------+
25270 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25271 * +-----------+---+---+-------+-------+-------+-------+-----------+
25274 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25276 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25279 case OPC_MXU_D16MULF
:
25280 /* TODO: Implement emulation of D16MULF instruction. */
25281 MIPS_INVAL("OPC_MXU_D16MULF");
25282 generate_exception_end(ctx
, EXCP_RI
);
25284 case OPC_MXU_D16MULE
:
25285 /* TODO: Implement emulation of D16MULE instruction. */
25286 MIPS_INVAL("OPC_MXU_D16MULE");
25287 generate_exception_end(ctx
, EXCP_RI
);
25290 MIPS_INVAL("decode_opc_mxu");
25291 generate_exception_end(ctx
, EXCP_RI
);
25298 * Decode MXU pool04
25300 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25301 * +-----------+---------+-+-------------------+-------+-----------+
25302 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25303 * +-----------+---------+-+-------------------+-------+-----------+
25306 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25308 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25311 case OPC_MXU_S32LDD
:
25312 case OPC_MXU_S32LDDR
:
25313 gen_mxu_s32ldd_s32lddr(ctx
);
25316 MIPS_INVAL("decode_opc_mxu");
25317 generate_exception_end(ctx
, EXCP_RI
);
25324 * Decode MXU pool05
25326 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25327 * +-----------+---------+-+-------------------+-------+-----------+
25328 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25329 * +-----------+---------+-+-------------------+-------+-----------+
25332 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25334 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25337 case OPC_MXU_S32STD
:
25338 /* TODO: Implement emulation of S32STD instruction. */
25339 MIPS_INVAL("OPC_MXU_S32STD");
25340 generate_exception_end(ctx
, EXCP_RI
);
25342 case OPC_MXU_S32STDR
:
25343 /* TODO: Implement emulation of S32STDR instruction. */
25344 MIPS_INVAL("OPC_MXU_S32STDR");
25345 generate_exception_end(ctx
, EXCP_RI
);
25348 MIPS_INVAL("decode_opc_mxu");
25349 generate_exception_end(ctx
, EXCP_RI
);
25356 * Decode MXU pool06
25358 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25359 * +-----------+---------+---------+---+-------+-------+-----------+
25360 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25361 * +-----------+---------+---------+---+-------+-------+-----------+
25364 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25366 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25369 case OPC_MXU_S32LDDV
:
25370 /* TODO: Implement emulation of S32LDDV instruction. */
25371 MIPS_INVAL("OPC_MXU_S32LDDV");
25372 generate_exception_end(ctx
, EXCP_RI
);
25374 case OPC_MXU_S32LDDVR
:
25375 /* TODO: Implement emulation of S32LDDVR instruction. */
25376 MIPS_INVAL("OPC_MXU_S32LDDVR");
25377 generate_exception_end(ctx
, EXCP_RI
);
25380 MIPS_INVAL("decode_opc_mxu");
25381 generate_exception_end(ctx
, EXCP_RI
);
25388 * Decode MXU pool07
25390 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25391 * +-----------+---------+---------+---+-------+-------+-----------+
25392 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
25393 * +-----------+---------+---------+---+-------+-------+-----------+
25396 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
25398 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25401 case OPC_MXU_S32STDV
:
25402 /* TODO: Implement emulation of S32TDV instruction. */
25403 MIPS_INVAL("OPC_MXU_S32TDV");
25404 generate_exception_end(ctx
, EXCP_RI
);
25406 case OPC_MXU_S32STDVR
:
25407 /* TODO: Implement emulation of S32TDVR instruction. */
25408 MIPS_INVAL("OPC_MXU_S32TDVR");
25409 generate_exception_end(ctx
, EXCP_RI
);
25412 MIPS_INVAL("decode_opc_mxu");
25413 generate_exception_end(ctx
, EXCP_RI
);
25420 * Decode MXU pool08
25422 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25423 * +-----------+---------+-+-------------------+-------+-----------+
25424 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
25425 * +-----------+---------+-+-------------------+-------+-----------+
25428 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
25430 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25433 case OPC_MXU_S32LDI
:
25434 /* TODO: Implement emulation of S32LDI instruction. */
25435 MIPS_INVAL("OPC_MXU_S32LDI");
25436 generate_exception_end(ctx
, EXCP_RI
);
25438 case OPC_MXU_S32LDIR
:
25439 /* TODO: Implement emulation of S32LDIR instruction. */
25440 MIPS_INVAL("OPC_MXU_S32LDIR");
25441 generate_exception_end(ctx
, EXCP_RI
);
25444 MIPS_INVAL("decode_opc_mxu");
25445 generate_exception_end(ctx
, EXCP_RI
);
25452 * Decode MXU pool09
25454 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25455 * +-----------+---------+-+-------------------+-------+-----------+
25456 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
25457 * +-----------+---------+-+-------------------+-------+-----------+
25460 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
25462 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25465 case OPC_MXU_S32SDI
:
25466 /* TODO: Implement emulation of S32SDI instruction. */
25467 MIPS_INVAL("OPC_MXU_S32SDI");
25468 generate_exception_end(ctx
, EXCP_RI
);
25470 case OPC_MXU_S32SDIR
:
25471 /* TODO: Implement emulation of S32SDIR instruction. */
25472 MIPS_INVAL("OPC_MXU_S32SDIR");
25473 generate_exception_end(ctx
, EXCP_RI
);
25476 MIPS_INVAL("decode_opc_mxu");
25477 generate_exception_end(ctx
, EXCP_RI
);
25484 * Decode MXU pool10
25486 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25487 * +-----------+---------+---------+---+-------+-------+-----------+
25488 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
25489 * +-----------+---------+---------+---+-------+-------+-----------+
25492 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
25494 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25497 case OPC_MXU_S32LDIV
:
25498 /* TODO: Implement emulation of S32LDIV instruction. */
25499 MIPS_INVAL("OPC_MXU_S32LDIV");
25500 generate_exception_end(ctx
, EXCP_RI
);
25502 case OPC_MXU_S32LDIVR
:
25503 /* TODO: Implement emulation of S32LDIVR instruction. */
25504 MIPS_INVAL("OPC_MXU_S32LDIVR");
25505 generate_exception_end(ctx
, EXCP_RI
);
25508 MIPS_INVAL("decode_opc_mxu");
25509 generate_exception_end(ctx
, EXCP_RI
);
25516 * Decode MXU pool11
25518 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25519 * +-----------+---------+---------+---+-------+-------+-----------+
25520 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
25521 * +-----------+---------+---------+---+-------+-------+-----------+
25524 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
25526 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25529 case OPC_MXU_S32SDIV
:
25530 /* TODO: Implement emulation of S32SDIV instruction. */
25531 MIPS_INVAL("OPC_MXU_S32SDIV");
25532 generate_exception_end(ctx
, EXCP_RI
);
25534 case OPC_MXU_S32SDIVR
:
25535 /* TODO: Implement emulation of S32SDIVR instruction. */
25536 MIPS_INVAL("OPC_MXU_S32SDIVR");
25537 generate_exception_end(ctx
, EXCP_RI
);
25540 MIPS_INVAL("decode_opc_mxu");
25541 generate_exception_end(ctx
, EXCP_RI
);
25548 * Decode MXU pool12
25550 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25551 * +-----------+---+---+-------+-------+-------+-------+-----------+
25552 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
25553 * +-----------+---+---+-------+-------+-------+-------+-----------+
25556 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
25558 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25561 case OPC_MXU_D32ACC
:
25562 /* TODO: Implement emulation of D32ACC instruction. */
25563 MIPS_INVAL("OPC_MXU_D32ACC");
25564 generate_exception_end(ctx
, EXCP_RI
);
25566 case OPC_MXU_D32ACCM
:
25567 /* TODO: Implement emulation of D32ACCM instruction. */
25568 MIPS_INVAL("OPC_MXU_D32ACCM");
25569 generate_exception_end(ctx
, EXCP_RI
);
25571 case OPC_MXU_D32ASUM
:
25572 /* TODO: Implement emulation of D32ASUM instruction. */
25573 MIPS_INVAL("OPC_MXU_D32ASUM");
25574 generate_exception_end(ctx
, EXCP_RI
);
25577 MIPS_INVAL("decode_opc_mxu");
25578 generate_exception_end(ctx
, EXCP_RI
);
25585 * Decode MXU pool13
25587 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25588 * +-----------+---+---+-------+-------+-------+-------+-----------+
25589 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
25590 * +-----------+---+---+-------+-------+-------+-------+-----------+
25593 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
25595 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25598 case OPC_MXU_Q16ACC
:
25599 /* TODO: Implement emulation of Q16ACC instruction. */
25600 MIPS_INVAL("OPC_MXU_Q16ACC");
25601 generate_exception_end(ctx
, EXCP_RI
);
25603 case OPC_MXU_Q16ACCM
:
25604 /* TODO: Implement emulation of Q16ACCM instruction. */
25605 MIPS_INVAL("OPC_MXU_Q16ACCM");
25606 generate_exception_end(ctx
, EXCP_RI
);
25608 case OPC_MXU_Q16ASUM
:
25609 /* TODO: Implement emulation of Q16ASUM instruction. */
25610 MIPS_INVAL("OPC_MXU_Q16ASUM");
25611 generate_exception_end(ctx
, EXCP_RI
);
25614 MIPS_INVAL("decode_opc_mxu");
25615 generate_exception_end(ctx
, EXCP_RI
);
25622 * Decode MXU pool14
25625 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25626 * +-----------+---+---+-------+-------+-------+-------+-----------+
25627 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
25628 * +-----------+---+---+-------+-------+-------+-------+-----------+
25631 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25632 * +-----------+---+---+-------+-------+-------+-------+-----------+
25633 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
25634 * +-----------+---+---+-------+-------+-------+-------+-----------+
25637 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
25639 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25642 case OPC_MXU_Q8ADDE
:
25643 /* TODO: Implement emulation of Q8ADDE instruction. */
25644 MIPS_INVAL("OPC_MXU_Q8ADDE");
25645 generate_exception_end(ctx
, EXCP_RI
);
25647 case OPC_MXU_D8SUM
:
25648 /* TODO: Implement emulation of D8SUM instruction. */
25649 MIPS_INVAL("OPC_MXU_D8SUM");
25650 generate_exception_end(ctx
, EXCP_RI
);
25652 case OPC_MXU_D8SUMC
:
25653 /* TODO: Implement emulation of D8SUMC instruction. */
25654 MIPS_INVAL("OPC_MXU_D8SUMC");
25655 generate_exception_end(ctx
, EXCP_RI
);
25658 MIPS_INVAL("decode_opc_mxu");
25659 generate_exception_end(ctx
, EXCP_RI
);
25666 * Decode MXU pool15
25668 * S32MUL, S32MULU, S32EXTRV:
25669 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25670 * +-----------+---------+---------+---+-------+-------+-----------+
25671 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
25672 * +-----------+---------+---------+---+-------+-------+-----------+
25675 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25676 * +-----------+---------+---------+---+-------+-------+-----------+
25677 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
25678 * +-----------+---------+---------+---+-------+-------+-----------+
25681 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
25683 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
25686 case OPC_MXU_S32MUL
:
25687 /* TODO: Implement emulation of S32MUL instruction. */
25688 MIPS_INVAL("OPC_MXU_S32MUL");
25689 generate_exception_end(ctx
, EXCP_RI
);
25691 case OPC_MXU_S32MULU
:
25692 /* TODO: Implement emulation of S32MULU instruction. */
25693 MIPS_INVAL("OPC_MXU_S32MULU");
25694 generate_exception_end(ctx
, EXCP_RI
);
25696 case OPC_MXU_S32EXTR
:
25697 /* TODO: Implement emulation of S32EXTR instruction. */
25698 MIPS_INVAL("OPC_MXU_S32EXTR");
25699 generate_exception_end(ctx
, EXCP_RI
);
25701 case OPC_MXU_S32EXTRV
:
25702 /* TODO: Implement emulation of S32EXTRV instruction. */
25703 MIPS_INVAL("OPC_MXU_S32EXTRV");
25704 generate_exception_end(ctx
, EXCP_RI
);
25707 MIPS_INVAL("decode_opc_mxu");
25708 generate_exception_end(ctx
, EXCP_RI
);
25715 * Decode MXU pool16
25718 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25719 * +-----------+---------+-----+-------+-------+-------+-----------+
25720 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
25721 * +-----------+---------+-----+-------+-------+-------+-----------+
25724 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25725 * +-----------+---------+-----+-------+-------+-------+-----------+
25726 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
25727 * +-----------+---------+-----+-------+-------+-------+-----------+
25730 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25731 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25732 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25733 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25736 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25737 * +-----------+-----+---+-----+-------+---------------+-----------+
25738 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
25739 * +-----------+-----+---+-----+-------+---------------+-----------+
25741 * S32NOR, S32AND, S32OR, S32XOR:
25742 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25743 * +-----------+---------+-----+-------+-------+-------+-----------+
25744 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25745 * +-----------+---------+-----+-------+-------+-------+-----------+
25748 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
25750 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25753 case OPC_MXU_D32SARW
:
25754 /* TODO: Implement emulation of D32SARW instruction. */
25755 MIPS_INVAL("OPC_MXU_D32SARW");
25756 generate_exception_end(ctx
, EXCP_RI
);
25758 case OPC_MXU_S32ALN
:
25759 /* TODO: Implement emulation of S32ALN instruction. */
25760 MIPS_INVAL("OPC_MXU_S32ALN");
25761 generate_exception_end(ctx
, EXCP_RI
);
25763 case OPC_MXU_S32ALNI
:
25764 /* TODO: Implement emulation of S32ALNI instruction. */
25765 MIPS_INVAL("OPC_MXU_S32ALNI");
25766 generate_exception_end(ctx
, EXCP_RI
);
25768 case OPC_MXU_S32LUI
:
25769 /* TODO: Implement emulation of S32LUI instruction. */
25770 MIPS_INVAL("OPC_MXU_S32LUI");
25771 generate_exception_end(ctx
, EXCP_RI
);
25773 case OPC_MXU_S32NOR
:
25774 gen_mxu_S32NOR(ctx
);
25776 case OPC_MXU_S32AND
:
25777 gen_mxu_S32AND(ctx
);
25779 case OPC_MXU_S32OR
:
25780 gen_mxu_S32OR(ctx
);
25782 case OPC_MXU_S32XOR
:
25783 gen_mxu_S32XOR(ctx
);
25786 MIPS_INVAL("decode_opc_mxu");
25787 generate_exception_end(ctx
, EXCP_RI
);
25794 * Decode MXU pool17
25796 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25797 * +-----------+---------+---------+---+---------+-----+-----------+
25798 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL15|
25799 * +-----------+---------+---------+---+---------+-----+-----------+
25802 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
25804 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
25808 /* TODO: Implement emulation of LXW instruction. */
25809 MIPS_INVAL("OPC_MXU_LXW");
25810 generate_exception_end(ctx
, EXCP_RI
);
25813 /* TODO: Implement emulation of LXH instruction. */
25814 MIPS_INVAL("OPC_MXU_LXH");
25815 generate_exception_end(ctx
, EXCP_RI
);
25818 /* TODO: Implement emulation of LXHU instruction. */
25819 MIPS_INVAL("OPC_MXU_LXHU");
25820 generate_exception_end(ctx
, EXCP_RI
);
25823 /* TODO: Implement emulation of LXB instruction. */
25824 MIPS_INVAL("OPC_MXU_LXB");
25825 generate_exception_end(ctx
, EXCP_RI
);
25828 /* TODO: Implement emulation of LXBU instruction. */
25829 MIPS_INVAL("OPC_MXU_LXBU");
25830 generate_exception_end(ctx
, EXCP_RI
);
25833 MIPS_INVAL("decode_opc_mxu");
25834 generate_exception_end(ctx
, EXCP_RI
);
25840 * Decode MXU pool18
25842 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25843 * +-----------+---------+-----+-------+-------+-------+-----------+
25844 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
25845 * +-----------+---------+-----+-------+-------+-------+-----------+
25848 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
25850 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25853 case OPC_MXU_D32SLLV
:
25854 /* TODO: Implement emulation of D32SLLV instruction. */
25855 MIPS_INVAL("OPC_MXU_D32SLLV");
25856 generate_exception_end(ctx
, EXCP_RI
);
25858 case OPC_MXU_D32SLRV
:
25859 /* TODO: Implement emulation of D32SLRV instruction. */
25860 MIPS_INVAL("OPC_MXU_D32SLRV");
25861 generate_exception_end(ctx
, EXCP_RI
);
25863 case OPC_MXU_D32SARV
:
25864 /* TODO: Implement emulation of D32SARV instruction. */
25865 MIPS_INVAL("OPC_MXU_D32SARV");
25866 generate_exception_end(ctx
, EXCP_RI
);
25868 case OPC_MXU_Q16SLLV
:
25869 /* TODO: Implement emulation of Q16SLLV instruction. */
25870 MIPS_INVAL("OPC_MXU_Q16SLLV");
25871 generate_exception_end(ctx
, EXCP_RI
);
25873 case OPC_MXU_Q16SLRV
:
25874 /* TODO: Implement emulation of Q16SLRV instruction. */
25875 MIPS_INVAL("OPC_MXU_Q16SLRV");
25876 generate_exception_end(ctx
, EXCP_RI
);
25878 case OPC_MXU_Q16SARV
:
25879 /* TODO: Implement emulation of Q16SARV instruction. */
25880 MIPS_INVAL("OPC_MXU_Q16SARV");
25881 generate_exception_end(ctx
, EXCP_RI
);
25884 MIPS_INVAL("decode_opc_mxu");
25885 generate_exception_end(ctx
, EXCP_RI
);
25892 * Decode MXU pool19
25894 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25895 * +-----------+---+---+-------+-------+-------+-------+-----------+
25896 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
25897 * +-----------+---+---+-------+-------+-------+-------+-----------+
25900 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
25902 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25905 case OPC_MXU_Q8MUL
:
25906 case OPC_MXU_Q8MULSU
:
25907 gen_mxu_q8mul_q8mulsu(ctx
);
25910 MIPS_INVAL("decode_opc_mxu");
25911 generate_exception_end(ctx
, EXCP_RI
);
25918 * Decode MXU pool20
25920 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25921 * +-----------+---------+-----+-------+-------+-------+-----------+
25922 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
25923 * +-----------+---------+-----+-------+-------+-------+-----------+
25926 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
25928 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25931 case OPC_MXU_Q8MOVZ
:
25932 /* TODO: Implement emulation of Q8MOVZ instruction. */
25933 MIPS_INVAL("OPC_MXU_Q8MOVZ");
25934 generate_exception_end(ctx
, EXCP_RI
);
25936 case OPC_MXU_Q8MOVN
:
25937 /* TODO: Implement emulation of Q8MOVN instruction. */
25938 MIPS_INVAL("OPC_MXU_Q8MOVN");
25939 generate_exception_end(ctx
, EXCP_RI
);
25941 case OPC_MXU_D16MOVZ
:
25942 /* TODO: Implement emulation of D16MOVZ instruction. */
25943 MIPS_INVAL("OPC_MXU_D16MOVZ");
25944 generate_exception_end(ctx
, EXCP_RI
);
25946 case OPC_MXU_D16MOVN
:
25947 /* TODO: Implement emulation of D16MOVN instruction. */
25948 MIPS_INVAL("OPC_MXU_D16MOVN");
25949 generate_exception_end(ctx
, EXCP_RI
);
25951 case OPC_MXU_S32MOVZ
:
25952 /* TODO: Implement emulation of S32MOVZ instruction. */
25953 MIPS_INVAL("OPC_MXU_S32MOVZ");
25954 generate_exception_end(ctx
, EXCP_RI
);
25956 case OPC_MXU_S32MOVN
:
25957 /* TODO: Implement emulation of S32MOVN instruction. */
25958 MIPS_INVAL("OPC_MXU_S32MOVN");
25959 generate_exception_end(ctx
, EXCP_RI
);
25962 MIPS_INVAL("decode_opc_mxu");
25963 generate_exception_end(ctx
, EXCP_RI
);
25970 * Decode MXU pool21
25972 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25973 * +-----------+---+---+-------+-------+-------+-------+-----------+
25974 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
25975 * +-----------+---+---+-------+-------+-------+-------+-----------+
25978 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
25980 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25983 case OPC_MXU_Q8MAC
:
25984 /* TODO: Implement emulation of Q8MAC instruction. */
25985 MIPS_INVAL("OPC_MXU_Q8MAC");
25986 generate_exception_end(ctx
, EXCP_RI
);
25988 case OPC_MXU_Q8MACSU
:
25989 /* TODO: Implement emulation of Q8MACSU instruction. */
25990 MIPS_INVAL("OPC_MXU_Q8MACSU");
25991 generate_exception_end(ctx
, EXCP_RI
);
25994 MIPS_INVAL("decode_opc_mxu");
25995 generate_exception_end(ctx
, EXCP_RI
);
26002 * Main MXU decoding function
26004 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26005 * +-----------+---------------------------------------+-----------+
26006 * | SPECIAL2 | |x x x x x x|
26007 * +-----------+---------------------------------------+-----------+
26010 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26013 * TODO: Investigate necessity of including handling of
26014 * CLZ, CLO, SDBB in this function, as they belong to
26015 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26017 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26019 if (opcode
== OPC__MXU_MUL
) {
26020 uint32_t rs
, rt
, rd
, op1
;
26022 rs
= extract32(ctx
->opcode
, 21, 5);
26023 rt
= extract32(ctx
->opcode
, 16, 5);
26024 rd
= extract32(ctx
->opcode
, 11, 5);
26025 op1
= MASK_SPECIAL2(ctx
->opcode
);
26027 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26032 if (opcode
== OPC_MXU_S32M2I
) {
26033 gen_mxu_s32m2i(ctx
);
26037 if (opcode
== OPC_MXU_S32I2M
) {
26038 gen_mxu_s32i2m(ctx
);
26043 TCGv t_mxu_cr
= tcg_temp_new();
26044 TCGLabel
*l_exit
= gen_new_label();
26046 gen_load_mxu_cr(t_mxu_cr
);
26047 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26048 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26051 case OPC_MXU_S32MADD
:
26052 /* TODO: Implement emulation of S32MADD instruction. */
26053 MIPS_INVAL("OPC_MXU_S32MADD");
26054 generate_exception_end(ctx
, EXCP_RI
);
26056 case OPC_MXU_S32MADDU
:
26057 /* TODO: Implement emulation of S32MADDU instruction. */
26058 MIPS_INVAL("OPC_MXU_S32MADDU");
26059 generate_exception_end(ctx
, EXCP_RI
);
26061 case OPC_MXU__POOL00
:
26062 decode_opc_mxu__pool00(env
, ctx
);
26064 case OPC_MXU_S32MSUB
:
26065 /* TODO: Implement emulation of S32MSUB instruction. */
26066 MIPS_INVAL("OPC_MXU_S32MSUB");
26067 generate_exception_end(ctx
, EXCP_RI
);
26069 case OPC_MXU_S32MSUBU
:
26070 /* TODO: Implement emulation of S32MSUBU instruction. */
26071 MIPS_INVAL("OPC_MXU_S32MSUBU");
26072 generate_exception_end(ctx
, EXCP_RI
);
26074 case OPC_MXU__POOL01
:
26075 decode_opc_mxu__pool01(env
, ctx
);
26077 case OPC_MXU__POOL02
:
26078 decode_opc_mxu__pool02(env
, ctx
);
26080 case OPC_MXU_D16MUL
:
26081 gen_mxu_d16mul(ctx
);
26083 case OPC_MXU__POOL03
:
26084 decode_opc_mxu__pool03(env
, ctx
);
26086 case OPC_MXU_D16MAC
:
26087 gen_mxu_d16mac(ctx
);
26089 case OPC_MXU_D16MACF
:
26090 /* TODO: Implement emulation of D16MACF instruction. */
26091 MIPS_INVAL("OPC_MXU_D16MACF");
26092 generate_exception_end(ctx
, EXCP_RI
);
26094 case OPC_MXU_D16MADL
:
26095 /* TODO: Implement emulation of D16MADL instruction. */
26096 MIPS_INVAL("OPC_MXU_D16MADL");
26097 generate_exception_end(ctx
, EXCP_RI
);
26099 case OPC_MXU_S16MAD
:
26100 /* TODO: Implement emulation of S16MAD instruction. */
26101 MIPS_INVAL("OPC_MXU_S16MAD");
26102 generate_exception_end(ctx
, EXCP_RI
);
26104 case OPC_MXU_Q16ADD
:
26105 /* TODO: Implement emulation of Q16ADD instruction. */
26106 MIPS_INVAL("OPC_MXU_Q16ADD");
26107 generate_exception_end(ctx
, EXCP_RI
);
26109 case OPC_MXU_D16MACE
:
26110 /* TODO: Implement emulation of D16MACE instruction. */
26111 MIPS_INVAL("OPC_MXU_D16MACE");
26112 generate_exception_end(ctx
, EXCP_RI
);
26114 case OPC_MXU__POOL04
:
26115 decode_opc_mxu__pool04(env
, ctx
);
26117 case OPC_MXU__POOL05
:
26118 decode_opc_mxu__pool05(env
, ctx
);
26120 case OPC_MXU__POOL06
:
26121 decode_opc_mxu__pool06(env
, ctx
);
26123 case OPC_MXU__POOL07
:
26124 decode_opc_mxu__pool07(env
, ctx
);
26126 case OPC_MXU__POOL08
:
26127 decode_opc_mxu__pool08(env
, ctx
);
26129 case OPC_MXU__POOL09
:
26130 decode_opc_mxu__pool09(env
, ctx
);
26132 case OPC_MXU__POOL10
:
26133 decode_opc_mxu__pool10(env
, ctx
);
26135 case OPC_MXU__POOL11
:
26136 decode_opc_mxu__pool11(env
, ctx
);
26138 case OPC_MXU_D32ADD
:
26139 /* TODO: Implement emulation of D32ADD instruction. */
26140 MIPS_INVAL("OPC_MXU_D32ADD");
26141 generate_exception_end(ctx
, EXCP_RI
);
26143 case OPC_MXU__POOL12
:
26144 decode_opc_mxu__pool12(env
, ctx
);
26146 case OPC_MXU__POOL13
:
26147 decode_opc_mxu__pool13(env
, ctx
);
26149 case OPC_MXU__POOL14
:
26150 decode_opc_mxu__pool14(env
, ctx
);
26152 case OPC_MXU_Q8ACCE
:
26153 /* TODO: Implement emulation of Q8ACCE instruction. */
26154 MIPS_INVAL("OPC_MXU_Q8ACCE");
26155 generate_exception_end(ctx
, EXCP_RI
);
26157 case OPC_MXU_S8LDD
:
26158 gen_mxu_s8ldd(ctx
);
26160 case OPC_MXU_S8STD
:
26161 /* TODO: Implement emulation of S8STD instruction. */
26162 MIPS_INVAL("OPC_MXU_S8STD");
26163 generate_exception_end(ctx
, EXCP_RI
);
26165 case OPC_MXU_S8LDI
:
26166 /* TODO: Implement emulation of S8LDI instruction. */
26167 MIPS_INVAL("OPC_MXU_S8LDI");
26168 generate_exception_end(ctx
, EXCP_RI
);
26170 case OPC_MXU_S8SDI
:
26171 /* TODO: Implement emulation of S8SDI instruction. */
26172 MIPS_INVAL("OPC_MXU_S8SDI");
26173 generate_exception_end(ctx
, EXCP_RI
);
26175 case OPC_MXU__POOL15
:
26176 decode_opc_mxu__pool15(env
, ctx
);
26178 case OPC_MXU__POOL16
:
26179 decode_opc_mxu__pool16(env
, ctx
);
26181 case OPC_MXU__POOL17
:
26182 decode_opc_mxu__pool17(env
, ctx
);
26184 case OPC_MXU_S16LDD
:
26185 /* TODO: Implement emulation of S16LDD instruction. */
26186 MIPS_INVAL("OPC_MXU_S16LDD");
26187 generate_exception_end(ctx
, EXCP_RI
);
26189 case OPC_MXU_S16STD
:
26190 /* TODO: Implement emulation of S16STD instruction. */
26191 MIPS_INVAL("OPC_MXU_S16STD");
26192 generate_exception_end(ctx
, EXCP_RI
);
26194 case OPC_MXU_S16LDI
:
26195 /* TODO: Implement emulation of S16LDI instruction. */
26196 MIPS_INVAL("OPC_MXU_S16LDI");
26197 generate_exception_end(ctx
, EXCP_RI
);
26199 case OPC_MXU_S16SDI
:
26200 /* TODO: Implement emulation of S16SDI instruction. */
26201 MIPS_INVAL("OPC_MXU_S16SDI");
26202 generate_exception_end(ctx
, EXCP_RI
);
26204 case OPC_MXU_D32SLL
:
26205 /* TODO: Implement emulation of D32SLL instruction. */
26206 MIPS_INVAL("OPC_MXU_D32SLL");
26207 generate_exception_end(ctx
, EXCP_RI
);
26209 case OPC_MXU_D32SLR
:
26210 /* TODO: Implement emulation of D32SLR instruction. */
26211 MIPS_INVAL("OPC_MXU_D32SLR");
26212 generate_exception_end(ctx
, EXCP_RI
);
26214 case OPC_MXU_D32SARL
:
26215 /* TODO: Implement emulation of D32SARL instruction. */
26216 MIPS_INVAL("OPC_MXU_D32SARL");
26217 generate_exception_end(ctx
, EXCP_RI
);
26219 case OPC_MXU_D32SAR
:
26220 /* TODO: Implement emulation of D32SAR instruction. */
26221 MIPS_INVAL("OPC_MXU_D32SAR");
26222 generate_exception_end(ctx
, EXCP_RI
);
26224 case OPC_MXU_Q16SLL
:
26225 /* TODO: Implement emulation of Q16SLL instruction. */
26226 MIPS_INVAL("OPC_MXU_Q16SLL");
26227 generate_exception_end(ctx
, EXCP_RI
);
26229 case OPC_MXU_Q16SLR
:
26230 /* TODO: Implement emulation of Q16SLR instruction. */
26231 MIPS_INVAL("OPC_MXU_Q16SLR");
26232 generate_exception_end(ctx
, EXCP_RI
);
26234 case OPC_MXU__POOL18
:
26235 decode_opc_mxu__pool18(env
, ctx
);
26237 case OPC_MXU_Q16SAR
:
26238 /* TODO: Implement emulation of Q16SAR instruction. */
26239 MIPS_INVAL("OPC_MXU_Q16SAR");
26240 generate_exception_end(ctx
, EXCP_RI
);
26242 case OPC_MXU__POOL19
:
26243 decode_opc_mxu__pool19(env
, ctx
);
26245 case OPC_MXU__POOL20
:
26246 decode_opc_mxu__pool20(env
, ctx
);
26248 case OPC_MXU__POOL21
:
26249 decode_opc_mxu__pool21(env
, ctx
);
26251 case OPC_MXU_Q16SCOP
:
26252 /* TODO: Implement emulation of Q16SCOP instruction. */
26253 MIPS_INVAL("OPC_MXU_Q16SCOP");
26254 generate_exception_end(ctx
, EXCP_RI
);
26256 case OPC_MXU_Q8MADL
:
26257 /* TODO: Implement emulation of Q8MADL instruction. */
26258 MIPS_INVAL("OPC_MXU_Q8MADL");
26259 generate_exception_end(ctx
, EXCP_RI
);
26261 case OPC_MXU_S32SFL
:
26262 /* TODO: Implement emulation of S32SFL instruction. */
26263 MIPS_INVAL("OPC_MXU_S32SFL");
26264 generate_exception_end(ctx
, EXCP_RI
);
26266 case OPC_MXU_Q8SAD
:
26267 /* TODO: Implement emulation of Q8SAD instruction. */
26268 MIPS_INVAL("OPC_MXU_Q8SAD");
26269 generate_exception_end(ctx
, EXCP_RI
);
26272 MIPS_INVAL("decode_opc_mxu");
26273 generate_exception_end(ctx
, EXCP_RI
);
26276 gen_set_label(l_exit
);
26277 tcg_temp_free(t_mxu_cr
);
26281 #endif /* !defined(TARGET_MIPS64) */
26284 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26289 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26291 rs
= (ctx
->opcode
>> 21) & 0x1f;
26292 rt
= (ctx
->opcode
>> 16) & 0x1f;
26293 rd
= (ctx
->opcode
>> 11) & 0x1f;
26295 op1
= MASK_SPECIAL2(ctx
->opcode
);
26297 case OPC_MADD
: /* Multiply and add/sub */
26301 check_insn(ctx
, ISA_MIPS32
);
26302 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26305 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26308 case OPC_DIVU_G_2F
:
26309 case OPC_MULT_G_2F
:
26310 case OPC_MULTU_G_2F
:
26312 case OPC_MODU_G_2F
:
26313 check_insn(ctx
, INSN_LOONGSON2F
);
26314 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26318 check_insn(ctx
, ISA_MIPS32
);
26319 gen_cl(ctx
, op1
, rd
, rs
);
26322 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26323 gen_helper_do_semihosting(cpu_env
);
26325 /* XXX: not clear which exception should be raised
26326 * when in debug mode...
26328 check_insn(ctx
, ISA_MIPS32
);
26329 generate_exception_end(ctx
, EXCP_DBp
);
26332 #if defined(TARGET_MIPS64)
26335 check_insn(ctx
, ISA_MIPS64
);
26336 check_mips_64(ctx
);
26337 gen_cl(ctx
, op1
, rd
, rs
);
26339 case OPC_DMULT_G_2F
:
26340 case OPC_DMULTU_G_2F
:
26341 case OPC_DDIV_G_2F
:
26342 case OPC_DDIVU_G_2F
:
26343 case OPC_DMOD_G_2F
:
26344 case OPC_DMODU_G_2F
:
26345 check_insn(ctx
, INSN_LOONGSON2F
);
26346 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26349 default: /* Invalid */
26350 MIPS_INVAL("special2_legacy");
26351 generate_exception_end(ctx
, EXCP_RI
);
26356 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26358 int rs
, rt
, rd
, sa
;
26362 rs
= (ctx
->opcode
>> 21) & 0x1f;
26363 rt
= (ctx
->opcode
>> 16) & 0x1f;
26364 rd
= (ctx
->opcode
>> 11) & 0x1f;
26365 sa
= (ctx
->opcode
>> 6) & 0x1f;
26366 imm
= (int16_t)ctx
->opcode
>> 7;
26368 op1
= MASK_SPECIAL3(ctx
->opcode
);
26372 /* hint codes 24-31 are reserved and signal RI */
26373 generate_exception_end(ctx
, EXCP_RI
);
26375 /* Treat as NOP. */
26378 check_cp0_enabled(ctx
);
26379 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26380 gen_cache_operation(ctx
, rt
, rs
, imm
);
26384 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26387 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26392 /* Treat as NOP. */
26395 op2
= MASK_BSHFL(ctx
->opcode
);
26401 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
26404 gen_bitswap(ctx
, op2
, rd
, rt
);
26409 #if defined(TARGET_MIPS64)
26411 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26414 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26417 check_mips_64(ctx
);
26420 /* Treat as NOP. */
26423 op2
= MASK_DBSHFL(ctx
->opcode
);
26433 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26436 gen_bitswap(ctx
, op2
, rd
, rt
);
26443 default: /* Invalid */
26444 MIPS_INVAL("special3_r6");
26445 generate_exception_end(ctx
, EXCP_RI
);
26450 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26455 rs
= (ctx
->opcode
>> 21) & 0x1f;
26456 rt
= (ctx
->opcode
>> 16) & 0x1f;
26457 rd
= (ctx
->opcode
>> 11) & 0x1f;
26459 op1
= MASK_SPECIAL3(ctx
->opcode
);
26462 case OPC_DIVU_G_2E
:
26464 case OPC_MODU_G_2E
:
26465 case OPC_MULT_G_2E
:
26466 case OPC_MULTU_G_2E
:
26467 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26468 * the same mask and op1. */
26469 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26470 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26473 case OPC_ADDUH_R_QB
:
26475 case OPC_ADDQH_R_PH
:
26477 case OPC_ADDQH_R_W
:
26479 case OPC_SUBUH_R_QB
:
26481 case OPC_SUBQH_R_PH
:
26483 case OPC_SUBQH_R_W
:
26484 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26489 case OPC_MULQ_RS_W
:
26490 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26493 MIPS_INVAL("MASK ADDUH.QB");
26494 generate_exception_end(ctx
, EXCP_RI
);
26497 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26498 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26500 generate_exception_end(ctx
, EXCP_RI
);
26504 op2
= MASK_LX(ctx
->opcode
);
26506 #if defined(TARGET_MIPS64)
26512 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26514 default: /* Invalid */
26515 MIPS_INVAL("MASK LX");
26516 generate_exception_end(ctx
, EXCP_RI
);
26520 case OPC_ABSQ_S_PH_DSP
:
26521 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26523 case OPC_ABSQ_S_QB
:
26524 case OPC_ABSQ_S_PH
:
26526 case OPC_PRECEQ_W_PHL
:
26527 case OPC_PRECEQ_W_PHR
:
26528 case OPC_PRECEQU_PH_QBL
:
26529 case OPC_PRECEQU_PH_QBR
:
26530 case OPC_PRECEQU_PH_QBLA
:
26531 case OPC_PRECEQU_PH_QBRA
:
26532 case OPC_PRECEU_PH_QBL
:
26533 case OPC_PRECEU_PH_QBR
:
26534 case OPC_PRECEU_PH_QBLA
:
26535 case OPC_PRECEU_PH_QBRA
:
26536 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26543 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26546 MIPS_INVAL("MASK ABSQ_S.PH");
26547 generate_exception_end(ctx
, EXCP_RI
);
26551 case OPC_ADDU_QB_DSP
:
26552 op2
= MASK_ADDU_QB(ctx
->opcode
);
26555 case OPC_ADDQ_S_PH
:
26558 case OPC_ADDU_S_QB
:
26560 case OPC_ADDU_S_PH
:
26562 case OPC_SUBQ_S_PH
:
26565 case OPC_SUBU_S_QB
:
26567 case OPC_SUBU_S_PH
:
26571 case OPC_RADDU_W_QB
:
26572 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26574 case OPC_MULEU_S_PH_QBL
:
26575 case OPC_MULEU_S_PH_QBR
:
26576 case OPC_MULQ_RS_PH
:
26577 case OPC_MULEQ_S_W_PHL
:
26578 case OPC_MULEQ_S_W_PHR
:
26579 case OPC_MULQ_S_PH
:
26580 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26582 default: /* Invalid */
26583 MIPS_INVAL("MASK ADDU.QB");
26584 generate_exception_end(ctx
, EXCP_RI
);
26589 case OPC_CMPU_EQ_QB_DSP
:
26590 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26592 case OPC_PRECR_SRA_PH_W
:
26593 case OPC_PRECR_SRA_R_PH_W
:
26594 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26596 case OPC_PRECR_QB_PH
:
26597 case OPC_PRECRQ_QB_PH
:
26598 case OPC_PRECRQ_PH_W
:
26599 case OPC_PRECRQ_RS_PH_W
:
26600 case OPC_PRECRQU_S_QB_PH
:
26601 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26603 case OPC_CMPU_EQ_QB
:
26604 case OPC_CMPU_LT_QB
:
26605 case OPC_CMPU_LE_QB
:
26606 case OPC_CMP_EQ_PH
:
26607 case OPC_CMP_LT_PH
:
26608 case OPC_CMP_LE_PH
:
26609 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26611 case OPC_CMPGU_EQ_QB
:
26612 case OPC_CMPGU_LT_QB
:
26613 case OPC_CMPGU_LE_QB
:
26614 case OPC_CMPGDU_EQ_QB
:
26615 case OPC_CMPGDU_LT_QB
:
26616 case OPC_CMPGDU_LE_QB
:
26619 case OPC_PACKRL_PH
:
26620 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26622 default: /* Invalid */
26623 MIPS_INVAL("MASK CMPU.EQ.QB");
26624 generate_exception_end(ctx
, EXCP_RI
);
26628 case OPC_SHLL_QB_DSP
:
26629 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26631 case OPC_DPA_W_PH_DSP
:
26632 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26634 case OPC_DPAU_H_QBL
:
26635 case OPC_DPAU_H_QBR
:
26636 case OPC_DPSU_H_QBL
:
26637 case OPC_DPSU_H_QBR
:
26639 case OPC_DPAX_W_PH
:
26640 case OPC_DPAQ_S_W_PH
:
26641 case OPC_DPAQX_S_W_PH
:
26642 case OPC_DPAQX_SA_W_PH
:
26644 case OPC_DPSX_W_PH
:
26645 case OPC_DPSQ_S_W_PH
:
26646 case OPC_DPSQX_S_W_PH
:
26647 case OPC_DPSQX_SA_W_PH
:
26648 case OPC_MULSAQ_S_W_PH
:
26649 case OPC_DPAQ_SA_L_W
:
26650 case OPC_DPSQ_SA_L_W
:
26651 case OPC_MAQ_S_W_PHL
:
26652 case OPC_MAQ_S_W_PHR
:
26653 case OPC_MAQ_SA_W_PHL
:
26654 case OPC_MAQ_SA_W_PHR
:
26655 case OPC_MULSA_W_PH
:
26656 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26658 default: /* Invalid */
26659 MIPS_INVAL("MASK DPAW.PH");
26660 generate_exception_end(ctx
, EXCP_RI
);
26665 op2
= MASK_INSV(ctx
->opcode
);
26676 t0
= tcg_temp_new();
26677 t1
= tcg_temp_new();
26679 gen_load_gpr(t0
, rt
);
26680 gen_load_gpr(t1
, rs
);
26682 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26688 default: /* Invalid */
26689 MIPS_INVAL("MASK INSV");
26690 generate_exception_end(ctx
, EXCP_RI
);
26694 case OPC_APPEND_DSP
:
26695 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26697 case OPC_EXTR_W_DSP
:
26698 op2
= MASK_EXTR_W(ctx
->opcode
);
26702 case OPC_EXTR_RS_W
:
26704 case OPC_EXTRV_S_H
:
26706 case OPC_EXTRV_R_W
:
26707 case OPC_EXTRV_RS_W
:
26712 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26715 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26721 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26723 default: /* Invalid */
26724 MIPS_INVAL("MASK EXTR.W");
26725 generate_exception_end(ctx
, EXCP_RI
);
26729 #if defined(TARGET_MIPS64)
26730 case OPC_DDIV_G_2E
:
26731 case OPC_DDIVU_G_2E
:
26732 case OPC_DMULT_G_2E
:
26733 case OPC_DMULTU_G_2E
:
26734 case OPC_DMOD_G_2E
:
26735 case OPC_DMODU_G_2E
:
26736 check_insn(ctx
, INSN_LOONGSON2E
);
26737 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26739 case OPC_ABSQ_S_QH_DSP
:
26740 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
26742 case OPC_PRECEQ_L_PWL
:
26743 case OPC_PRECEQ_L_PWR
:
26744 case OPC_PRECEQ_PW_QHL
:
26745 case OPC_PRECEQ_PW_QHR
:
26746 case OPC_PRECEQ_PW_QHLA
:
26747 case OPC_PRECEQ_PW_QHRA
:
26748 case OPC_PRECEQU_QH_OBL
:
26749 case OPC_PRECEQU_QH_OBR
:
26750 case OPC_PRECEQU_QH_OBLA
:
26751 case OPC_PRECEQU_QH_OBRA
:
26752 case OPC_PRECEU_QH_OBL
:
26753 case OPC_PRECEU_QH_OBR
:
26754 case OPC_PRECEU_QH_OBLA
:
26755 case OPC_PRECEU_QH_OBRA
:
26756 case OPC_ABSQ_S_OB
:
26757 case OPC_ABSQ_S_PW
:
26758 case OPC_ABSQ_S_QH
:
26759 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26767 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26769 default: /* Invalid */
26770 MIPS_INVAL("MASK ABSQ_S.QH");
26771 generate_exception_end(ctx
, EXCP_RI
);
26775 case OPC_ADDU_OB_DSP
:
26776 op2
= MASK_ADDU_OB(ctx
->opcode
);
26778 case OPC_RADDU_L_OB
:
26780 case OPC_SUBQ_S_PW
:
26782 case OPC_SUBQ_S_QH
:
26784 case OPC_SUBU_S_OB
:
26786 case OPC_SUBU_S_QH
:
26788 case OPC_SUBUH_R_OB
:
26790 case OPC_ADDQ_S_PW
:
26792 case OPC_ADDQ_S_QH
:
26794 case OPC_ADDU_S_OB
:
26796 case OPC_ADDU_S_QH
:
26798 case OPC_ADDUH_R_OB
:
26799 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26801 case OPC_MULEQ_S_PW_QHL
:
26802 case OPC_MULEQ_S_PW_QHR
:
26803 case OPC_MULEU_S_QH_OBL
:
26804 case OPC_MULEU_S_QH_OBR
:
26805 case OPC_MULQ_RS_QH
:
26806 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26808 default: /* Invalid */
26809 MIPS_INVAL("MASK ADDU.OB");
26810 generate_exception_end(ctx
, EXCP_RI
);
26814 case OPC_CMPU_EQ_OB_DSP
:
26815 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
26817 case OPC_PRECR_SRA_QH_PW
:
26818 case OPC_PRECR_SRA_R_QH_PW
:
26819 /* Return value is rt. */
26820 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26822 case OPC_PRECR_OB_QH
:
26823 case OPC_PRECRQ_OB_QH
:
26824 case OPC_PRECRQ_PW_L
:
26825 case OPC_PRECRQ_QH_PW
:
26826 case OPC_PRECRQ_RS_QH_PW
:
26827 case OPC_PRECRQU_S_OB_QH
:
26828 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26830 case OPC_CMPU_EQ_OB
:
26831 case OPC_CMPU_LT_OB
:
26832 case OPC_CMPU_LE_OB
:
26833 case OPC_CMP_EQ_QH
:
26834 case OPC_CMP_LT_QH
:
26835 case OPC_CMP_LE_QH
:
26836 case OPC_CMP_EQ_PW
:
26837 case OPC_CMP_LT_PW
:
26838 case OPC_CMP_LE_PW
:
26839 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26841 case OPC_CMPGDU_EQ_OB
:
26842 case OPC_CMPGDU_LT_OB
:
26843 case OPC_CMPGDU_LE_OB
:
26844 case OPC_CMPGU_EQ_OB
:
26845 case OPC_CMPGU_LT_OB
:
26846 case OPC_CMPGU_LE_OB
:
26847 case OPC_PACKRL_PW
:
26851 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26853 default: /* Invalid */
26854 MIPS_INVAL("MASK CMPU_EQ.OB");
26855 generate_exception_end(ctx
, EXCP_RI
);
26859 case OPC_DAPPEND_DSP
:
26860 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26862 case OPC_DEXTR_W_DSP
:
26863 op2
= MASK_DEXTR_W(ctx
->opcode
);
26870 case OPC_DEXTR_R_L
:
26871 case OPC_DEXTR_RS_L
:
26873 case OPC_DEXTR_R_W
:
26874 case OPC_DEXTR_RS_W
:
26875 case OPC_DEXTR_S_H
:
26877 case OPC_DEXTRV_R_L
:
26878 case OPC_DEXTRV_RS_L
:
26879 case OPC_DEXTRV_S_H
:
26881 case OPC_DEXTRV_R_W
:
26882 case OPC_DEXTRV_RS_W
:
26883 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26888 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26890 default: /* Invalid */
26891 MIPS_INVAL("MASK EXTR.W");
26892 generate_exception_end(ctx
, EXCP_RI
);
26896 case OPC_DPAQ_W_QH_DSP
:
26897 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
26899 case OPC_DPAU_H_OBL
:
26900 case OPC_DPAU_H_OBR
:
26901 case OPC_DPSU_H_OBL
:
26902 case OPC_DPSU_H_OBR
:
26904 case OPC_DPAQ_S_W_QH
:
26906 case OPC_DPSQ_S_W_QH
:
26907 case OPC_MULSAQ_S_W_QH
:
26908 case OPC_DPAQ_SA_L_PW
:
26909 case OPC_DPSQ_SA_L_PW
:
26910 case OPC_MULSAQ_S_L_PW
:
26911 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26913 case OPC_MAQ_S_W_QHLL
:
26914 case OPC_MAQ_S_W_QHLR
:
26915 case OPC_MAQ_S_W_QHRL
:
26916 case OPC_MAQ_S_W_QHRR
:
26917 case OPC_MAQ_SA_W_QHLL
:
26918 case OPC_MAQ_SA_W_QHLR
:
26919 case OPC_MAQ_SA_W_QHRL
:
26920 case OPC_MAQ_SA_W_QHRR
:
26921 case OPC_MAQ_S_L_PWL
:
26922 case OPC_MAQ_S_L_PWR
:
26927 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26929 default: /* Invalid */
26930 MIPS_INVAL("MASK DPAQ.W.QH");
26931 generate_exception_end(ctx
, EXCP_RI
);
26935 case OPC_DINSV_DSP
:
26936 op2
= MASK_INSV(ctx
->opcode
);
26947 t0
= tcg_temp_new();
26948 t1
= tcg_temp_new();
26950 gen_load_gpr(t0
, rt
);
26951 gen_load_gpr(t1
, rs
);
26953 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26959 default: /* Invalid */
26960 MIPS_INVAL("MASK DINSV");
26961 generate_exception_end(ctx
, EXCP_RI
);
26965 case OPC_SHLL_OB_DSP
:
26966 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26969 default: /* Invalid */
26970 MIPS_INVAL("special3_legacy");
26971 generate_exception_end(ctx
, EXCP_RI
);
26976 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
26978 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
26981 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
26982 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
26983 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
26984 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
26985 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
26986 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
26987 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
26988 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
26989 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
26990 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
26991 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
26992 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
26993 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
26994 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
26995 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
26996 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
26997 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
26998 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
26999 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27000 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27001 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27002 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27003 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27004 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27005 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27006 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27009 MIPS_INVAL("TX79 MMI class MMI0");
27010 generate_exception_end(ctx
, EXCP_RI
);
27015 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27017 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27020 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27021 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27022 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27023 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27024 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27025 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27026 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27027 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27028 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27029 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27030 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27031 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27032 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27033 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27034 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27035 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27036 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27037 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27038 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27041 MIPS_INVAL("TX79 MMI class MMI1");
27042 generate_exception_end(ctx
, EXCP_RI
);
27047 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27049 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27052 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27053 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27054 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27055 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27056 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27057 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27058 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27059 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27060 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27061 case MMI_OPC_2_PCPYLD
: /* TODO: MMI_OPC_2_PCPYLD */
27062 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27063 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27064 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27065 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27066 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27067 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27068 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27069 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27070 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27071 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27072 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27073 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27074 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27077 MIPS_INVAL("TX79 MMI class MMI2");
27078 generate_exception_end(ctx
, EXCP_RI
);
27083 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27085 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27088 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27089 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27090 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27091 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27092 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27093 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27094 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27095 case MMI_OPC_3_PCPYUD
: /* TODO: MMI_OPC_3_PCPYUD */
27096 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27097 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27098 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27099 case MMI_OPC_3_PCPYH
: /* TODO: MMI_OPC_3_PCPYH */
27100 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27101 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27104 MIPS_INVAL("TX79 MMI class MMI3");
27105 generate_exception_end(ctx
, EXCP_RI
);
27110 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27112 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27113 int rs
= extract32(ctx
->opcode
, 21, 5);
27114 int rt
= extract32(ctx
->opcode
, 16, 5);
27115 int rd
= extract32(ctx
->opcode
, 11, 5);
27118 case MMI_OPC_CLASS_MMI0
:
27119 decode_mmi0(env
, ctx
);
27121 case MMI_OPC_CLASS_MMI1
:
27122 decode_mmi1(env
, ctx
);
27124 case MMI_OPC_CLASS_MMI2
:
27125 decode_mmi2(env
, ctx
);
27127 case MMI_OPC_CLASS_MMI3
:
27128 decode_mmi3(env
, ctx
);
27130 case MMI_OPC_MULT1
:
27131 case MMI_OPC_MULTU1
:
27132 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27135 case MMI_OPC_DIVU1
:
27136 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27138 case MMI_OPC_MTLO1
:
27139 case MMI_OPC_MTHI1
:
27140 gen_HILO1_tx79(ctx
, opc
, rs
);
27142 case MMI_OPC_MFLO1
:
27143 case MMI_OPC_MFHI1
:
27144 gen_HILO1_tx79(ctx
, opc
, rd
);
27146 case MMI_OPC_MADD
: /* TODO: MMI_OPC_MADD */
27147 case MMI_OPC_MADDU
: /* TODO: MMI_OPC_MADDU */
27148 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27149 case MMI_OPC_MADD1
: /* TODO: MMI_OPC_MADD1 */
27150 case MMI_OPC_MADDU1
: /* TODO: MMI_OPC_MADDU1 */
27151 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27152 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27153 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27154 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27155 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27156 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27157 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27158 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27159 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27162 MIPS_INVAL("TX79 MMI class");
27163 generate_exception_end(ctx
, EXCP_RI
);
27168 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27170 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27173 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27175 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27179 * The TX79-specific instruction Store Quadword
27181 * +--------+-------+-------+------------------------+
27182 * | 011111 | base | rt | offset | SQ
27183 * +--------+-------+-------+------------------------+
27186 * has the same opcode as the Read Hardware Register instruction
27188 * +--------+-------+-------+-------+-------+--------+
27189 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27190 * +--------+-------+-------+-------+-------+--------+
27193 * that is required, trapped and emulated by the Linux kernel. However, all
27194 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27195 * offset is odd. Therefore all valid SQ instructions can execute normally.
27196 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27197 * between SQ and RDHWR, as the Linux kernel does.
27199 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27201 int base
= extract32(ctx
->opcode
, 21, 5);
27202 int rt
= extract32(ctx
->opcode
, 16, 5);
27203 int offset
= extract32(ctx
->opcode
, 0, 16);
27205 #ifdef CONFIG_USER_ONLY
27206 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27207 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27209 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27210 int rd
= extract32(ctx
->opcode
, 11, 5);
27212 gen_rdhwr(ctx
, rt
, rd
, 0);
27217 gen_mmi_sq(ctx
, base
, rt
, offset
);
27220 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27222 int rs
, rt
, rd
, sa
;
27226 rs
= (ctx
->opcode
>> 21) & 0x1f;
27227 rt
= (ctx
->opcode
>> 16) & 0x1f;
27228 rd
= (ctx
->opcode
>> 11) & 0x1f;
27229 sa
= (ctx
->opcode
>> 6) & 0x1f;
27230 imm
= sextract32(ctx
->opcode
, 7, 9);
27232 op1
= MASK_SPECIAL3(ctx
->opcode
);
27235 * EVA loads and stores overlap Loongson 2E instructions decoded by
27236 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27243 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27251 check_cp0_enabled(ctx
);
27252 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27256 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27261 check_cp0_enabled(ctx
);
27262 gen_st(ctx
, op1
, rt
, rs
, imm
);
27265 check_cp0_enabled(ctx
);
27266 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
27269 check_cp0_enabled(ctx
);
27270 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27271 gen_cache_operation(ctx
, rt
, rs
, imm
);
27273 /* Treat as NOP. */
27276 check_cp0_enabled(ctx
);
27277 /* Treat as NOP. */
27285 check_insn(ctx
, ISA_MIPS32R2
);
27286 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27289 op2
= MASK_BSHFL(ctx
->opcode
);
27296 check_insn(ctx
, ISA_MIPS32R6
);
27297 decode_opc_special3_r6(env
, ctx
);
27300 check_insn(ctx
, ISA_MIPS32R2
);
27301 gen_bshfl(ctx
, op2
, rt
, rd
);
27305 #if defined(TARGET_MIPS64)
27312 check_insn(ctx
, ISA_MIPS64R2
);
27313 check_mips_64(ctx
);
27314 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27317 op2
= MASK_DBSHFL(ctx
->opcode
);
27328 check_insn(ctx
, ISA_MIPS32R6
);
27329 decode_opc_special3_r6(env
, ctx
);
27332 check_insn(ctx
, ISA_MIPS64R2
);
27333 check_mips_64(ctx
);
27334 op2
= MASK_DBSHFL(ctx
->opcode
);
27335 gen_bshfl(ctx
, op2
, rt
, rd
);
27341 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27346 TCGv t0
= tcg_temp_new();
27347 TCGv t1
= tcg_temp_new();
27349 gen_load_gpr(t0
, rt
);
27350 gen_load_gpr(t1
, rs
);
27351 gen_helper_fork(t0
, t1
);
27359 TCGv t0
= tcg_temp_new();
27361 gen_load_gpr(t0
, rs
);
27362 gen_helper_yield(t0
, cpu_env
, t0
);
27363 gen_store_gpr(t0
, rd
);
27368 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27369 decode_opc_special3_r6(env
, ctx
);
27371 decode_opc_special3_legacy(env
, ctx
);
27376 /* MIPS SIMD Architecture (MSA) */
27377 static inline int check_msa_access(DisasContext
*ctx
)
27379 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
27380 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
27381 generate_exception_end(ctx
, EXCP_RI
);
27385 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
27386 if (ctx
->insn_flags
& ASE_MSA
) {
27387 generate_exception_end(ctx
, EXCP_MSADIS
);
27390 generate_exception_end(ctx
, EXCP_RI
);
27397 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
27399 /* generates tcg ops to check if any element is 0 */
27400 /* Note this function only works with MSA_WRLEN = 128 */
27401 uint64_t eval_zero_or_big
= 0;
27402 uint64_t eval_big
= 0;
27403 TCGv_i64 t0
= tcg_temp_new_i64();
27404 TCGv_i64 t1
= tcg_temp_new_i64();
27407 eval_zero_or_big
= 0x0101010101010101ULL
;
27408 eval_big
= 0x8080808080808080ULL
;
27411 eval_zero_or_big
= 0x0001000100010001ULL
;
27412 eval_big
= 0x8000800080008000ULL
;
27415 eval_zero_or_big
= 0x0000000100000001ULL
;
27416 eval_big
= 0x8000000080000000ULL
;
27419 eval_zero_or_big
= 0x0000000000000001ULL
;
27420 eval_big
= 0x8000000000000000ULL
;
27423 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
27424 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
27425 tcg_gen_andi_i64(t0
, t0
, eval_big
);
27426 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
27427 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
27428 tcg_gen_andi_i64(t1
, t1
, eval_big
);
27429 tcg_gen_or_i64(t0
, t0
, t1
);
27430 /* if all bits are zero then all elements are not zero */
27431 /* if some bit is non-zero then some element is zero */
27432 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
27433 tcg_gen_trunc_i64_tl(tresult
, t0
);
27434 tcg_temp_free_i64(t0
);
27435 tcg_temp_free_i64(t1
);
27438 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
27440 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27441 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27442 int64_t s16
= (int16_t)ctx
->opcode
;
27444 check_msa_access(ctx
);
27446 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27447 generate_exception_end(ctx
, EXCP_RI
);
27454 TCGv_i64 t0
= tcg_temp_new_i64();
27455 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
27456 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
27457 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
27458 tcg_gen_trunc_i64_tl(bcond
, t0
);
27459 tcg_temp_free_i64(t0
);
27466 gen_check_zero_element(bcond
, df
, wt
);
27472 gen_check_zero_element(bcond
, df
, wt
);
27473 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
27477 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
27479 ctx
->hflags
|= MIPS_HFLAG_BC
;
27480 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
27483 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
27485 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
27486 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
27487 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27488 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27490 TCGv_i32 twd
= tcg_const_i32(wd
);
27491 TCGv_i32 tws
= tcg_const_i32(ws
);
27492 TCGv_i32 ti8
= tcg_const_i32(i8
);
27494 switch (MASK_MSA_I8(ctx
->opcode
)) {
27496 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
27499 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
27502 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
27505 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
27508 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
27511 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
27514 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
27520 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
27521 if (df
== DF_DOUBLE
) {
27522 generate_exception_end(ctx
, EXCP_RI
);
27524 TCGv_i32 tdf
= tcg_const_i32(df
);
27525 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
27526 tcg_temp_free_i32(tdf
);
27531 MIPS_INVAL("MSA instruction");
27532 generate_exception_end(ctx
, EXCP_RI
);
27536 tcg_temp_free_i32(twd
);
27537 tcg_temp_free_i32(tws
);
27538 tcg_temp_free_i32(ti8
);
27541 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
27543 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27544 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27545 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
27546 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
27547 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27548 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27550 TCGv_i32 tdf
= tcg_const_i32(df
);
27551 TCGv_i32 twd
= tcg_const_i32(wd
);
27552 TCGv_i32 tws
= tcg_const_i32(ws
);
27553 TCGv_i32 timm
= tcg_temp_new_i32();
27554 tcg_gen_movi_i32(timm
, u5
);
27556 switch (MASK_MSA_I5(ctx
->opcode
)) {
27558 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27561 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27563 case OPC_MAXI_S_df
:
27564 tcg_gen_movi_i32(timm
, s5
);
27565 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27567 case OPC_MAXI_U_df
:
27568 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27570 case OPC_MINI_S_df
:
27571 tcg_gen_movi_i32(timm
, s5
);
27572 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27574 case OPC_MINI_U_df
:
27575 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27578 tcg_gen_movi_i32(timm
, s5
);
27579 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27581 case OPC_CLTI_S_df
:
27582 tcg_gen_movi_i32(timm
, s5
);
27583 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27585 case OPC_CLTI_U_df
:
27586 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27588 case OPC_CLEI_S_df
:
27589 tcg_gen_movi_i32(timm
, s5
);
27590 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27592 case OPC_CLEI_U_df
:
27593 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27597 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
27598 tcg_gen_movi_i32(timm
, s10
);
27599 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
27603 MIPS_INVAL("MSA instruction");
27604 generate_exception_end(ctx
, EXCP_RI
);
27608 tcg_temp_free_i32(tdf
);
27609 tcg_temp_free_i32(twd
);
27610 tcg_temp_free_i32(tws
);
27611 tcg_temp_free_i32(timm
);
27614 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
27616 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27617 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
27618 uint32_t df
= 0, m
= 0;
27619 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27620 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27627 if ((dfm
& 0x40) == 0x00) {
27630 } else if ((dfm
& 0x60) == 0x40) {
27633 } else if ((dfm
& 0x70) == 0x60) {
27636 } else if ((dfm
& 0x78) == 0x70) {
27640 generate_exception_end(ctx
, EXCP_RI
);
27644 tdf
= tcg_const_i32(df
);
27645 tm
= tcg_const_i32(m
);
27646 twd
= tcg_const_i32(wd
);
27647 tws
= tcg_const_i32(ws
);
27649 switch (MASK_MSA_BIT(ctx
->opcode
)) {
27651 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27654 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
27657 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27660 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27663 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
27666 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
27668 case OPC_BINSLI_df
:
27669 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27671 case OPC_BINSRI_df
:
27672 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27675 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
27678 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
27681 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
27684 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27687 MIPS_INVAL("MSA instruction");
27688 generate_exception_end(ctx
, EXCP_RI
);
27692 tcg_temp_free_i32(tdf
);
27693 tcg_temp_free_i32(tm
);
27694 tcg_temp_free_i32(twd
);
27695 tcg_temp_free_i32(tws
);
27698 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
27700 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27701 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27702 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27703 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27704 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27706 TCGv_i32 tdf
= tcg_const_i32(df
);
27707 TCGv_i32 twd
= tcg_const_i32(wd
);
27708 TCGv_i32 tws
= tcg_const_i32(ws
);
27709 TCGv_i32 twt
= tcg_const_i32(wt
);
27711 switch (MASK_MSA_3R(ctx
->opcode
)) {
27713 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
27716 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27719 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27722 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27724 case OPC_SUBS_S_df
:
27725 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27728 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27731 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
27734 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27737 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
27740 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27742 case OPC_ADDS_A_df
:
27743 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27745 case OPC_SUBS_U_df
:
27746 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27749 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27752 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
27755 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
27758 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27761 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27764 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27766 case OPC_ADDS_S_df
:
27767 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27769 case OPC_SUBSUS_U_df
:
27770 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27773 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27776 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
27779 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27782 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27785 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27788 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27790 case OPC_ADDS_U_df
:
27791 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27793 case OPC_SUBSUU_S_df
:
27794 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27797 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
27800 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
27803 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27806 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27809 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27811 case OPC_ASUB_S_df
:
27812 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27815 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27818 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27821 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
27824 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27827 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27830 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27832 case OPC_ASUB_U_df
:
27833 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27836 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27839 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27842 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27845 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27847 case OPC_AVER_S_df
:
27848 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27851 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27854 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
27857 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27860 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27862 case OPC_AVER_U_df
:
27863 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27866 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27869 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
27872 case OPC_DOTP_S_df
:
27873 case OPC_DOTP_U_df
:
27874 case OPC_DPADD_S_df
:
27875 case OPC_DPADD_U_df
:
27876 case OPC_DPSUB_S_df
:
27877 case OPC_HADD_S_df
:
27878 case OPC_DPSUB_U_df
:
27879 case OPC_HADD_U_df
:
27880 case OPC_HSUB_S_df
:
27881 case OPC_HSUB_U_df
:
27882 if (df
== DF_BYTE
) {
27883 generate_exception_end(ctx
, EXCP_RI
);
27886 switch (MASK_MSA_3R(ctx
->opcode
)) {
27887 case OPC_DOTP_S_df
:
27888 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27890 case OPC_DOTP_U_df
:
27891 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27893 case OPC_DPADD_S_df
:
27894 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27896 case OPC_DPADD_U_df
:
27897 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27899 case OPC_DPSUB_S_df
:
27900 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27902 case OPC_HADD_S_df
:
27903 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27905 case OPC_DPSUB_U_df
:
27906 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27908 case OPC_HADD_U_df
:
27909 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27911 case OPC_HSUB_S_df
:
27912 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27914 case OPC_HSUB_U_df
:
27915 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27920 MIPS_INVAL("MSA instruction");
27921 generate_exception_end(ctx
, EXCP_RI
);
27924 tcg_temp_free_i32(twd
);
27925 tcg_temp_free_i32(tws
);
27926 tcg_temp_free_i32(twt
);
27927 tcg_temp_free_i32(tdf
);
27930 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
27932 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
27933 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
27934 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
27935 TCGv telm
= tcg_temp_new();
27936 TCGv_i32 tsr
= tcg_const_i32(source
);
27937 TCGv_i32 tdt
= tcg_const_i32(dest
);
27939 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
27941 gen_load_gpr(telm
, source
);
27942 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
27945 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
27946 gen_store_gpr(telm
, dest
);
27949 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
27952 MIPS_INVAL("MSA instruction");
27953 generate_exception_end(ctx
, EXCP_RI
);
27957 tcg_temp_free(telm
);
27958 tcg_temp_free_i32(tdt
);
27959 tcg_temp_free_i32(tsr
);
27962 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
27965 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
27966 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27967 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27969 TCGv_i32 tws
= tcg_const_i32(ws
);
27970 TCGv_i32 twd
= tcg_const_i32(wd
);
27971 TCGv_i32 tn
= tcg_const_i32(n
);
27972 TCGv_i32 tdf
= tcg_const_i32(df
);
27974 switch (MASK_MSA_ELM(ctx
->opcode
)) {
27976 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
27978 case OPC_SPLATI_df
:
27979 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
27982 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
27984 case OPC_COPY_S_df
:
27985 case OPC_COPY_U_df
:
27986 case OPC_INSERT_df
:
27987 #if !defined(TARGET_MIPS64)
27988 /* Double format valid only for MIPS64 */
27989 if (df
== DF_DOUBLE
) {
27990 generate_exception_end(ctx
, EXCP_RI
);
27994 switch (MASK_MSA_ELM(ctx
->opcode
)) {
27995 case OPC_COPY_S_df
:
27996 if (likely(wd
!= 0)) {
27997 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
28000 case OPC_COPY_U_df
:
28001 if (likely(wd
!= 0)) {
28002 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
28005 case OPC_INSERT_df
:
28006 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
28011 MIPS_INVAL("MSA instruction");
28012 generate_exception_end(ctx
, EXCP_RI
);
28014 tcg_temp_free_i32(twd
);
28015 tcg_temp_free_i32(tws
);
28016 tcg_temp_free_i32(tn
);
28017 tcg_temp_free_i32(tdf
);
28020 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28022 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28023 uint32_t df
= 0, n
= 0;
28025 if ((dfn
& 0x30) == 0x00) {
28028 } else if ((dfn
& 0x38) == 0x20) {
28031 } else if ((dfn
& 0x3c) == 0x30) {
28034 } else if ((dfn
& 0x3e) == 0x38) {
28037 } else if (dfn
== 0x3E) {
28038 /* CTCMSA, CFCMSA, MOVE.V */
28039 gen_msa_elm_3e(env
, ctx
);
28042 generate_exception_end(ctx
, EXCP_RI
);
28046 gen_msa_elm_df(env
, ctx
, df
, n
);
28049 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28051 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28052 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28053 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28054 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28055 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28057 TCGv_i32 twd
= tcg_const_i32(wd
);
28058 TCGv_i32 tws
= tcg_const_i32(ws
);
28059 TCGv_i32 twt
= tcg_const_i32(wt
);
28060 TCGv_i32 tdf
= tcg_temp_new_i32();
28062 /* adjust df value for floating-point instruction */
28063 tcg_gen_movi_i32(tdf
, df
+ 2);
28065 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28067 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28070 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28073 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28076 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28079 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28082 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28085 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28088 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28091 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28094 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28097 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28100 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28103 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28106 tcg_gen_movi_i32(tdf
, df
+ 1);
28107 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28110 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28113 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28115 case OPC_MADD_Q_df
:
28116 tcg_gen_movi_i32(tdf
, df
+ 1);
28117 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28120 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28122 case OPC_MSUB_Q_df
:
28123 tcg_gen_movi_i32(tdf
, df
+ 1);
28124 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28127 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28130 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28133 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28136 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28139 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28142 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28145 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28148 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28151 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28154 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28157 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28160 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28163 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28165 case OPC_MULR_Q_df
:
28166 tcg_gen_movi_i32(tdf
, df
+ 1);
28167 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28170 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28172 case OPC_FMIN_A_df
:
28173 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28175 case OPC_MADDR_Q_df
:
28176 tcg_gen_movi_i32(tdf
, df
+ 1);
28177 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28180 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28183 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28185 case OPC_MSUBR_Q_df
:
28186 tcg_gen_movi_i32(tdf
, df
+ 1);
28187 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28190 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28192 case OPC_FMAX_A_df
:
28193 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28196 MIPS_INVAL("MSA instruction");
28197 generate_exception_end(ctx
, EXCP_RI
);
28201 tcg_temp_free_i32(twd
);
28202 tcg_temp_free_i32(tws
);
28203 tcg_temp_free_i32(twt
);
28204 tcg_temp_free_i32(tdf
);
28207 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28209 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28210 (op & (0x7 << 18)))
28211 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28212 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28213 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28214 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28215 TCGv_i32 twd
= tcg_const_i32(wd
);
28216 TCGv_i32 tws
= tcg_const_i32(ws
);
28217 TCGv_i32 twt
= tcg_const_i32(wt
);
28218 TCGv_i32 tdf
= tcg_const_i32(df
);
28220 switch (MASK_MSA_2R(ctx
->opcode
)) {
28222 #if !defined(TARGET_MIPS64)
28223 /* Double format valid only for MIPS64 */
28224 if (df
== DF_DOUBLE
) {
28225 generate_exception_end(ctx
, EXCP_RI
);
28229 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28232 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28235 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28238 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28241 MIPS_INVAL("MSA instruction");
28242 generate_exception_end(ctx
, EXCP_RI
);
28246 tcg_temp_free_i32(twd
);
28247 tcg_temp_free_i32(tws
);
28248 tcg_temp_free_i32(twt
);
28249 tcg_temp_free_i32(tdf
);
28252 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28254 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28255 (op & (0xf << 17)))
28256 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28257 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28258 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28259 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28260 TCGv_i32 twd
= tcg_const_i32(wd
);
28261 TCGv_i32 tws
= tcg_const_i32(ws
);
28262 TCGv_i32 twt
= tcg_const_i32(wt
);
28263 /* adjust df value for floating-point instruction */
28264 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28266 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28267 case OPC_FCLASS_df
:
28268 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
28270 case OPC_FTRUNC_S_df
:
28271 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
28273 case OPC_FTRUNC_U_df
:
28274 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
28277 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
28279 case OPC_FRSQRT_df
:
28280 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
28283 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
28286 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
28289 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
28291 case OPC_FEXUPL_df
:
28292 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
28294 case OPC_FEXUPR_df
:
28295 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
28298 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
28301 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
28303 case OPC_FTINT_S_df
:
28304 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
28306 case OPC_FTINT_U_df
:
28307 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
28309 case OPC_FFINT_S_df
:
28310 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
28312 case OPC_FFINT_U_df
:
28313 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
28317 tcg_temp_free_i32(twd
);
28318 tcg_temp_free_i32(tws
);
28319 tcg_temp_free_i32(twt
);
28320 tcg_temp_free_i32(tdf
);
28323 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
28325 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
28326 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28327 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28328 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28329 TCGv_i32 twd
= tcg_const_i32(wd
);
28330 TCGv_i32 tws
= tcg_const_i32(ws
);
28331 TCGv_i32 twt
= tcg_const_i32(wt
);
28333 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28335 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
28338 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
28341 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
28344 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
28347 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
28350 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
28353 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
28356 MIPS_INVAL("MSA instruction");
28357 generate_exception_end(ctx
, EXCP_RI
);
28361 tcg_temp_free_i32(twd
);
28362 tcg_temp_free_i32(tws
);
28363 tcg_temp_free_i32(twt
);
28366 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
28368 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28376 gen_msa_vec_v(env
, ctx
);
28379 gen_msa_2r(env
, ctx
);
28382 gen_msa_2rf(env
, ctx
);
28385 MIPS_INVAL("MSA instruction");
28386 generate_exception_end(ctx
, EXCP_RI
);
28391 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
28393 uint32_t opcode
= ctx
->opcode
;
28394 check_insn(ctx
, ASE_MSA
);
28395 check_msa_access(ctx
);
28397 switch (MASK_MSA_MINOR(opcode
)) {
28398 case OPC_MSA_I8_00
:
28399 case OPC_MSA_I8_01
:
28400 case OPC_MSA_I8_02
:
28401 gen_msa_i8(env
, ctx
);
28403 case OPC_MSA_I5_06
:
28404 case OPC_MSA_I5_07
:
28405 gen_msa_i5(env
, ctx
);
28407 case OPC_MSA_BIT_09
:
28408 case OPC_MSA_BIT_0A
:
28409 gen_msa_bit(env
, ctx
);
28411 case OPC_MSA_3R_0D
:
28412 case OPC_MSA_3R_0E
:
28413 case OPC_MSA_3R_0F
:
28414 case OPC_MSA_3R_10
:
28415 case OPC_MSA_3R_11
:
28416 case OPC_MSA_3R_12
:
28417 case OPC_MSA_3R_13
:
28418 case OPC_MSA_3R_14
:
28419 case OPC_MSA_3R_15
:
28420 gen_msa_3r(env
, ctx
);
28423 gen_msa_elm(env
, ctx
);
28425 case OPC_MSA_3RF_1A
:
28426 case OPC_MSA_3RF_1B
:
28427 case OPC_MSA_3RF_1C
:
28428 gen_msa_3rf(env
, ctx
);
28431 gen_msa_vec(env
, ctx
);
28442 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
28443 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
28444 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28445 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
28447 TCGv_i32 twd
= tcg_const_i32(wd
);
28448 TCGv taddr
= tcg_temp_new();
28449 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
28451 switch (MASK_MSA_MINOR(opcode
)) {
28453 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
28456 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
28459 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
28462 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
28465 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
28468 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
28471 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
28474 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
28478 tcg_temp_free_i32(twd
);
28479 tcg_temp_free(taddr
);
28483 MIPS_INVAL("MSA instruction");
28484 generate_exception_end(ctx
, EXCP_RI
);
28490 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
28493 int rs
, rt
, rd
, sa
;
28497 /* make sure instructions are on a word boundary */
28498 if (ctx
->base
.pc_next
& 0x3) {
28499 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
28500 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
28504 /* Handle blikely not taken case */
28505 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
28506 TCGLabel
*l1
= gen_new_label();
28508 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
28509 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
28510 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
28514 op
= MASK_OP_MAJOR(ctx
->opcode
);
28515 rs
= (ctx
->opcode
>> 21) & 0x1f;
28516 rt
= (ctx
->opcode
>> 16) & 0x1f;
28517 rd
= (ctx
->opcode
>> 11) & 0x1f;
28518 sa
= (ctx
->opcode
>> 6) & 0x1f;
28519 imm
= (int16_t)ctx
->opcode
;
28522 decode_opc_special(env
, ctx
);
28525 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
28526 decode_mmi(env
, ctx
);
28527 #if !defined(TARGET_MIPS64)
28528 } else if (ctx
->insn_flags
& ASE_MXU
) {
28529 decode_opc_mxu(env
, ctx
);
28532 decode_opc_special2_legacy(env
, ctx
);
28536 if (ctx
->insn_flags
& INSN_R5900
) {
28537 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
28539 decode_opc_special3(env
, ctx
);
28543 op1
= MASK_REGIMM(ctx
->opcode
);
28545 case OPC_BLTZL
: /* REGIMM branches */
28549 check_insn(ctx
, ISA_MIPS2
);
28550 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28554 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28558 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28560 /* OPC_NAL, OPC_BAL */
28561 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
28563 generate_exception_end(ctx
, EXCP_RI
);
28566 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28569 case OPC_TGEI
: /* REGIMM traps */
28576 check_insn(ctx
, ISA_MIPS2
);
28577 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28578 gen_trap(ctx
, op1
, rs
, -1, imm
);
28581 check_insn(ctx
, ISA_MIPS32R6
);
28582 generate_exception_end(ctx
, EXCP_RI
);
28585 check_insn(ctx
, ISA_MIPS32R2
);
28586 /* Break the TB to be able to sync copied instructions
28588 ctx
->base
.is_jmp
= DISAS_STOP
;
28590 case OPC_BPOSGE32
: /* MIPS DSP branch */
28591 #if defined(TARGET_MIPS64)
28595 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
28597 #if defined(TARGET_MIPS64)
28599 check_insn(ctx
, ISA_MIPS32R6
);
28600 check_mips_64(ctx
);
28602 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
28606 check_insn(ctx
, ISA_MIPS32R6
);
28607 check_mips_64(ctx
);
28609 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
28613 default: /* Invalid */
28614 MIPS_INVAL("regimm");
28615 generate_exception_end(ctx
, EXCP_RI
);
28620 check_cp0_enabled(ctx
);
28621 op1
= MASK_CP0(ctx
->opcode
);
28629 #if defined(TARGET_MIPS64)
28633 #ifndef CONFIG_USER_ONLY
28634 gen_cp0(env
, ctx
, op1
, rt
, rd
);
28635 #endif /* !CONFIG_USER_ONLY */
28653 #ifndef CONFIG_USER_ONLY
28654 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
28655 #endif /* !CONFIG_USER_ONLY */
28658 #ifndef CONFIG_USER_ONLY
28661 TCGv t0
= tcg_temp_new();
28663 op2
= MASK_MFMC0(ctx
->opcode
);
28667 gen_helper_dmt(t0
);
28668 gen_store_gpr(t0
, rt
);
28672 gen_helper_emt(t0
);
28673 gen_store_gpr(t0
, rt
);
28677 gen_helper_dvpe(t0
, cpu_env
);
28678 gen_store_gpr(t0
, rt
);
28682 gen_helper_evpe(t0
, cpu_env
);
28683 gen_store_gpr(t0
, rt
);
28686 check_insn(ctx
, ISA_MIPS32R6
);
28688 gen_helper_dvp(t0
, cpu_env
);
28689 gen_store_gpr(t0
, rt
);
28693 check_insn(ctx
, ISA_MIPS32R6
);
28695 gen_helper_evp(t0
, cpu_env
);
28696 gen_store_gpr(t0
, rt
);
28700 check_insn(ctx
, ISA_MIPS32R2
);
28701 save_cpu_state(ctx
, 1);
28702 gen_helper_di(t0
, cpu_env
);
28703 gen_store_gpr(t0
, rt
);
28704 /* Stop translation as we may have switched
28705 the execution mode. */
28706 ctx
->base
.is_jmp
= DISAS_STOP
;
28709 check_insn(ctx
, ISA_MIPS32R2
);
28710 save_cpu_state(ctx
, 1);
28711 gen_helper_ei(t0
, cpu_env
);
28712 gen_store_gpr(t0
, rt
);
28713 /* DISAS_STOP isn't sufficient, we need to ensure we break
28714 out of translated code to check for pending interrupts */
28715 gen_save_pc(ctx
->base
.pc_next
+ 4);
28716 ctx
->base
.is_jmp
= DISAS_EXIT
;
28718 default: /* Invalid */
28719 MIPS_INVAL("mfmc0");
28720 generate_exception_end(ctx
, EXCP_RI
);
28725 #endif /* !CONFIG_USER_ONLY */
28728 check_insn(ctx
, ISA_MIPS32R2
);
28729 gen_load_srsgpr(rt
, rd
);
28732 check_insn(ctx
, ISA_MIPS32R2
);
28733 gen_store_srsgpr(rt
, rd
);
28737 generate_exception_end(ctx
, EXCP_RI
);
28741 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
28742 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28743 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
28744 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28747 /* Arithmetic with immediate opcode */
28748 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28752 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28754 case OPC_SLTI
: /* Set on less than with immediate opcode */
28756 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
28758 case OPC_ANDI
: /* Arithmetic with immediate opcode */
28759 case OPC_LUI
: /* OPC_AUI */
28762 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
28764 case OPC_J
: /* Jump */
28766 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
28767 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
28770 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
28771 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28773 generate_exception_end(ctx
, EXCP_RI
);
28776 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
28777 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28780 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28783 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
28784 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28786 generate_exception_end(ctx
, EXCP_RI
);
28789 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
28790 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28793 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28796 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
28799 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28801 check_insn(ctx
, ISA_MIPS32R6
);
28802 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
28803 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28806 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
28809 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28811 check_insn(ctx
, ISA_MIPS32R6
);
28812 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
28813 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28818 check_insn(ctx
, ISA_MIPS2
);
28819 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28823 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28825 case OPC_LL
: /* Load and stores */
28826 check_insn(ctx
, ISA_MIPS2
);
28827 if (ctx
->insn_flags
& INSN_R5900
) {
28828 check_insn_opc_user_only(ctx
, INSN_R5900
);
28833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28841 gen_ld(ctx
, op
, rt
, rs
, imm
);
28845 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28850 gen_st(ctx
, op
, rt
, rs
, imm
);
28853 check_insn(ctx
, ISA_MIPS2
);
28854 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28855 if (ctx
->insn_flags
& INSN_R5900
) {
28856 check_insn_opc_user_only(ctx
, INSN_R5900
);
28858 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
28861 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28862 check_cp0_enabled(ctx
);
28863 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
28864 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
28865 gen_cache_operation(ctx
, rt
, rs
, imm
);
28867 /* Treat as NOP. */
28870 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28871 if (ctx
->insn_flags
& INSN_R5900
) {
28872 /* Treat as NOP. */
28874 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
28875 /* Treat as NOP. */
28879 /* Floating point (COP1). */
28884 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
28888 op1
= MASK_CP1(ctx
->opcode
);
28893 check_cp1_enabled(ctx
);
28894 check_insn(ctx
, ISA_MIPS32R2
);
28900 check_cp1_enabled(ctx
);
28901 gen_cp1(ctx
, op1
, rt
, rd
);
28903 #if defined(TARGET_MIPS64)
28906 check_cp1_enabled(ctx
);
28907 check_insn(ctx
, ISA_MIPS3
);
28908 check_mips_64(ctx
);
28909 gen_cp1(ctx
, op1
, rt
, rd
);
28912 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
28913 check_cp1_enabled(ctx
);
28914 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28916 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
28921 check_insn(ctx
, ASE_MIPS3D
);
28922 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
28923 (rt
>> 2) & 0x7, imm
<< 2);
28927 check_cp1_enabled(ctx
);
28928 check_insn(ctx
, ISA_MIPS32R6
);
28929 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
28933 check_cp1_enabled(ctx
);
28934 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28936 check_insn(ctx
, ASE_MIPS3D
);
28939 check_cp1_enabled(ctx
);
28940 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28941 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
28942 (rt
>> 2) & 0x7, imm
<< 2);
28949 check_cp1_enabled(ctx
);
28950 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
28956 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
28957 check_cp1_enabled(ctx
);
28958 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28960 case R6_OPC_CMP_AF_S
:
28961 case R6_OPC_CMP_UN_S
:
28962 case R6_OPC_CMP_EQ_S
:
28963 case R6_OPC_CMP_UEQ_S
:
28964 case R6_OPC_CMP_LT_S
:
28965 case R6_OPC_CMP_ULT_S
:
28966 case R6_OPC_CMP_LE_S
:
28967 case R6_OPC_CMP_ULE_S
:
28968 case R6_OPC_CMP_SAF_S
:
28969 case R6_OPC_CMP_SUN_S
:
28970 case R6_OPC_CMP_SEQ_S
:
28971 case R6_OPC_CMP_SEUQ_S
:
28972 case R6_OPC_CMP_SLT_S
:
28973 case R6_OPC_CMP_SULT_S
:
28974 case R6_OPC_CMP_SLE_S
:
28975 case R6_OPC_CMP_SULE_S
:
28976 case R6_OPC_CMP_OR_S
:
28977 case R6_OPC_CMP_UNE_S
:
28978 case R6_OPC_CMP_NE_S
:
28979 case R6_OPC_CMP_SOR_S
:
28980 case R6_OPC_CMP_SUNE_S
:
28981 case R6_OPC_CMP_SNE_S
:
28982 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
28984 case R6_OPC_CMP_AF_D
:
28985 case R6_OPC_CMP_UN_D
:
28986 case R6_OPC_CMP_EQ_D
:
28987 case R6_OPC_CMP_UEQ_D
:
28988 case R6_OPC_CMP_LT_D
:
28989 case R6_OPC_CMP_ULT_D
:
28990 case R6_OPC_CMP_LE_D
:
28991 case R6_OPC_CMP_ULE_D
:
28992 case R6_OPC_CMP_SAF_D
:
28993 case R6_OPC_CMP_SUN_D
:
28994 case R6_OPC_CMP_SEQ_D
:
28995 case R6_OPC_CMP_SEUQ_D
:
28996 case R6_OPC_CMP_SLT_D
:
28997 case R6_OPC_CMP_SULT_D
:
28998 case R6_OPC_CMP_SLE_D
:
28999 case R6_OPC_CMP_SULE_D
:
29000 case R6_OPC_CMP_OR_D
:
29001 case R6_OPC_CMP_UNE_D
:
29002 case R6_OPC_CMP_NE_D
:
29003 case R6_OPC_CMP_SOR_D
:
29004 case R6_OPC_CMP_SUNE_D
:
29005 case R6_OPC_CMP_SNE_D
:
29006 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29009 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29010 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29015 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29030 check_insn(ctx
, ASE_MSA
);
29031 gen_msa_branch(env
, ctx
, op1
);
29035 generate_exception_end(ctx
, EXCP_RI
);
29040 /* Compact branches [R6] and COP2 [non-R6] */
29041 case OPC_BC
: /* OPC_LWC2 */
29042 case OPC_BALC
: /* OPC_SWC2 */
29043 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29044 /* OPC_BC, OPC_BALC */
29045 gen_compute_compact_branch(ctx
, op
, 0, 0,
29046 sextract32(ctx
->opcode
<< 2, 0, 28));
29048 /* OPC_LWC2, OPC_SWC2 */
29049 /* COP2: Not implemented. */
29050 generate_exception_err(ctx
, EXCP_CpU
, 2);
29053 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29054 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29055 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29057 /* OPC_BEQZC, OPC_BNEZC */
29058 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29059 sextract32(ctx
->opcode
<< 2, 0, 23));
29061 /* OPC_JIC, OPC_JIALC */
29062 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29065 /* OPC_LWC2, OPC_SWC2 */
29066 /* COP2: Not implemented. */
29067 generate_exception_err(ctx
, EXCP_CpU
, 2);
29071 check_insn(ctx
, INSN_LOONGSON2F
);
29072 /* Note that these instructions use different fields. */
29073 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29077 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29078 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29079 check_cp1_enabled(ctx
);
29080 op1
= MASK_CP3(ctx
->opcode
);
29084 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29090 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29091 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29094 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29095 /* Treat as NOP. */
29098 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29112 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29113 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29117 generate_exception_end(ctx
, EXCP_RI
);
29121 generate_exception_err(ctx
, EXCP_CpU
, 1);
29125 #if defined(TARGET_MIPS64)
29126 /* MIPS64 opcodes */
29128 if (ctx
->insn_flags
& INSN_R5900
) {
29129 check_insn_opc_user_only(ctx
, INSN_R5900
);
29134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29138 check_insn(ctx
, ISA_MIPS3
);
29139 check_mips_64(ctx
);
29140 gen_ld(ctx
, op
, rt
, rs
, imm
);
29144 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29147 check_insn(ctx
, ISA_MIPS3
);
29148 check_mips_64(ctx
);
29149 gen_st(ctx
, op
, rt
, rs
, imm
);
29152 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29153 check_insn(ctx
, ISA_MIPS3
);
29154 if (ctx
->insn_flags
& INSN_R5900
) {
29155 check_insn_opc_user_only(ctx
, INSN_R5900
);
29157 check_mips_64(ctx
);
29158 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
29160 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29161 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29162 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29163 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29166 check_insn(ctx
, ISA_MIPS3
);
29167 check_mips_64(ctx
);
29168 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29172 check_insn(ctx
, ISA_MIPS3
);
29173 check_mips_64(ctx
);
29174 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29177 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29178 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29179 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29181 MIPS_INVAL("major opcode");
29182 generate_exception_end(ctx
, EXCP_RI
);
29186 case OPC_DAUI
: /* OPC_JALX */
29187 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29188 #if defined(TARGET_MIPS64)
29190 check_mips_64(ctx
);
29192 generate_exception(ctx
, EXCP_RI
);
29193 } else if (rt
!= 0) {
29194 TCGv t0
= tcg_temp_new();
29195 gen_load_gpr(t0
, rs
);
29196 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29200 generate_exception_end(ctx
, EXCP_RI
);
29201 MIPS_INVAL("major opcode");
29205 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29206 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29207 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29210 case OPC_MSA
: /* OPC_MDMX */
29211 if (ctx
->insn_flags
& INSN_R5900
) {
29212 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29214 /* MDMX: Not implemented. */
29219 check_insn(ctx
, ISA_MIPS32R6
);
29220 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29222 default: /* Invalid */
29223 MIPS_INVAL("major opcode");
29224 generate_exception_end(ctx
, EXCP_RI
);
29229 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
29231 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29232 CPUMIPSState
*env
= cs
->env_ptr
;
29234 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
29235 ctx
->saved_pc
= -1;
29236 ctx
->insn_flags
= env
->insn_flags
;
29237 ctx
->CP0_Config1
= env
->CP0_Config1
;
29238 ctx
->CP0_Config2
= env
->CP0_Config2
;
29239 ctx
->CP0_Config3
= env
->CP0_Config3
;
29240 ctx
->CP0_Config5
= env
->CP0_Config5
;
29242 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
29243 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
29244 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
29245 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
29246 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
29247 ctx
->PAMask
= env
->PAMask
;
29248 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
29249 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
29250 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
29251 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
29252 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
29253 /* Restore delay slot state from the tb context. */
29254 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
29255 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
29256 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
29257 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
29258 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
29259 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
29260 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
29261 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
29262 restore_cpu_state(env
, ctx
);
29263 #ifdef CONFIG_USER_ONLY
29264 ctx
->mem_idx
= MIPS_HFLAG_UM
;
29266 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
29268 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
29269 MO_UNALN
: MO_ALIGN
;
29271 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
29275 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29279 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29281 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29283 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
29287 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
29288 const CPUBreakpoint
*bp
)
29290 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29292 save_cpu_state(ctx
, 1);
29293 ctx
->base
.is_jmp
= DISAS_NORETURN
;
29294 gen_helper_raise_exception_debug(cpu_env
);
29295 /* The address covered by the breakpoint must be included in
29296 [tb->pc, tb->pc + tb->size) in order to for it to be
29297 properly cleared -- thus we increment the PC here so that
29298 the logic setting tb->size below does the right thing. */
29299 ctx
->base
.pc_next
+= 4;
29303 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
29305 CPUMIPSState
*env
= cs
->env_ptr
;
29306 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29310 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
29311 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
29312 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29313 insn_bytes
= decode_nanomips_opc(env
, ctx
);
29314 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
29315 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
29317 decode_opc(env
, ctx
);
29318 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
29319 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29320 insn_bytes
= decode_micromips_opc(env
, ctx
);
29321 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
29322 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29323 insn_bytes
= decode_mips16_opc(env
, ctx
);
29325 generate_exception_end(ctx
, EXCP_RI
);
29326 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
29330 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
29331 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
29332 MIPS_HFLAG_FBNSLOT
))) {
29333 /* force to generate branch as there is neither delay nor
29337 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
29338 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
29339 /* Force to generate branch as microMIPS R6 doesn't restrict
29340 branches in the forbidden slot. */
29345 gen_branch(ctx
, insn_bytes
);
29347 ctx
->base
.pc_next
+= insn_bytes
;
29349 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
29352 /* Execute a branch and its delay slot as a single instruction.
29353 This is what GDB expects and is consistent with what the
29354 hardware does (e.g. if a delay slot instruction faults, the
29355 reported PC is the PC of the branch). */
29356 if (ctx
->base
.singlestep_enabled
&&
29357 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
29358 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29360 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
29361 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29365 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
29367 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29369 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
29370 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
29371 gen_helper_raise_exception_debug(cpu_env
);
29373 switch (ctx
->base
.is_jmp
) {
29375 gen_save_pc(ctx
->base
.pc_next
);
29376 tcg_gen_lookup_and_goto_ptr();
29379 case DISAS_TOO_MANY
:
29380 save_cpu_state(ctx
, 0);
29381 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
29384 tcg_gen_exit_tb(NULL
, 0);
29386 case DISAS_NORETURN
:
29389 g_assert_not_reached();
29394 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
29396 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
29397 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
29400 static const TranslatorOps mips_tr_ops
= {
29401 .init_disas_context
= mips_tr_init_disas_context
,
29402 .tb_start
= mips_tr_tb_start
,
29403 .insn_start
= mips_tr_insn_start
,
29404 .breakpoint_check
= mips_tr_breakpoint_check
,
29405 .translate_insn
= mips_tr_translate_insn
,
29406 .tb_stop
= mips_tr_tb_stop
,
29407 .disas_log
= mips_tr_disas_log
,
29410 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
29414 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
29417 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
29421 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
29423 #define printfpr(fp) \
29426 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29427 " fd:%13g fs:%13g psu: %13g\n", \
29428 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
29429 (double)(fp)->fd, \
29430 (double)(fp)->fs[FP_ENDIAN_IDX], \
29431 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
29434 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
29435 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
29436 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29437 " fd:%13g fs:%13g psu:%13g\n", \
29438 tmp.w[FP_ENDIAN_IDX], tmp.d, \
29440 (double)tmp.fs[FP_ENDIAN_IDX], \
29441 (double)tmp.fs[!FP_ENDIAN_IDX]); \
29446 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
29447 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
29448 get_float_exception_flags(&env
->active_fpu
.fp_status
));
29449 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
29450 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
29451 printfpr(&env
->active_fpu
.fpr
[i
]);
29457 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
29460 MIPSCPU
*cpu
= MIPS_CPU(cs
);
29461 CPUMIPSState
*env
= &cpu
->env
;
29464 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
29465 " LO=0x" TARGET_FMT_lx
" ds %04x "
29466 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
29467 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
29468 env
->hflags
, env
->btarget
, env
->bcond
);
29469 for (i
= 0; i
< 32; i
++) {
29471 cpu_fprintf(f
, "GPR%02d:", i
);
29472 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
29474 cpu_fprintf(f
, "\n");
29477 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
29478 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
29479 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
29481 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
29482 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
29483 env
->CP0_Config2
, env
->CP0_Config3
);
29484 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
29485 env
->CP0_Config4
, env
->CP0_Config5
);
29486 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
29487 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
29491 void mips_tcg_init(void)
29496 for (i
= 1; i
< 32; i
++)
29497 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29498 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
29501 for (i
= 0; i
< 32; i
++) {
29502 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
29504 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
29505 /* The scalar floating-point unit (FPU) registers are mapped on
29506 * the MSA vector registers. */
29507 fpu_f64
[i
] = msa_wr_d
[i
* 2];
29508 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
29509 msa_wr_d
[i
* 2 + 1] =
29510 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
29513 cpu_PC
= tcg_global_mem_new(cpu_env
,
29514 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
29515 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
29516 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
29517 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
29519 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
29520 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
29523 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
29524 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
29526 bcond
= tcg_global_mem_new(cpu_env
,
29527 offsetof(CPUMIPSState
, bcond
), "bcond");
29528 btarget
= tcg_global_mem_new(cpu_env
,
29529 offsetof(CPUMIPSState
, btarget
), "btarget");
29530 hflags
= tcg_global_mem_new_i32(cpu_env
,
29531 offsetof(CPUMIPSState
, hflags
), "hflags");
29533 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
29534 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
29536 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
29537 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
29539 #if !defined(TARGET_MIPS64)
29540 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
29541 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29542 offsetof(CPUMIPSState
,
29543 active_tc
.mxu_gpr
[i
]),
29547 mxu_CR
= tcg_global_mem_new(cpu_env
,
29548 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
29549 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
29553 #include "translate_init.inc.c"
29555 void cpu_mips_realize_env(CPUMIPSState
*env
)
29557 env
->exception_base
= (int32_t)0xBFC00000;
29559 #ifndef CONFIG_USER_ONLY
29560 mmu_init(env
, env
->cpu_model
);
29562 fpu_init(env
, env
->cpu_model
);
29563 mvp_init(env
, env
->cpu_model
);
29566 bool cpu_supports_cps_smp(const char *cpu_type
)
29568 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29569 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
29572 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
29574 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29575 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
29578 void cpu_set_exception_base(int vp_index
, target_ulong address
)
29580 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
29581 vp
->env
.exception_base
= address
;
29584 void cpu_state_reset(CPUMIPSState
*env
)
29586 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
29587 CPUState
*cs
= CPU(cpu
);
29589 /* Reset registers to their default values */
29590 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
29591 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
29592 #ifdef TARGET_WORDS_BIGENDIAN
29593 env
->CP0_Config0
|= (1 << CP0C0_BE
);
29595 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
29596 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
29597 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
29598 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
29599 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
29600 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
29601 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
29602 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
29603 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
29604 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
29605 << env
->cpu_model
->CP0_LLAddr_shift
;
29606 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
29607 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
29608 env
->CCRes
= env
->cpu_model
->CCRes
;
29609 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
29610 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
29611 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
29612 env
->current_tc
= 0;
29613 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
29614 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
29615 #if defined(TARGET_MIPS64)
29616 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
29617 env
->SEGMask
|= 3ULL << 62;
29620 env
->PABITS
= env
->cpu_model
->PABITS
;
29621 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
29622 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
29623 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
29624 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
29625 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
29626 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
29627 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
29628 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
29629 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
29630 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
29631 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
29632 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
29633 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
29634 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
29635 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
29636 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
29637 env
->msair
= env
->cpu_model
->MSAIR
;
29638 env
->insn_flags
= env
->cpu_model
->insn_flags
;
29640 #if defined(CONFIG_USER_ONLY)
29641 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
29642 # ifdef TARGET_MIPS64
29643 /* Enable 64-bit register mode. */
29644 env
->CP0_Status
|= (1 << CP0St_PX
);
29646 # ifdef TARGET_ABI_MIPSN64
29647 /* Enable 64-bit address mode. */
29648 env
->CP0_Status
|= (1 << CP0St_UX
);
29650 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
29651 hardware registers. */
29652 env
->CP0_HWREna
|= 0x0000000F;
29653 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
29654 env
->CP0_Status
|= (1 << CP0St_CU1
);
29656 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
29657 env
->CP0_Status
|= (1 << CP0St_MX
);
29659 # if defined(TARGET_MIPS64)
29660 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
29661 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
29662 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
29663 env
->CP0_Status
|= (1 << CP0St_FR
);
29667 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
29668 /* If the exception was raised from a delay slot,
29669 come back to the jump. */
29670 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
29671 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
29673 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
29675 env
->active_tc
.PC
= env
->exception_base
;
29676 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
29677 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
29678 env
->CP0_Wired
= 0;
29679 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
29680 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
29681 if (mips_um_ksegs_enabled()) {
29682 env
->CP0_EBase
|= 0x40000000;
29684 env
->CP0_EBase
|= (int32_t)0x80000000;
29686 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
29687 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
29689 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
29691 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
29692 /* vectored interrupts not implemented, timer on int 7,
29693 no performance counters. */
29694 env
->CP0_IntCtl
= 0xe0000000;
29698 for (i
= 0; i
< 7; i
++) {
29699 env
->CP0_WatchLo
[i
] = 0;
29700 env
->CP0_WatchHi
[i
] = 0x80000000;
29702 env
->CP0_WatchLo
[7] = 0;
29703 env
->CP0_WatchHi
[7] = 0;
29705 /* Count register increments in debug mode, EJTAG version 1 */
29706 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
29708 cpu_mips_store_count(env
, 1);
29710 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
29713 /* Only TC0 on VPE 0 starts as active. */
29714 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
29715 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
29716 env
->tcs
[i
].CP0_TCHalt
= 1;
29718 env
->active_tc
.CP0_TCHalt
= 1;
29721 if (cs
->cpu_index
== 0) {
29722 /* VPE0 starts up enabled. */
29723 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
29724 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
29726 /* TC0 starts up unhalted. */
29728 env
->active_tc
.CP0_TCHalt
= 0;
29729 env
->tcs
[0].CP0_TCHalt
= 0;
29730 /* With thread 0 active. */
29731 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
29732 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
29737 /* Configure default legacy segmentation control. We use this regardless of
29738 * whether segmentation control is presented to the guest. */
29740 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
29741 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
29742 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
29743 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
29744 /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
29745 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29747 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
29748 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29749 (3 << CP0SC_C
)) << 16;
29750 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
29751 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
29752 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
29753 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
29754 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
29755 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
29756 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
29757 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
29759 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
29760 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
29761 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
29762 env
->CP0_Status
|= (1 << CP0St_FR
);
29765 if (env
->insn_flags
& ISA_MIPS32R6
) {
29767 env
->CP0_PWSize
= 0x40;
29773 env
->CP0_PWField
= 0x0C30C302;
29780 env
->CP0_PWField
= 0x02;
29783 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
29784 /* microMIPS on reset when Config3.ISA is 3 */
29785 env
->hflags
|= MIPS_HFLAG_M16
;
29789 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
29793 compute_hflags(env
);
29794 restore_fp_status(env
);
29795 restore_pamask(env
);
29796 cs
->exception_index
= EXCP_NONE
;
29798 if (semihosting_get_argc()) {
29799 /* UHI interface can be used to obtain argc and argv */
29800 env
->active_tc
.gpr
[4] = -1;
29804 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
29805 target_ulong
*data
)
29807 env
->active_tc
.PC
= data
[0];
29808 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
29809 env
->hflags
|= data
[1];
29810 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
29811 case MIPS_HFLAG_BR
:
29813 case MIPS_HFLAG_BC
:
29814 case MIPS_HFLAG_BL
:
29816 env
->btarget
= data
[2];