2 * MIPS emulation for QEMU - main translation routines
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
/* Compile-time switch for extra translator debug output (0 = disabled). */
#define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
/* Extract the 6-bit major opcode field (instruction bits 31..26);
 * 'op' is parenthesized so operator-expression arguments expand safely. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
/* Major opcode plus the top 2 bits of the PC-relative immediate (bits 20..19). */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
/* Major opcode plus the top 5 bits of the PC-relative immediate (bits 20..16). */
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
/* Major opcode plus the SPECIAL function field (bits 5..0).
 * Whole expansion parenthesized so it composes safely with any operator. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
/* R6 mul/div: SPECIAL encoding plus bits 10..0 distinguishing MUL/MUH etc. */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
/* VR54xx multiply variants: SPECIAL encoding plus sub-opcode bits 10..6.
 * Whole expansion parenthesized for safe composition. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
/* REGIMM: major opcode plus the rt field (bits 20..16).
 * Whole expansion parenthesized for safe composition. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
/* Major opcode plus the SPECIAL2 function field (bits 5..0).
 * Whole expansion parenthesized for safe composition. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
/* Major opcode plus the SPECIAL3 function field (bits 5..0).
 * Whole expansion parenthesized for safe composition. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL: SPECIAL3 function field plus the sa sub-opcode (bits 10..6).
 * Whole expansion parenthesized for safe composition. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL: SPECIAL3 function field plus the sa sub-opcode (bits 10..6).
 * Whole expansion parenthesized for safe composition. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP LX indexed loads: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* ADDU.QB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* OPC_ADDUH_QB_DSP reuses the OPC_MULT_G_2E encoding (0x18 | OPC_SPECIAL3). */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* ADDUH.QB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* ABSQ_S.PH sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* CMPU.EQ.QB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* SHLL.QB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DPA.W.PH sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* INSV sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* APPEND sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* EXTR.W sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* ABSQ_S.QH sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* ADDU.OB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* CMPU.EQ.OB sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DAPPEND sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DEXTR.W sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* DINSV sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* DPAQ.W.QH sub-class: SPECIAL3 plus sub-opcode bits 10..6; 'op' parenthesized. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* Isolate the SHLL.OB-class sub-opcode: SPECIAL3 decode bits plus bits 10..6. */
#define MASK_SHLL_OB(op)    (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
/*
 * Coprocessor 0 decode mask: major opcode (bits 31..26) plus the rs
 * selector in bits 25..21.  Fully parenthesized so the expansion is
 * safe inside larger expressions (e.g. comparisons: '==' binds tighter
 * than '|', so the unparenthesized form mis-parses).
 */
#define MASK_CP0(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * MFMC0 decode mask: CP0 decode bits plus the low 16 bits that select
 * the DI/EI/DVPE/EVPE-style variants.  Outer parentheses added so the
 * macro expands safely in any expression context.
 */
#define MASK_MFMC0(op)      (MASK_CP0(op) | ((op) & 0xFFFF))
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
/*
 * Coprocessor 0 (rs == C0) decode mask: CP0 decode bits plus the 6-bit
 * function field.  Outer parentheses added for expansion safety.
 */
#define MASK_C0(op)         (MASK_CP0(op) | ((op) & 0x3F))
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
960 /* Coprocessor 1 (rs field) */
/*
 * Coprocessor 1 decode mask: major opcode (bits 31..26) plus the rs
 * selector in bits 25..21.  Outer parentheses added for expansion safety.
 */
#define MASK_CP1(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
963 /* Values for the fmt field in FP instructions */
965 /* 0 - 15 are reserved */
966 FMT_S
= 16, /* single fp */
967 FMT_D
= 17, /* double fp */
968 FMT_E
= 18, /* extended fp */
969 FMT_Q
= 19, /* quad fp */
970 FMT_W
= 20, /* 32-bit fixed */
971 FMT_L
= 21, /* 64-bit fixed */
972 FMT_PS
= 22, /* paired single fp */
973 /* 23 - 31 are reserved */
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * CP1 function decode mask: CP1 decode bits plus the 6-bit function
 * field.  Outer parentheses added for expansion safety.
 */
#define MASK_CP1_FUNC(op)   (MASK_CP1(op) | ((op) & 0x3F))
/*
 * BC1 decode mask: CP1 decode bits plus the 2-bit nd/tf selector in
 * bits 17..16.  Outer parentheses added for expansion safety.
 */
#define MASK_BC1(op)        (MASK_CP1(op) | ((op) & (0x3 << 16)))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * Coprocessor 2 decode mask: major opcode (bits 31..26) plus the rs
 * selector in bits 25..21.  Outer parentheses added for expansion safety.
 */
#define MASK_CP2(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/*
 * Loongson multimedia (LMI) decode mask: major opcode, rs selector
 * (bits 25..21) and the low 5-bit function field.
 */
#define MASK_LMI(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)) | ((op) & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/*
 * Coprocessor 3 decode mask: major opcode (bits 31..26) plus the 6-bit
 * function field.  Outer parentheses added for expansion safety.
 */
#define MASK_CP3(op)        (MASK_OP_MAJOR(op) | ((op) & 0x3F))
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/*
 * MSA minor-opcode decode mask: major opcode (bits 31..26) plus the
 * 6-bit minor opcode field.
 */
#define MASK_MSA_MINOR(op)  (MASK_OP_MAJOR(op) | ((op) & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1403 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1404 * ============================================
1407 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1408 * instructions set. It is designed to fit the needs of signal, graphical and
1409 * video processing applications. MXU instruction set is used in Xburst family
1410 * of microprocessors by Ingenic.
1412 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1413 * the control register.
1416 * The notation used in MXU assembler mnemonics
1417 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1419 * Register operands:
1421 * XRa, XRb, XRc, XRd - MXU registers
1422 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1424 * Non-register operands:
1426 * aptn1 - 1-bit accumulate add/subtract pattern
1427 * aptn2 - 2-bit accumulate add/subtract pattern
1428 * eptn2 - 2-bit execute add/subtract pattern
1429 * optn2 - 2-bit operand pattern
1430 * optn3 - 3-bit operand pattern
1431 * sft4 - 4-bit shift amount
1432 * strd2 - 2-bit stride amount
1436 * Level of parallelism: Operand size:
1437 * S - single operation at a time 32 - word
1438 * D - two operations in parallel 16 - half word
1439 * Q - four operations in parallel 8 - byte
1443 * ADD - Add or subtract
1444 * ADDC - Add with carry-in
1446 * ASUM - Sum together then accumulate (add or subtract)
1447 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1448 * AVG - Average between 2 operands
1449 * ABD - Absolute difference
1451 * AND - Logical bitwise 'and' operation
1453 * EXTR - Extract bits
1454 * I2M - Move from GPR register to MXU register
1455 * LDD - Load data from memory to XRF
1456 * LDI - Load data from memory to XRF (and increase the address base)
1457 * LUI - Load unsigned immediate
1459 * MULU - Unsigned multiply
1460 * MADD - 64-bit operand add 32x32 product
1461 * MSUB - 64-bit operand subtract 32x32 product
1462 * MAC - Multiply and accumulate (add or subtract)
1463 * MAD - Multiply and add or subtract
1464 * MAX - Maximum between 2 operands
1465 * MIN - Minimum between 2 operands
1466 * M2I - Move from MXU register to GPR register
1467 * MOVZ - Move if zero
1468 * MOVN - Move if non-zero
1469 * NOR - Logical bitwise 'nor' operation
1470 * OR - Logical bitwise 'or' operation
1471 * STD - Store data from XRF to memory
1472 * SDI - Store data from XRF to memory (and increase the address base)
1473 * SLT - Set of less than comparison
1474 * SAD - Sum of absolute differences
1475 * SLL - Logical shift left
1476 * SLR - Logical shift right
1477 * SAR - Arithmetic shift right
 1480 * SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
1481 * XOR - Logical bitwise 'exclusive or' operation
1485 * E - Expand results
1486 * F - Fixed point multiplication
1487 * L - Low part result
1488 * R - Doing rounding
1489 * V - Variable instead of immediate
1490 * W - Combine above L and V
1493 * The list of MXU instructions grouped by functionality
1494 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1496 * Load/Store instructions Multiplication instructions
1497 * ----------------------- ---------------------------
1499 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1500 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1501 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1502 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1503 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1504 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1505 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1506 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1507 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1508 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1509 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1510 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1511 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1513 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1514 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1515 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1516 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1517 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1518 * S16SDI XRa, Rb, s10, eptn2
1519 * S8LDD XRa, Rb, s8, eptn3
1520 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1521 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1522 * S8SDI XRa, Rb, s8, eptn3
1523 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1524 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1525 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1526 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1527 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1528 * S32CPS XRa, XRb, XRc
1529 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1530 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1531 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1532 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1533 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1534 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1535 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1536 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1537 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1538 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1539 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1540 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1541 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1542 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1543 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1544 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1545 * Q8SLT XRa, XRb, XRc
1546 * Q8SLTU XRa, XRb, XRc
1547 * Q8MOVZ XRa, XRb, XRc Shift instructions
1548 * Q8MOVN XRa, XRb, XRc ------------------
1550 * D32SLL XRa, XRb, XRc, XRd, sft4
1551 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1552 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1553 * D32SARL XRa, XRb, XRc, sft4
1554 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1555 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1556 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1557 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1558 * Q16SLL XRa, XRb, XRc, XRd, sft4
1559 * Q16SLR XRa, XRb, XRc, XRd, sft4
1560 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1561 * ------------------------- Q16SLLV XRa, XRb, Rb
1562 * Q16SLRV XRa, XRb, Rb
1563 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1564 * S32ALN XRa, XRb, XRc, Rb
1565 * S32ALNI XRa, XRb, XRc, s3
1566 * S32LUI XRa, s8, optn3 Move instructions
1567 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1568 * S32EXTRV XRa, XRb, Rs, Rt
1569 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1570 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1573 * The opcode organization of MXU instructions
1574 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1576 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1577 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1578 * other bits up to the instruction level is as follows:
1583 * ┌─ 000000 ─ OPC_MXU_S32MADD
1584 * ├─ 000001 ─ OPC_MXU_S32MADDU
1585 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1588 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1589 * │ ├─ 001 ─ OPC_MXU_S32MIN
1590 * │ ├─ 010 ─ OPC_MXU_D16MAX
1591 * │ ├─ 011 ─ OPC_MXU_D16MIN
1592 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1593 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1594 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1595 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1596 * ├─ 000100 ─ OPC_MXU_S32MSUB
1597 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1598 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1599 * │ ├─ 001 ─ OPC_MXU_D16SLT
1600 * │ ├─ 010 ─ OPC_MXU_D16AVG
1601 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1602 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1603 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1604 * │ └─ 111 ─ OPC_MXU_Q8ADD
1607 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1608 * │ ├─ 010 ─ OPC_MXU_D16CPS
1609 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1610 * │ └─ 110 ─ OPC_MXU_Q16SAT
1611 * ├─ 001000 ─ OPC_MXU_D16MUL
1613 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1614 * │ └─ 01 ─ OPC_MXU_D16MULE
1615 * ├─ 001010 ─ OPC_MXU_D16MAC
1616 * ├─ 001011 ─ OPC_MXU_D16MACF
1617 * ├─ 001100 ─ OPC_MXU_D16MADL
1618 * ├─ 001101 ─ OPC_MXU_S16MAD
1619 * ├─ 001110 ─ OPC_MXU_Q16ADD
1620 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1621 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1622 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1625 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1626 * │ └─ 1 ─ OPC_MXU_S32STDR
1629 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1630 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1633 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1634 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1637 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1638 * │ └─ 1 ─ OPC_MXU_S32LDIR
1641 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1642 * │ └─ 1 ─ OPC_MXU_S32SDIR
1645 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1646 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1649 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1650 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1651 * ├─ 011000 ─ OPC_MXU_D32ADD
1653 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1654 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1655 * │ └─ 10 ─ OPC_MXU_D32ASUM
1656 * ├─ 011010 ─ <not assigned>
1658 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1659 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1660 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1663 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1664 * │ ├─ 01 ─ OPC_MXU_D8SUM
1665 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1666 * ├─ 011110 ─ <not assigned>
1667 * ├─ 011111 ─ <not assigned>
1668 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1669 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1670 * ├─ 100010 ─ OPC_MXU_S8LDD
1671 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1672 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1673 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
1674 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1675 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1678 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1679 * │ ├─ 001 ─ OPC_MXU_S32ALN
1680 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1681 * │ ├─ 011 ─ OPC_MXU_S32NOR
1682 * │ ├─ 100 ─ OPC_MXU_S32AND
1683 * │ ├─ 101 ─ OPC_MXU_S32OR
1684 * │ ├─ 110 ─ OPC_MXU_S32XOR
1685 * │ └─ 111 ─ OPC_MXU_S32LUI
1688 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1689 * │ ├─ 001 ─ OPC_MXU_LXH
1690 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1691 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1692 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1693 * ├─ 101100 ─ OPC_MXU_S16LDI
1694 * ├─ 101101 ─ OPC_MXU_S16SDI
1695 * ├─ 101110 ─ OPC_MXU_S32M2I
1696 * ├─ 101111 ─ OPC_MXU_S32I2M
1697 * ├─ 110000 ─ OPC_MXU_D32SLL
1698 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1699 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1700 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1701 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 011 ─ OPC_MXU_D32SARV
1702 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 100 ─ OPC_MXU_Q16SLLV
1703 * │ ├─ 101 ─ OPC_MXU_Q16SLRV
1704 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 111 ─ OPC_MXU_Q16SARV
1706 * ├─ 110111 ─ OPC_MXU_Q16SAR
1708 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1709 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1712 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1713 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1714 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1715 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1716 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1717 * │ └─ 101 ─ OPC_MXU_S32MOVN
1720 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1721 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1722 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1723 * ├─ 111100 ─ OPC_MXU_Q8MADL
1724 * ├─ 111101 ─ OPC_MXU_S32SFL
1725 * ├─ 111110 ─ OPC_MXU_Q8SAD
1726 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1731 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1732 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
/*
 * MXU major opcodes: the value of bits 5..0 of an MXU instruction word
 * (see the decoding tree in the comment above). "POOLxx" entries are
 * instruction classes that need further decoding of other bit fields.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,   /* non-MXU MUL shares this encoding */
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 (overlaps with CLZ) */
    /* not assigned 0x21 (overlaps with CLO) */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F (overlaps with SDBBP) */
};
/* MXU pool 00 (OPC_MXU__POOL00) minor opcodes, instruction bits 20..18. */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};
/* MXU pool 01 (OPC_MXU__POOL01) minor opcodes, instruction bits 20..18. */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,   /* 0x06 not assigned */
};
/* MXU pool 02 (OPC_MXU__POOL02) minor opcodes; only even values assigned. */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};
/* MXU pool 03 (OPC_MXU__POOL03) minor opcodes. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};
/*
 * MXU pools 04..11: load/store variants distinguished by a single
 * minor-opcode bit (plain vs reversed form).
 */
/* MXU pool 04 (OPC_MXU__POOL04) minor opcodes. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};
/* MXU pool 05 (OPC_MXU__POOL05) minor opcodes. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};
/* MXU pool 06 (OPC_MXU__POOL06) minor opcodes. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};
/* MXU pool 07 (OPC_MXU__POOL07) minor opcodes. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};
/* MXU pool 08 (OPC_MXU__POOL08) minor opcodes. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};
/* MXU pool 09 (OPC_MXU__POOL09) minor opcodes. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};
/* MXU pool 10 (OPC_MXU__POOL10) minor opcodes. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};
/* MXU pool 11 (OPC_MXU__POOL11) minor opcodes. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};
/* MXU pool 12 (OPC_MXU__POOL12) minor opcodes. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};
/* MXU pool 13 (OPC_MXU__POOL13) minor opcodes. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};
/* MXU pool 14 (OPC_MXU__POOL14) minor opcodes. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};
/* MXU pool 15 (OPC_MXU__POOL15) minor opcodes, instruction bits 15..14. */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};
/* MXU pool 16 (OPC_MXU__POOL16) minor opcodes, instruction bits 20..18. */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32NOR   = 0x03,
    OPC_MXU_S32AND   = 0x04,
    OPC_MXU_S32OR    = 0x05,
    OPC_MXU_S32XOR   = 0x06,
    OPC_MXU_S32LUI   = 0x07,
};
/*
 * MXU pool 17 (OPC_MXU__POOL17) minor opcodes: indexed loads.
 * NOTE(review): LXB/LXH/LXW were lost in extraction; values restored
 * from the decoding tree in the comment above (000/001/011) — verify
 * against upstream.
 */
enum {
    OPC_MXU_LXB      = 0x00,
    OPC_MXU_LXH      = 0x01,
    /* 0x02 not assigned */
    OPC_MXU_LXW      = 0x03,
    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,
};
/* MXU pool 18 (OPC_MXU__POOL18) minor opcodes, instruction bits 20..18. */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,   /* 0x02 not assigned */
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,   /* 0x06 not assigned */
};
/* MXU pool 19 (OPC_MXU__POOL19) minor opcodes. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};
/* MXU pool 20 (OPC_MXU__POOL20) minor opcodes, instruction bits 20..18. */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};
/*
 * MXU pool 21 (OPC_MXU__POOL21) minor opcodes.
 * NOTE(review): the decoding tree above labels Q8MACSU as binary 10
 * (0x02), which conflicts with the 0x01 used here — confirm against
 * the MXU programming manual.
 */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
2015 * Overview of the TX79-specific instruction set
2016 * =============================================
2018 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2019 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2020 * instructions and certain multimedia instructions (MMIs). These MMIs
2021 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2022 * or sixteen 8-bit paths.
2026 * The Toshiba TX System RISC TX79 Core Architecture manual,
2027 * https://wiki.qemu.org/File:C790.pdf
2029 * Three-Operand Multiply and Multiply-Add (4 instructions)
2030 * --------------------------------------------------------
2031 * MADD [rd,] rs, rt Multiply/Add
2032 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2033 * MULT [rd,] rs, rt Multiply (3-operand)
2034 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2036 * Multiply Instructions for Pipeline 1 (10 instructions)
2037 * ------------------------------------------------------
2038 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2039 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2040 * DIV1 rs, rt Divide Pipeline 1
2041 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2042 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2043 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2044 * MFHI1 rd Move From HI1 Register
2045 * MFLO1 rd Move From LO1 Register
2046 * MTHI1 rs Move To HI1 Register
2047 * MTLO1 rs Move To LO1 Register
2049 * Arithmetic (19 instructions)
2050 * ----------------------------
2051 * PADDB rd, rs, rt Parallel Add Byte
2052 * PSUBB rd, rs, rt Parallel Subtract Byte
2053 * PADDH rd, rs, rt Parallel Add Halfword
2054 * PSUBH rd, rs, rt Parallel Subtract Halfword
2055 * PADDW rd, rs, rt Parallel Add Word
2056 * PSUBW rd, rs, rt Parallel Subtract Word
2057 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2058 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2059 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2060 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2061 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2062 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2063 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2064 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2065 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2066 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2067 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2068 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2069 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2071 * Min/Max (4 instructions)
2072 * ------------------------
2073 * PMAXH rd, rs, rt Parallel Maximum Halfword
2074 * PMINH rd, rs, rt Parallel Minimum Halfword
2075 * PMAXW rd, rs, rt Parallel Maximum Word
2076 * PMINW rd, rs, rt Parallel Minimum Word
2078 * Absolute (2 instructions)
2079 * -------------------------
2080 * PABSH rd, rt Parallel Absolute Halfword
2081 * PABSW rd, rt Parallel Absolute Word
2083 * Logical (4 instructions)
2084 * ------------------------
2085 * PAND rd, rs, rt Parallel AND
2086 * POR rd, rs, rt Parallel OR
2087 * PXOR rd, rs, rt Parallel XOR
2088 * PNOR rd, rs, rt Parallel NOR
2090 * Shift (9 instructions)
2091 * ----------------------
2092 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2093 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2094 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2095 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2096 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2097 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2098 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2099 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2100 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2102 * Compare (6 instructions)
2103 * ------------------------
2104 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2105 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2106 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2107 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2108 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2109 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2111 * LZC (1 instruction)
2112 * -------------------
2113 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2115 * Quadword Load and Store (2 instructions)
2116 * ----------------------------------------
2117 * LQ rt, offset(base) Load Quadword
2118 * SQ rt, offset(base) Store Quadword
2120 * Multiply and Divide (19 instructions)
2121 * -------------------------------------
2122 * PMULTW rd, rs, rt Parallel Multiply Word
2123 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2124 * PDIVW rs, rt Parallel Divide Word
2125 * PDIVUW rs, rt Parallel Divide Unsigned Word
2126 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2127 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2128 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2129 * PMULTH rd, rs, rt Parallel Multiply Halfword
2130 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2131 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2132 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2133 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2134 * PDIVBW rs, rt Parallel Divide Broadcast Word
2135 * PMFHI rd Parallel Move From HI Register
2136 * PMFLO rd Parallel Move From LO Register
2137 * PMTHI rs Parallel Move To HI Register
2138 * PMTLO rs Parallel Move To LO Register
2139 * PMFHL rd Parallel Move From HI/LO Register
2140 * PMTHL rs Parallel Move To HI/LO Register
2142 * Pack/Extend (11 instructions)
2143 * -----------------------------
2144 * PPAC5 rd, rt Parallel Pack to 5 bits
2145 * PPACB rd, rs, rt Parallel Pack to Byte
2146 * PPACH rd, rs, rt Parallel Pack to Halfword
2147 * PPACW rd, rs, rt Parallel Pack to Word
2148 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2149 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2150 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2151 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2152 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2153 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2154 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2156 * Others (16 instructions)
2157 * ------------------------
2158 * PCPYH rd, rt Parallel Copy Halfword
2159 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2160 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2161 * PREVH rd, rt Parallel Reverse Halfword
2162 * PINTH rd, rs, rt Parallel Interleave Halfword
2163 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2164 * PEXEH rd, rt Parallel Exchange Even Halfword
2165 * PEXCH rd, rt Parallel Exchange Center Halfword
2166 * PEXEW rd, rt Parallel Exchange Even Word
2167 * PEXCW rd, rt Parallel Exchange Center Word
2168 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2169 * MFSA rd Move from Shift Amount Register
2170 * MTSA rs Move to Shift Amount Register
2171 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2172 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2173 * PROT3W rd, rt Parallel Rotate 3 Words
2175 * MMI (MultiMedia Instruction) encodings
2176 * ======================================
2178 * MMI instructions encoding table keys:
2180 * * This code is reserved for future use. An attempt to execute it
2181 * causes a Reserved Instruction exception.
2182 * % This code indicates an instruction class. The instruction word
2183 * must be further decoded by examining additional tables that show
2184 * the values for other instruction fields.
2185 * # This code is reserved for the unsupported instructions DMULT,
2186 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2187 * to execute it causes a Reserved Instruction exception.
2189 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2192 * +--------+----------------------------------------+
2194 * +--------+----------------------------------------+
2196 * opcode bits 28..26
2197 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2198 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2199 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2200 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2201 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2202 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2203 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2204 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2205 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2206 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2207 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/* R5900/C790 MMI major opcodes (bits 31..26 of the instruction word). */
enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2217 * MMI instructions with opcode field = MMI:
2220 * +--------+-------------------------------+--------+
2221 * | MMI | |function|
2222 * +--------+-------------------------------+--------+
2224 * function bits 2..0
2225 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2226 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2227 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2228 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2229 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2230 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2231 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2232 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2233 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2234 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2235 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
/* Key an MMI instruction by major opcode plus the 6-bit function field. */
#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2240 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2241 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2242 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2243 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2244 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2245 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2246 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2247 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2248 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2249 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2250 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2251 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2252 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2253 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2254 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2255 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2256 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2268 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2271 * +--------+----------------------+--------+--------+
2272 * | MMI | |function| MMI0 |
2273 * +--------+----------------------+--------+--------+
2275 * function bits 7..6
2276 * bits | 0 | 1 | 2 | 3
2277 * 10..8 | 00 | 01 | 10 | 11
2278 * -------+-------+-------+-------+-------
2279 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2280 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2281 * 2 010 | PADDB | PSUBB | PCGTB | *
2282 * 3 011 | * | * | * | *
2283 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2284 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2285 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2286 * 7 111 | * | * | PEXT5 | PPAC5
/* Key an MMI0 instruction by major opcode plus bits 10..0. */
#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2291 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2292 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2293 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2294 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2319 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2322 * +--------+----------------------+--------+--------+
2323 * | MMI | |function| MMI1 |
2324 * +--------+----------------------+--------+--------+
2326 * function bits 7..6
2327 * bits | 0 | 1 | 2 | 3
2328 * 10..8 | 00 | 01 | 10 | 11
2329 * -------+-------+-------+-------+-------
2330 * 0 000 | * | PABSW | PCEQW | PMINW
2331 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2332 * 2 010 | * | * | PCEQB | *
2333 * 3 011 | * | * | * | *
2334 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2335 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2336 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2337 * 7 111 | * | * | * | *
/* Key an MMI1 instruction by major opcode plus bits 10..0. */
#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2342 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2343 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2344 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2345 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2363 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2366 * +--------+----------------------+--------+--------+
2367 * | MMI | |function| MMI2 |
2368 * +--------+----------------------+--------+--------+
2370 * function bits 7..6
2371 * bits | 0 | 1 | 2 | 3
2372 * 10..8 | 00 | 01 | 10 | 11
2373 * -------+-------+-------+-------+-------
2374 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2375 * 1 001 | PMSUBW| * | * | *
2376 * 2 010 | PMFHI | PMFLO | PINTH | *
2377 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2378 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2379 * 5 101 | PMSUBH| PHMSBH| * | *
2380 * 6 110 | * | * | PEXEH | PREVH
2381 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
/* Key an MMI2 instruction by major opcode plus bits 10..0. */
#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2386 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2387 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2388 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2389 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2411 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2414 * +--------+----------------------+--------+--------+
2415 * | MMI | |function| MMI3 |
2416 * +--------+----------------------+--------+--------+
2418 * function bits 7..6
2419 * bits | 0 | 1 | 2 | 3
2420 * 10..8 | 00 | 01 | 10 | 11
2421 * -------+-------+-------+-------+-------
2422 * 0 000 |PMADDUW| * | * | PSRAVW
2423 * 1 001 | * | * | * | *
2424 * 2 010 | PMTHI | PMTLO | PINTEH| *
2425 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2426 * 4 100 | * | * | POR | PNOR
2427 * 5 101 | * | * | * | *
2428 * 6 110 | * | * | PEXCH | PCPYH
2429 * 7 111 | * | * | PEXCW | *
/* Key an MMI3 instruction by major opcode plus bits 10..0. */
#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2434 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2435 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2436 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2437 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2449 /* global register indices */
2450 static TCGv cpu_gpr
[32], cpu_PC
;
2451 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2452 static TCGv cpu_dspctrl
, btarget
, bcond
;
2453 static TCGv_i32 hflags
;
2454 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2455 static TCGv_i64 fpu_f64
[32];
2456 static TCGv_i64 msa_wr_d
[64];
2459 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2462 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: each materialises the trailing immediate
 * argument as a TCGv_i32 constant, invokes the named helper with
 * cpu_env, then frees the temporary.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2506 typedef struct DisasContext
{
2507 DisasContextBase base
;
2508 target_ulong saved_pc
;
2509 target_ulong page_start
;
2511 uint64_t insn_flags
;
2512 int32_t CP0_Config1
;
2513 int32_t CP0_Config2
;
2514 int32_t CP0_Config3
;
2515 int32_t CP0_Config5
;
2516 /* Routine used to access memory */
2518 TCGMemOp default_tcg_memop_mask
;
2519 uint32_t hflags
, saved_hflags
;
2520 target_ulong btarget
;
2531 int CP0_LLAddr_shift
;
/* Translator exit reasons beyond the generic DISAS_* codes. */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
/* ABI names of the 32 MIPS general-purpose registers. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulators (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulators (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the MSA vector registers, as two 64-bit halves per register. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Names of the MXU registers: XR1..XR15 plus the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
/* Emit a disassembly-log message, compiled out unless MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                    \
    do {                                                                  \
        if (MIPS_DEBUG_DISAS) {                                           \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);             \
        }                                                                 \
    } while (0)
/*
 * Log an invalid-instruction diagnostic (PC, raw opcode, decoder label,
 * and the major/minor opcode fields); no-op unless MIPS_DEBUG_DISAS.
 * Expects a DisasContext *ctx in scope at the expansion site.
 */
#define MIPS_INVAL(op)                                                    \
    do {                                                                  \
        if (MIPS_DEBUG_DISAS) {                                           \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                              \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,             \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,          \
                          ((ctx->opcode >> 16) & 0x1F));                  \
        }                                                                 \
    } while (0)
2607 /* General purpose registers moves. */
2608 static inline void gen_load_gpr (TCGv t
, int reg
)
2611 tcg_gen_movi_tl(t
, 0);
2613 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2616 static inline void gen_store_gpr (TCGv t
, int reg
)
2619 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2622 /* Moves to/from shadow registers. */
2623 static inline void gen_load_srsgpr (int from
, int to
)
2625 TCGv t0
= tcg_temp_new();
2628 tcg_gen_movi_tl(t0
, 0);
2630 TCGv_i32 t2
= tcg_temp_new_i32();
2631 TCGv_ptr addr
= tcg_temp_new_ptr();
2633 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2634 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2635 tcg_gen_andi_i32(t2
, t2
, 0xf);
2636 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2637 tcg_gen_ext_i32_ptr(addr
, t2
);
2638 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2640 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2641 tcg_temp_free_ptr(addr
);
2642 tcg_temp_free_i32(t2
);
2644 gen_store_gpr(t0
, to
);
2648 static inline void gen_store_srsgpr (int from
, int to
)
2651 TCGv t0
= tcg_temp_new();
2652 TCGv_i32 t2
= tcg_temp_new_i32();
2653 TCGv_ptr addr
= tcg_temp_new_ptr();
2655 gen_load_gpr(t0
, from
);
2656 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2657 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2658 tcg_gen_andi_i32(t2
, t2
, 0xf);
2659 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2660 tcg_gen_ext_i32_ptr(addr
, t2
);
2661 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2663 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2664 tcg_temp_free_ptr(addr
);
2665 tcg_temp_free_i32(t2
);
2670 /* MXU General purpose registers moves. */
2671 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2674 tcg_gen_movi_tl(t
, 0);
2675 } else if (reg
<= 15) {
2676 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2680 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2682 if (reg
> 0 && reg
<= 15) {
2683 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2687 /* MXU control register moves. */
2688 static inline void gen_load_mxu_cr(TCGv t
)
2690 tcg_gen_mov_tl(t
, mxu_CR
);
2693 static inline void gen_store_mxu_cr(TCGv t
)
2695 /* TODO: Add handling of RW rules for MXU_CR. */
2696 tcg_gen_mov_tl(mxu_CR
, t
);
2701 static inline void gen_save_pc(target_ulong pc
)
2703 tcg_gen_movi_tl(cpu_PC
, pc
);
2706 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2708 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2709 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2710 gen_save_pc(ctx
->base
.pc_next
);
2711 ctx
->saved_pc
= ctx
->base
.pc_next
;
2713 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2714 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2715 ctx
->saved_hflags
= ctx
->hflags
;
2716 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2722 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2728 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2730 ctx
->saved_hflags
= ctx
->hflags
;
2731 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2737 ctx
->btarget
= env
->btarget
;
2742 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2744 TCGv_i32 texcp
= tcg_const_i32(excp
);
2745 TCGv_i32 terr
= tcg_const_i32(err
);
2746 save_cpu_state(ctx
, 1);
2747 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2748 tcg_temp_free_i32(terr
);
2749 tcg_temp_free_i32(texcp
);
2750 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2753 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2755 gen_helper_0e0i(raise_exception
, excp
);
2758 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2760 generate_exception_err(ctx
, excp
, 0);
2763 /* Floating point register moves. */
2764 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2766 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2767 generate_exception(ctx
, EXCP_RI
);
2769 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2772 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2775 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2776 generate_exception(ctx
, EXCP_RI
);
2778 t64
= tcg_temp_new_i64();
2779 tcg_gen_extu_i32_i64(t64
, t
);
2780 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2781 tcg_temp_free_i64(t64
);
2784 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2786 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2787 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2789 gen_load_fpr32(ctx
, t
, reg
| 1);
2793 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2795 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2796 TCGv_i64 t64
= tcg_temp_new_i64();
2797 tcg_gen_extu_i32_i64(t64
, t
);
2798 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2799 tcg_temp_free_i64(t64
);
2801 gen_store_fpr32(ctx
, t
, reg
| 1);
2805 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2807 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2808 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2810 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2814 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2816 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2817 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2820 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2821 t0
= tcg_temp_new_i64();
2822 tcg_gen_shri_i64(t0
, t
, 32);
2823 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2824 tcg_temp_free_i64(t0
);
/*
 * Map FP condition-code number cc to its bit position in FCSR:
 * cc 0 lives at bit 23; cc 1..7 live at bits 25..31.
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2836 /* Addresses computation */
2837 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2839 tcg_gen_add_tl(ret
, arg0
, arg1
);
2841 #if defined(TARGET_MIPS64)
2842 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2843 tcg_gen_ext32s_i64(ret
, ret
);
2848 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2851 tcg_gen_addi_tl(ret
, base
, ofs
);
2853 #if defined(TARGET_MIPS64)
2854 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2855 tcg_gen_ext32s_i64(ret
, ret
);
2860 /* Addresses computation (translation time) */
2861 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2864 target_long sum
= base
+ offset
;
2866 #if defined(TARGET_MIPS64)
2867 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2874 /* Sign-extract the low 32-bits to a target_long. */
2875 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2877 #if defined(TARGET_MIPS64)
2878 tcg_gen_ext32s_i64(ret
, arg
);
2880 tcg_gen_extrl_i64_i32(ret
, arg
);
2884 /* Sign-extract the high 32-bits to a target_long. */
2885 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2887 #if defined(TARGET_MIPS64)
2888 tcg_gen_sari_i64(ret
, arg
, 32);
2890 tcg_gen_extrh_i64_i32(ret
, arg
);
2894 static inline void check_cp0_enabled(DisasContext
*ctx
)
2896 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2897 generate_exception_err(ctx
, EXCP_CpU
, 0);
2900 static inline void check_cp1_enabled(DisasContext
*ctx
)
2902 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2903 generate_exception_err(ctx
, EXCP_CpU
, 1);
2906 /* Verify that the processor is running with COP1X instructions enabled.
2907 This is associated with the nabla symbol in the MIPS32 and MIPS64
2910 static inline void check_cop1x(DisasContext
*ctx
)
2912 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2913 generate_exception_end(ctx
, EXCP_RI
);
2916 /* Verify that the processor is running with 64-bit floating-point
2917 operations enabled. */
2919 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2921 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2922 generate_exception_end(ctx
, EXCP_RI
);
2926 * Verify if floating point register is valid; an operation is not defined
2927 * if bit 0 of any register specification is set and the FR bit in the
2928 * Status register equals zero, since the register numbers specify an
2929 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2930 * in the Status register equals one, both even and odd register numbers
2931 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2933 * Multiple 64 bit wide registers can be checked by calling
2934 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2936 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2938 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2939 generate_exception_end(ctx
, EXCP_RI
);
2942 /* Verify that the processor is running with DSP instructions enabled.
2943 This is enabled by CP0 Status register MX(24) bit.
2946 static inline void check_dsp(DisasContext
*ctx
)
2948 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2949 if (ctx
->insn_flags
& ASE_DSP
) {
2950 generate_exception_end(ctx
, EXCP_DSPDIS
);
2952 generate_exception_end(ctx
, EXCP_RI
);
2957 static inline void check_dsp_r2(DisasContext
*ctx
)
2959 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2960 if (ctx
->insn_flags
& ASE_DSP
) {
2961 generate_exception_end(ctx
, EXCP_DSPDIS
);
2963 generate_exception_end(ctx
, EXCP_RI
);
2968 static inline void check_dsp_r3(DisasContext
*ctx
)
2970 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2971 if (ctx
->insn_flags
& ASE_DSP
) {
2972 generate_exception_end(ctx
, EXCP_DSPDIS
);
2974 generate_exception_end(ctx
, EXCP_RI
);
2979 /* This code generates a "reserved instruction" exception if the
2980 CPU does not support the instruction set corresponding to flags. */
2981 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2983 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2984 generate_exception_end(ctx
, EXCP_RI
);
2988 /* This code generates a "reserved instruction" exception if the
2989 CPU has corresponding flag set which indicates that the instruction
2990 has been removed. */
2991 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2993 if (unlikely(ctx
->insn_flags
& flags
)) {
2994 generate_exception_end(ctx
, EXCP_RI
);
2999 * The Linux kernel traps certain reserved instruction exceptions to
3000 * emulate the corresponding instructions. QEMU is the kernel in user
3001 * mode, so those traps are emulated by accepting the instructions.
3003 * A reserved instruction exception is generated for flagged CPUs if
3004 * QEMU runs in system mode.
3006 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3008 #ifndef CONFIG_USER_ONLY
3009 check_insn_opc_removed(ctx
, flags
);
3013 /* This code generates a "reserved instruction" exception if the
3014 CPU does not support 64-bit paired-single (PS) floating point data type */
3015 static inline void check_ps(DisasContext
*ctx
)
3017 if (unlikely(!ctx
->ps
)) {
3018 generate_exception(ctx
, EXCP_RI
);
3020 check_cp1_64bitmode(ctx
);
3023 #ifdef TARGET_MIPS64
3024 /* This code generates a "reserved instruction" exception if 64-bit
3025 instructions are not enabled. */
3026 static inline void check_mips_64(DisasContext
*ctx
)
3028 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
3029 generate_exception_end(ctx
, EXCP_RI
);
3033 #ifndef CONFIG_USER_ONLY
3034 static inline void check_mvh(DisasContext
*ctx
)
3036 if (unlikely(!ctx
->mvh
)) {
3037 generate_exception(ctx
, EXCP_RI
);
3043 * This code generates a "reserved instruction" exception if the
3044 * Config5 XNP bit is set.
3046 static inline void check_xnp(DisasContext
*ctx
)
3048 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3049 generate_exception_end(ctx
, EXCP_RI
);
3053 #ifndef CONFIG_USER_ONLY
3055 * This code generates a "reserved instruction" exception if the
3056 * Config3 PW bit is NOT set.
3058 static inline void check_pw(DisasContext
*ctx
)
3060 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3061 generate_exception_end(ctx
, EXCP_RI
);
3067 * This code generates a "reserved instruction" exception if the
3068 * Config3 MT bit is NOT set.
3070 static inline void check_mt(DisasContext
*ctx
)
3072 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3073 generate_exception_end(ctx
, EXCP_RI
);
3077 #ifndef CONFIG_USER_ONLY
3079 * This code generates a "coprocessor unusable" exception if CP0 is not
3080 * available, and, if that is not the case, generates a "reserved instruction"
3081 * exception if the Config5 MT bit is NOT set. This is needed for availability
3082 * control of some of MT ASE instructions.
3084 static inline void check_cp0_mt(DisasContext
*ctx
)
3086 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3087 generate_exception_err(ctx
, EXCP_CpU
, 0);
3089 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3090 generate_exception_err(ctx
, EXCP_RI
, 0);
3097 * This code generates a "reserved instruction" exception if the
3098 * Config5 NMS bit is set.
3100 static inline void check_nms(DisasContext
*ctx
)
3102 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3103 generate_exception_end(ctx
, EXCP_RI
);
3108 * This code generates a "reserved instruction" exception if the
3109 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3110 * Config2 TL, and Config5 L2C are unset.
3112 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3114 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3115 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3116 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3117 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3118 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3119 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
3121 generate_exception_end(ctx
, EXCP_RI
);
3126 * This code generates a "reserved instruction" exception if the
3127 * Config5 EVA bit is NOT set.
3129 static inline void check_eva(DisasContext
*ctx
)
3131 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3132 generate_exception_end(ctx
, EXCP_RI
);
3137 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3138 calling interface for 32 and 64-bit FPRs. No sense in changing
3139 all callers for gen_load_fpr32 when we need the CTX parameter for
3141 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3142 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3143 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3144 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3145 int ft, int fs, int cc) \
3147 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3148 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3157 check_cp1_registers(ctx, fs | ft); \
3165 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3166 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3168 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3169 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3170 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3171 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3172 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3173 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3174 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3175 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3176 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3177 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3178 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3179 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3180 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3181 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3182 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3183 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3186 tcg_temp_free_i##bits (fp0); \
3187 tcg_temp_free_i##bits (fp1); \
3190 FOP_CONDS(, 0, d
, FMT_D
, 64)
3191 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3192 FOP_CONDS(, 0, s
, FMT_S
, 32)
3193 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3194 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3195 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3198 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3199 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3200 int ft, int fs, int fd) \
3202 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3203 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3204 if (ifmt == FMT_D) { \
3205 check_cp1_registers(ctx, fs | ft | fd); \
3207 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3208 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3211 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3214 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3217 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3220 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3223 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3226 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3229 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3232 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3235 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3238 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3241 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3244 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3247 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3250 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3253 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3256 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3259 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3262 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3265 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3268 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3271 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3274 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3280 tcg_temp_free_i ## bits (fp0); \
3281 tcg_temp_free_i ## bits (fp1); \
3284 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3285 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3287 #undef gen_ldcmp_fpr32
3288 #undef gen_ldcmp_fpr64
3290 /* load/store instructions. */
3291 #ifdef CONFIG_USER_ONLY
3292 #define OP_LD_ATOMIC(insn,fname) \
3293 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3294 DisasContext *ctx) \
3296 TCGv t0 = tcg_temp_new(); \
3297 tcg_gen_mov_tl(t0, arg1); \
3298 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3299 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3300 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3301 tcg_temp_free(t0); \
3304 #define OP_LD_ATOMIC(insn,fname) \
3305 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3306 DisasContext *ctx) \
3308 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3311 OP_LD_ATOMIC(ll
,ld32s
);
3312 #if defined(TARGET_MIPS64)
3313 OP_LD_ATOMIC(lld
,ld64
);
3317 #ifdef CONFIG_USER_ONLY
3318 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3319 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3320 DisasContext *ctx) \
3322 TCGv t0 = tcg_temp_new(); \
3323 TCGLabel *l1 = gen_new_label(); \
3324 TCGLabel *l2 = gen_new_label(); \
3326 tcg_gen_andi_tl(t0, arg2, almask); \
3327 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3328 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3329 generate_exception(ctx, EXCP_AdES); \
3330 gen_set_label(l1); \
3331 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3332 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3333 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3334 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3335 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3336 generate_exception_end(ctx, EXCP_SC); \
3337 gen_set_label(l2); \
3338 tcg_gen_movi_tl(t0, 0); \
3339 gen_store_gpr(t0, rt); \
3340 tcg_temp_free(t0); \
3343 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3344 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3345 DisasContext *ctx) \
3347 TCGv t0 = tcg_temp_new(); \
3348 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3349 gen_store_gpr(t0, rt); \
3350 tcg_temp_free(t0); \
3353 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3354 #if defined(TARGET_MIPS64)
3355 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3359 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3360 int base
, int offset
)
3363 tcg_gen_movi_tl(addr
, offset
);
3364 } else if (offset
== 0) {
3365 gen_load_gpr(addr
, base
);
3367 tcg_gen_movi_tl(addr
, offset
);
3368 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3372 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3374 target_ulong pc
= ctx
->base
.pc_next
;
3376 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3377 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3382 pc
&= ~(target_ulong
)3;
3387 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3388 int rt
, int base
, int offset
)
3391 int mem_idx
= ctx
->mem_idx
;
3393 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3394 /* Loongson CPU uses a load to zero register for prefetch.
3395 We emulate it as a NOP. On other CPU we must perform the
3396 actual memory access. */
3400 t0
= tcg_temp_new();
3401 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3404 #if defined(TARGET_MIPS64)
3406 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3407 ctx
->default_tcg_memop_mask
);
3408 gen_store_gpr(t0
, rt
);
3411 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3412 ctx
->default_tcg_memop_mask
);
3413 gen_store_gpr(t0
, rt
);
3417 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3418 gen_store_gpr(t0
, rt
);
3421 t1
= tcg_temp_new();
3422 /* Do a byte access to possibly trigger a page
3423 fault with the unaligned address. */
3424 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3425 tcg_gen_andi_tl(t1
, t0
, 7);
3426 #ifndef TARGET_WORDS_BIGENDIAN
3427 tcg_gen_xori_tl(t1
, t1
, 7);
3429 tcg_gen_shli_tl(t1
, t1
, 3);
3430 tcg_gen_andi_tl(t0
, t0
, ~7);
3431 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3432 tcg_gen_shl_tl(t0
, t0
, t1
);
3433 t2
= tcg_const_tl(-1);
3434 tcg_gen_shl_tl(t2
, t2
, t1
);
3435 gen_load_gpr(t1
, rt
);
3436 tcg_gen_andc_tl(t1
, t1
, t2
);
3438 tcg_gen_or_tl(t0
, t0
, t1
);
3440 gen_store_gpr(t0
, rt
);
3443 t1
= tcg_temp_new();
3444 /* Do a byte access to possibly trigger a page
3445 fault with the unaligned address. */
3446 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3447 tcg_gen_andi_tl(t1
, t0
, 7);
3448 #ifdef TARGET_WORDS_BIGENDIAN
3449 tcg_gen_xori_tl(t1
, t1
, 7);
3451 tcg_gen_shli_tl(t1
, t1
, 3);
3452 tcg_gen_andi_tl(t0
, t0
, ~7);
3453 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3454 tcg_gen_shr_tl(t0
, t0
, t1
);
3455 tcg_gen_xori_tl(t1
, t1
, 63);
3456 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3457 tcg_gen_shl_tl(t2
, t2
, t1
);
3458 gen_load_gpr(t1
, rt
);
3459 tcg_gen_and_tl(t1
, t1
, t2
);
3461 tcg_gen_or_tl(t0
, t0
, t1
);
3463 gen_store_gpr(t0
, rt
);
3466 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3467 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3469 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3470 gen_store_gpr(t0
, rt
);
3474 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3475 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3477 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3478 gen_store_gpr(t0
, rt
);
3481 mem_idx
= MIPS_HFLAG_UM
;
3484 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3485 ctx
->default_tcg_memop_mask
);
3486 gen_store_gpr(t0
, rt
);
3489 mem_idx
= MIPS_HFLAG_UM
;
3492 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3493 ctx
->default_tcg_memop_mask
);
3494 gen_store_gpr(t0
, rt
);
3497 mem_idx
= MIPS_HFLAG_UM
;
3500 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3501 ctx
->default_tcg_memop_mask
);
3502 gen_store_gpr(t0
, rt
);
3505 mem_idx
= MIPS_HFLAG_UM
;
3508 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3509 gen_store_gpr(t0
, rt
);
3512 mem_idx
= MIPS_HFLAG_UM
;
3515 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3516 gen_store_gpr(t0
, rt
);
3519 mem_idx
= MIPS_HFLAG_UM
;
3522 t1
= tcg_temp_new();
3523 /* Do a byte access to possibly trigger a page
3524 fault with the unaligned address. */
3525 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3526 tcg_gen_andi_tl(t1
, t0
, 3);
3527 #ifndef TARGET_WORDS_BIGENDIAN
3528 tcg_gen_xori_tl(t1
, t1
, 3);
3530 tcg_gen_shli_tl(t1
, t1
, 3);
3531 tcg_gen_andi_tl(t0
, t0
, ~3);
3532 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3533 tcg_gen_shl_tl(t0
, t0
, t1
);
3534 t2
= tcg_const_tl(-1);
3535 tcg_gen_shl_tl(t2
, t2
, t1
);
3536 gen_load_gpr(t1
, rt
);
3537 tcg_gen_andc_tl(t1
, t1
, t2
);
3539 tcg_gen_or_tl(t0
, t0
, t1
);
3541 tcg_gen_ext32s_tl(t0
, t0
);
3542 gen_store_gpr(t0
, rt
);
3545 mem_idx
= MIPS_HFLAG_UM
;
3548 t1
= tcg_temp_new();
3549 /* Do a byte access to possibly trigger a page
3550 fault with the unaligned address. */
3551 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3552 tcg_gen_andi_tl(t1
, t0
, 3);
3553 #ifdef TARGET_WORDS_BIGENDIAN
3554 tcg_gen_xori_tl(t1
, t1
, 3);
3556 tcg_gen_shli_tl(t1
, t1
, 3);
3557 tcg_gen_andi_tl(t0
, t0
, ~3);
3558 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3559 tcg_gen_shr_tl(t0
, t0
, t1
);
3560 tcg_gen_xori_tl(t1
, t1
, 31);
3561 t2
= tcg_const_tl(0xfffffffeull
);
3562 tcg_gen_shl_tl(t2
, t2
, t1
);
3563 gen_load_gpr(t1
, rt
);
3564 tcg_gen_and_tl(t1
, t1
, t2
);
3566 tcg_gen_or_tl(t0
, t0
, t1
);
3568 tcg_gen_ext32s_tl(t0
, t0
);
3569 gen_store_gpr(t0
, rt
);
3572 mem_idx
= MIPS_HFLAG_UM
;
3576 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3577 gen_store_gpr(t0
, rt
);
3583 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3584 uint32_t reg1
, uint32_t reg2
)
3586 TCGv taddr
= tcg_temp_new();
3587 TCGv_i64 tval
= tcg_temp_new_i64();
3588 TCGv tmp1
= tcg_temp_new();
3589 TCGv tmp2
= tcg_temp_new();
3591 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3592 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3593 #ifdef TARGET_WORDS_BIGENDIAN
3594 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3596 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3598 gen_store_gpr(tmp1
, reg1
);
3599 tcg_temp_free(tmp1
);
3600 gen_store_gpr(tmp2
, reg2
);
3601 tcg_temp_free(tmp2
);
3602 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3603 tcg_temp_free_i64(tval
);
3604 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3605 tcg_temp_free(taddr
);
3609 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3610 int base
, int offset
)
3612 TCGv t0
= tcg_temp_new();
3613 TCGv t1
= tcg_temp_new();
3614 int mem_idx
= ctx
->mem_idx
;
3616 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3617 gen_load_gpr(t1
, rt
);
3619 #if defined(TARGET_MIPS64)
3621 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3622 ctx
->default_tcg_memop_mask
);
3625 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3628 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3632 mem_idx
= MIPS_HFLAG_UM
;
3635 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3636 ctx
->default_tcg_memop_mask
);
3639 mem_idx
= MIPS_HFLAG_UM
;
3642 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3643 ctx
->default_tcg_memop_mask
);
3646 mem_idx
= MIPS_HFLAG_UM
;
3649 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3652 mem_idx
= MIPS_HFLAG_UM
;
3655 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3658 mem_idx
= MIPS_HFLAG_UM
;
3661 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3669 /* Store conditional */
3670 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3671 int base
, int16_t offset
)
3674 int mem_idx
= ctx
->mem_idx
;
3676 #ifdef CONFIG_USER_ONLY
3677 t0
= tcg_temp_local_new();
3678 t1
= tcg_temp_local_new();
3680 t0
= tcg_temp_new();
3681 t1
= tcg_temp_new();
3683 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3684 gen_load_gpr(t1
, rt
);
3686 #if defined(TARGET_MIPS64)
3689 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3693 mem_idx
= MIPS_HFLAG_UM
;
3697 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3704 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3705 uint32_t reg1
, uint32_t reg2
)
3707 TCGv taddr
= tcg_temp_local_new();
3708 TCGv lladdr
= tcg_temp_local_new();
3709 TCGv_i64 tval
= tcg_temp_new_i64();
3710 TCGv_i64 llval
= tcg_temp_new_i64();
3711 TCGv_i64 val
= tcg_temp_new_i64();
3712 TCGv tmp1
= tcg_temp_new();
3713 TCGv tmp2
= tcg_temp_new();
3714 TCGLabel
*lab_fail
= gen_new_label();
3715 TCGLabel
*lab_done
= gen_new_label();
3717 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3719 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3720 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3722 gen_load_gpr(tmp1
, reg1
);
3723 gen_load_gpr(tmp2
, reg2
);
3725 #ifdef TARGET_WORDS_BIGENDIAN
3726 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3728 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3731 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3732 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3733 ctx
->mem_idx
, MO_64
);
3735 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3737 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3739 gen_set_label(lab_fail
);
3742 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3744 gen_set_label(lab_done
);
3745 tcg_gen_movi_tl(lladdr
, -1);
3746 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3749 /* Load and store */
3750 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3753 /* Don't do NOP if destination is zero: we must perform the actual
3758 TCGv_i32 fp0
= tcg_temp_new_i32();
3759 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3760 ctx
->default_tcg_memop_mask
);
3761 gen_store_fpr32(ctx
, fp0
, ft
);
3762 tcg_temp_free_i32(fp0
);
3767 TCGv_i32 fp0
= tcg_temp_new_i32();
3768 gen_load_fpr32(ctx
, fp0
, ft
);
3769 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3770 ctx
->default_tcg_memop_mask
);
3771 tcg_temp_free_i32(fp0
);
3776 TCGv_i64 fp0
= tcg_temp_new_i64();
3777 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3778 ctx
->default_tcg_memop_mask
);
3779 gen_store_fpr64(ctx
, fp0
, ft
);
3780 tcg_temp_free_i64(fp0
);
3785 TCGv_i64 fp0
= tcg_temp_new_i64();
3786 gen_load_fpr64(ctx
, fp0
, ft
);
3787 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3788 ctx
->default_tcg_memop_mask
);
3789 tcg_temp_free_i64(fp0
);
3793 MIPS_INVAL("flt_ldst");
3794 generate_exception_end(ctx
, EXCP_RI
);
3799 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3800 int rs
, int16_t imm
)
3802 TCGv t0
= tcg_temp_new();
3804 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3805 check_cp1_enabled(ctx
);
3809 check_insn(ctx
, ISA_MIPS2
);
3812 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3813 gen_flt_ldst(ctx
, op
, rt
, t0
);
3816 generate_exception_err(ctx
, EXCP_CpU
, 1);
3821 /* Arithmetic with immediate operand */
3822 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3823 int rt
, int rs
, int imm
)
3825 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3827 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3828 /* If no destination, treat it as a NOP.
3829 For addi, we must generate the overflow exception when needed. */
3835 TCGv t0
= tcg_temp_local_new();
3836 TCGv t1
= tcg_temp_new();
3837 TCGv t2
= tcg_temp_new();
3838 TCGLabel
*l1
= gen_new_label();
3840 gen_load_gpr(t1
, rs
);
3841 tcg_gen_addi_tl(t0
, t1
, uimm
);
3842 tcg_gen_ext32s_tl(t0
, t0
);
3844 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3845 tcg_gen_xori_tl(t2
, t0
, uimm
);
3846 tcg_gen_and_tl(t1
, t1
, t2
);
3848 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3850 /* operands of same sign, result different sign */
3851 generate_exception(ctx
, EXCP_OVERFLOW
);
3853 tcg_gen_ext32s_tl(t0
, t0
);
3854 gen_store_gpr(t0
, rt
);
3860 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3861 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3863 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3866 #if defined(TARGET_MIPS64)
3869 TCGv t0
= tcg_temp_local_new();
3870 TCGv t1
= tcg_temp_new();
3871 TCGv t2
= tcg_temp_new();
3872 TCGLabel
*l1
= gen_new_label();
3874 gen_load_gpr(t1
, rs
);
3875 tcg_gen_addi_tl(t0
, t1
, uimm
);
3877 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3878 tcg_gen_xori_tl(t2
, t0
, uimm
);
3879 tcg_gen_and_tl(t1
, t1
, t2
);
3881 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3883 /* operands of same sign, result different sign */
3884 generate_exception(ctx
, EXCP_OVERFLOW
);
3886 gen_store_gpr(t0
, rt
);
3892 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3894 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3901 /* Logic with immediate operand */
3902 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3903 int rt
, int rs
, int16_t imm
)
3908 /* If no destination, treat it as a NOP. */
3911 uimm
= (uint16_t)imm
;
3914 if (likely(rs
!= 0))
3915 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3917 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3921 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3923 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3926 if (likely(rs
!= 0))
3927 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3929 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3932 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3934 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3935 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3937 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3946 /* Set on less than with immediate operand */
3947 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3948 int rt
, int rs
, int16_t imm
)
3950 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3954 /* If no destination, treat it as a NOP. */
3957 t0
= tcg_temp_new();
3958 gen_load_gpr(t0
, rs
);
3961 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3964 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3970 /* Shifts with immediate operand */
3971 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3972 int rt
, int rs
, int16_t imm
)
3974 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3978 /* If no destination, treat it as a NOP. */
3982 t0
= tcg_temp_new();
3983 gen_load_gpr(t0
, rs
);
3986 tcg_gen_shli_tl(t0
, t0
, uimm
);
3987 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3990 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3994 tcg_gen_ext32u_tl(t0
, t0
);
3995 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3997 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4002 TCGv_i32 t1
= tcg_temp_new_i32();
4004 tcg_gen_trunc_tl_i32(t1
, t0
);
4005 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4006 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4007 tcg_temp_free_i32(t1
);
4009 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4012 #if defined(TARGET_MIPS64)
4014 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4017 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4020 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4024 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4026 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4030 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4033 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4036 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4039 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4047 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4048 int rd
, int rs
, int rt
)
4050 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4051 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4052 /* If no destination, treat it as a NOP.
4053 For add & sub, we must generate the overflow exception when needed. */
4060 TCGv t0
= tcg_temp_local_new();
4061 TCGv t1
= tcg_temp_new();
4062 TCGv t2
= tcg_temp_new();
4063 TCGLabel
*l1
= gen_new_label();
4065 gen_load_gpr(t1
, rs
);
4066 gen_load_gpr(t2
, rt
);
4067 tcg_gen_add_tl(t0
, t1
, t2
);
4068 tcg_gen_ext32s_tl(t0
, t0
);
4069 tcg_gen_xor_tl(t1
, t1
, t2
);
4070 tcg_gen_xor_tl(t2
, t0
, t2
);
4071 tcg_gen_andc_tl(t1
, t2
, t1
);
4073 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4075 /* operands of same sign, result different sign */
4076 generate_exception(ctx
, EXCP_OVERFLOW
);
4078 gen_store_gpr(t0
, rd
);
4083 if (rs
!= 0 && rt
!= 0) {
4084 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4085 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4086 } else if (rs
== 0 && rt
!= 0) {
4087 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4088 } else if (rs
!= 0 && rt
== 0) {
4089 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4091 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4096 TCGv t0
= tcg_temp_local_new();
4097 TCGv t1
= tcg_temp_new();
4098 TCGv t2
= tcg_temp_new();
4099 TCGLabel
*l1
= gen_new_label();
4101 gen_load_gpr(t1
, rs
);
4102 gen_load_gpr(t2
, rt
);
4103 tcg_gen_sub_tl(t0
, t1
, t2
);
4104 tcg_gen_ext32s_tl(t0
, t0
);
4105 tcg_gen_xor_tl(t2
, t1
, t2
);
4106 tcg_gen_xor_tl(t1
, t0
, t1
);
4107 tcg_gen_and_tl(t1
, t1
, t2
);
4109 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4111 /* operands of different sign, first operand and result different sign */
4112 generate_exception(ctx
, EXCP_OVERFLOW
);
4114 gen_store_gpr(t0
, rd
);
4119 if (rs
!= 0 && rt
!= 0) {
4120 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4121 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4122 } else if (rs
== 0 && rt
!= 0) {
4123 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4124 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4125 } else if (rs
!= 0 && rt
== 0) {
4126 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4128 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4131 #if defined(TARGET_MIPS64)
4134 TCGv t0
= tcg_temp_local_new();
4135 TCGv t1
= tcg_temp_new();
4136 TCGv t2
= tcg_temp_new();
4137 TCGLabel
*l1
= gen_new_label();
4139 gen_load_gpr(t1
, rs
);
4140 gen_load_gpr(t2
, rt
);
4141 tcg_gen_add_tl(t0
, t1
, t2
);
4142 tcg_gen_xor_tl(t1
, t1
, t2
);
4143 tcg_gen_xor_tl(t2
, t0
, t2
);
4144 tcg_gen_andc_tl(t1
, t2
, t1
);
4146 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4148 /* operands of same sign, result different sign */
4149 generate_exception(ctx
, EXCP_OVERFLOW
);
4151 gen_store_gpr(t0
, rd
);
4156 if (rs
!= 0 && rt
!= 0) {
4157 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4158 } else if (rs
== 0 && rt
!= 0) {
4159 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4160 } else if (rs
!= 0 && rt
== 0) {
4161 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4163 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4168 TCGv t0
= tcg_temp_local_new();
4169 TCGv t1
= tcg_temp_new();
4170 TCGv t2
= tcg_temp_new();
4171 TCGLabel
*l1
= gen_new_label();
4173 gen_load_gpr(t1
, rs
);
4174 gen_load_gpr(t2
, rt
);
4175 tcg_gen_sub_tl(t0
, t1
, t2
);
4176 tcg_gen_xor_tl(t2
, t1
, t2
);
4177 tcg_gen_xor_tl(t1
, t0
, t1
);
4178 tcg_gen_and_tl(t1
, t1
, t2
);
4180 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4182 /* operands of different sign, first operand and result different sign */
4183 generate_exception(ctx
, EXCP_OVERFLOW
);
4185 gen_store_gpr(t0
, rd
);
4190 if (rs
!= 0 && rt
!= 0) {
4191 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4192 } else if (rs
== 0 && rt
!= 0) {
4193 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4194 } else if (rs
!= 0 && rt
== 0) {
4195 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4197 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4202 if (likely(rs
!= 0 && rt
!= 0)) {
4203 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4204 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4206 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4212 /* Conditional move */
4213 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4214 int rd
, int rs
, int rt
)
4219 /* If no destination, treat it as a NOP. */
4223 t0
= tcg_temp_new();
4224 gen_load_gpr(t0
, rt
);
4225 t1
= tcg_const_tl(0);
4226 t2
= tcg_temp_new();
4227 gen_load_gpr(t2
, rs
);
4230 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4233 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4236 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4239 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4248 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4249 int rd
, int rs
, int rt
)
4252 /* If no destination, treat it as a NOP. */
4258 if (likely(rs
!= 0 && rt
!= 0)) {
4259 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4261 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4265 if (rs
!= 0 && rt
!= 0) {
4266 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4267 } else if (rs
== 0 && rt
!= 0) {
4268 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4269 } else if (rs
!= 0 && rt
== 0) {
4270 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4272 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4276 if (likely(rs
!= 0 && rt
!= 0)) {
4277 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4278 } else if (rs
== 0 && rt
!= 0) {
4279 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4280 } else if (rs
!= 0 && rt
== 0) {
4281 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4283 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4287 if (likely(rs
!= 0 && rt
!= 0)) {
4288 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4289 } else if (rs
== 0 && rt
!= 0) {
4290 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4291 } else if (rs
!= 0 && rt
== 0) {
4292 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4294 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4300 /* Set on lower than */
4301 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4302 int rd
, int rs
, int rt
)
4307 /* If no destination, treat it as a NOP. */
4311 t0
= tcg_temp_new();
4312 t1
= tcg_temp_new();
4313 gen_load_gpr(t0
, rs
);
4314 gen_load_gpr(t1
, rt
);
4317 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4320 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4328 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4329 int rd
, int rs
, int rt
)
4334 /* If no destination, treat it as a NOP.
4335 For add & sub, we must generate the overflow exception when needed. */
4339 t0
= tcg_temp_new();
4340 t1
= tcg_temp_new();
4341 gen_load_gpr(t0
, rs
);
4342 gen_load_gpr(t1
, rt
);
4345 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4346 tcg_gen_shl_tl(t0
, t1
, t0
);
4347 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4350 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4351 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4354 tcg_gen_ext32u_tl(t1
, t1
);
4355 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4356 tcg_gen_shr_tl(t0
, t1
, t0
);
4357 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4361 TCGv_i32 t2
= tcg_temp_new_i32();
4362 TCGv_i32 t3
= tcg_temp_new_i32();
4364 tcg_gen_trunc_tl_i32(t2
, t0
);
4365 tcg_gen_trunc_tl_i32(t3
, t1
);
4366 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4367 tcg_gen_rotr_i32(t2
, t3
, t2
);
4368 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4369 tcg_temp_free_i32(t2
);
4370 tcg_temp_free_i32(t3
);
4373 #if defined(TARGET_MIPS64)
4375 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4376 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4379 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4380 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4383 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4384 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4387 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4388 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4396 /* Copy GPR to and from TX79 HI1/LO1 register. */
4397 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4399 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4406 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4409 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4413 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4415 tcg_gen_movi_tl(cpu_HI
[1], 0);
4420 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4422 tcg_gen_movi_tl(cpu_LO
[1], 0);
4426 MIPS_INVAL("mfthilo1 TX79");
4427 generate_exception_end(ctx
, EXCP_RI
);
4432 /* Arithmetic on HI/LO registers */
4433 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4435 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4446 #if defined(TARGET_MIPS64)
4448 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4452 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4456 #if defined(TARGET_MIPS64)
4458 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4462 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4467 #if defined(TARGET_MIPS64)
4469 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4473 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4476 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4481 #if defined(TARGET_MIPS64)
4483 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4487 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4490 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4496 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4499 TCGv t0
= tcg_const_tl(addr
);
4500 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4501 gen_store_gpr(t0
, reg
);
4505 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4511 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4514 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4515 addr
= addr_add(ctx
, pc
, offset
);
4516 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4520 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4521 addr
= addr_add(ctx
, pc
, offset
);
4522 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4524 #if defined(TARGET_MIPS64)
4527 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4528 addr
= addr_add(ctx
, pc
, offset
);
4529 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4533 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4536 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4537 addr
= addr_add(ctx
, pc
, offset
);
4538 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4543 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4544 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4545 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4548 #if defined(TARGET_MIPS64)
4549 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4550 case R6_OPC_LDPC
+ (1 << 16):
4551 case R6_OPC_LDPC
+ (2 << 16):
4552 case R6_OPC_LDPC
+ (3 << 16):
4554 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4555 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4556 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4560 MIPS_INVAL("OPC_PCREL");
4561 generate_exception_end(ctx
, EXCP_RI
);
4568 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4577 t0
= tcg_temp_new();
4578 t1
= tcg_temp_new();
4580 gen_load_gpr(t0
, rs
);
4581 gen_load_gpr(t1
, rt
);
4586 TCGv t2
= tcg_temp_new();
4587 TCGv t3
= tcg_temp_new();
4588 tcg_gen_ext32s_tl(t0
, t0
);
4589 tcg_gen_ext32s_tl(t1
, t1
);
4590 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4591 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4592 tcg_gen_and_tl(t2
, t2
, t3
);
4593 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4594 tcg_gen_or_tl(t2
, t2
, t3
);
4595 tcg_gen_movi_tl(t3
, 0);
4596 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4597 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4598 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4605 TCGv t2
= tcg_temp_new();
4606 TCGv t3
= tcg_temp_new();
4607 tcg_gen_ext32s_tl(t0
, t0
);
4608 tcg_gen_ext32s_tl(t1
, t1
);
4609 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4610 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4611 tcg_gen_and_tl(t2
, t2
, t3
);
4612 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4613 tcg_gen_or_tl(t2
, t2
, t3
);
4614 tcg_gen_movi_tl(t3
, 0);
4615 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4616 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4617 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4624 TCGv t2
= tcg_const_tl(0);
4625 TCGv t3
= tcg_const_tl(1);
4626 tcg_gen_ext32u_tl(t0
, t0
);
4627 tcg_gen_ext32u_tl(t1
, t1
);
4628 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4629 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4630 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4637 TCGv t2
= tcg_const_tl(0);
4638 TCGv t3
= tcg_const_tl(1);
4639 tcg_gen_ext32u_tl(t0
, t0
);
4640 tcg_gen_ext32u_tl(t1
, t1
);
4641 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4642 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4643 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4650 TCGv_i32 t2
= tcg_temp_new_i32();
4651 TCGv_i32 t3
= tcg_temp_new_i32();
4652 tcg_gen_trunc_tl_i32(t2
, t0
);
4653 tcg_gen_trunc_tl_i32(t3
, t1
);
4654 tcg_gen_mul_i32(t2
, t2
, t3
);
4655 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4656 tcg_temp_free_i32(t2
);
4657 tcg_temp_free_i32(t3
);
4662 TCGv_i32 t2
= tcg_temp_new_i32();
4663 TCGv_i32 t3
= tcg_temp_new_i32();
4664 tcg_gen_trunc_tl_i32(t2
, t0
);
4665 tcg_gen_trunc_tl_i32(t3
, t1
);
4666 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4667 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4668 tcg_temp_free_i32(t2
);
4669 tcg_temp_free_i32(t3
);
4674 TCGv_i32 t2
= tcg_temp_new_i32();
4675 TCGv_i32 t3
= tcg_temp_new_i32();
4676 tcg_gen_trunc_tl_i32(t2
, t0
);
4677 tcg_gen_trunc_tl_i32(t3
, t1
);
4678 tcg_gen_mul_i32(t2
, t2
, t3
);
4679 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4680 tcg_temp_free_i32(t2
);
4681 tcg_temp_free_i32(t3
);
4686 TCGv_i32 t2
= tcg_temp_new_i32();
4687 TCGv_i32 t3
= tcg_temp_new_i32();
4688 tcg_gen_trunc_tl_i32(t2
, t0
);
4689 tcg_gen_trunc_tl_i32(t3
, t1
);
4690 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4691 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4692 tcg_temp_free_i32(t2
);
4693 tcg_temp_free_i32(t3
);
4696 #if defined(TARGET_MIPS64)
4699 TCGv t2
= tcg_temp_new();
4700 TCGv t3
= tcg_temp_new();
4701 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4702 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4703 tcg_gen_and_tl(t2
, t2
, t3
);
4704 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4705 tcg_gen_or_tl(t2
, t2
, t3
);
4706 tcg_gen_movi_tl(t3
, 0);
4707 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4708 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4715 TCGv t2
= tcg_temp_new();
4716 TCGv t3
= tcg_temp_new();
4717 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4718 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4719 tcg_gen_and_tl(t2
, t2
, t3
);
4720 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4721 tcg_gen_or_tl(t2
, t2
, t3
);
4722 tcg_gen_movi_tl(t3
, 0);
4723 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4724 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4731 TCGv t2
= tcg_const_tl(0);
4732 TCGv t3
= tcg_const_tl(1);
4733 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4734 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4741 TCGv t2
= tcg_const_tl(0);
4742 TCGv t3
= tcg_const_tl(1);
4743 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4744 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4750 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4754 TCGv t2
= tcg_temp_new();
4755 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4760 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4764 TCGv t2
= tcg_temp_new();
4765 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4771 MIPS_INVAL("r6 mul/div");
4772 generate_exception_end(ctx
, EXCP_RI
);
4780 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4784 t0
= tcg_temp_new();
4785 t1
= tcg_temp_new();
4787 gen_load_gpr(t0
, rs
);
4788 gen_load_gpr(t1
, rt
);
4793 TCGv t2
= tcg_temp_new();
4794 TCGv t3
= tcg_temp_new();
4795 tcg_gen_ext32s_tl(t0
, t0
);
4796 tcg_gen_ext32s_tl(t1
, t1
);
4797 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4798 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4799 tcg_gen_and_tl(t2
, t2
, t3
);
4800 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4801 tcg_gen_or_tl(t2
, t2
, t3
);
4802 tcg_gen_movi_tl(t3
, 0);
4803 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4804 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4805 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4806 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4807 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4814 TCGv t2
= tcg_const_tl(0);
4815 TCGv t3
= tcg_const_tl(1);
4816 tcg_gen_ext32u_tl(t0
, t0
);
4817 tcg_gen_ext32u_tl(t1
, t1
);
4818 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4819 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4820 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4821 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4822 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4828 MIPS_INVAL("div1 TX79");
4829 generate_exception_end(ctx
, EXCP_RI
);
4837 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4838 int acc
, int rs
, int rt
)
4842 t0
= tcg_temp_new();
4843 t1
= tcg_temp_new();
4845 gen_load_gpr(t0
, rs
);
4846 gen_load_gpr(t1
, rt
);
4855 TCGv t2
= tcg_temp_new();
4856 TCGv t3
= tcg_temp_new();
4857 tcg_gen_ext32s_tl(t0
, t0
);
4858 tcg_gen_ext32s_tl(t1
, t1
);
4859 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4860 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4861 tcg_gen_and_tl(t2
, t2
, t3
);
4862 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4863 tcg_gen_or_tl(t2
, t2
, t3
);
4864 tcg_gen_movi_tl(t3
, 0);
4865 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4866 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4867 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4868 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4869 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4876 TCGv t2
= tcg_const_tl(0);
4877 TCGv t3
= tcg_const_tl(1);
4878 tcg_gen_ext32u_tl(t0
, t0
);
4879 tcg_gen_ext32u_tl(t1
, t1
);
4880 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4881 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4882 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4883 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4884 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4891 TCGv_i32 t2
= tcg_temp_new_i32();
4892 TCGv_i32 t3
= tcg_temp_new_i32();
4893 tcg_gen_trunc_tl_i32(t2
, t0
);
4894 tcg_gen_trunc_tl_i32(t3
, t1
);
4895 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4896 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4897 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4898 tcg_temp_free_i32(t2
);
4899 tcg_temp_free_i32(t3
);
4904 TCGv_i32 t2
= tcg_temp_new_i32();
4905 TCGv_i32 t3
= tcg_temp_new_i32();
4906 tcg_gen_trunc_tl_i32(t2
, t0
);
4907 tcg_gen_trunc_tl_i32(t3
, t1
);
4908 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4909 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4910 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4911 tcg_temp_free_i32(t2
);
4912 tcg_temp_free_i32(t3
);
4915 #if defined(TARGET_MIPS64)
4918 TCGv t2
= tcg_temp_new();
4919 TCGv t3
= tcg_temp_new();
4920 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4921 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4922 tcg_gen_and_tl(t2
, t2
, t3
);
4923 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4924 tcg_gen_or_tl(t2
, t2
, t3
);
4925 tcg_gen_movi_tl(t3
, 0);
4926 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4927 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4928 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4935 TCGv t2
= tcg_const_tl(0);
4936 TCGv t3
= tcg_const_tl(1);
4937 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4938 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4939 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4945 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4948 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4953 TCGv_i64 t2
= tcg_temp_new_i64();
4954 TCGv_i64 t3
= tcg_temp_new_i64();
4956 tcg_gen_ext_tl_i64(t2
, t0
);
4957 tcg_gen_ext_tl_i64(t3
, t1
);
4958 tcg_gen_mul_i64(t2
, t2
, t3
);
4959 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4960 tcg_gen_add_i64(t2
, t2
, t3
);
4961 tcg_temp_free_i64(t3
);
4962 gen_move_low32(cpu_LO
[acc
], t2
);
4963 gen_move_high32(cpu_HI
[acc
], t2
);
4964 tcg_temp_free_i64(t2
);
4969 TCGv_i64 t2
= tcg_temp_new_i64();
4970 TCGv_i64 t3
= tcg_temp_new_i64();
4972 tcg_gen_ext32u_tl(t0
, t0
);
4973 tcg_gen_ext32u_tl(t1
, t1
);
4974 tcg_gen_extu_tl_i64(t2
, t0
);
4975 tcg_gen_extu_tl_i64(t3
, t1
);
4976 tcg_gen_mul_i64(t2
, t2
, t3
);
4977 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4978 tcg_gen_add_i64(t2
, t2
, t3
);
4979 tcg_temp_free_i64(t3
);
4980 gen_move_low32(cpu_LO
[acc
], t2
);
4981 gen_move_high32(cpu_HI
[acc
], t2
);
4982 tcg_temp_free_i64(t2
);
4987 TCGv_i64 t2
= tcg_temp_new_i64();
4988 TCGv_i64 t3
= tcg_temp_new_i64();
4990 tcg_gen_ext_tl_i64(t2
, t0
);
4991 tcg_gen_ext_tl_i64(t3
, t1
);
4992 tcg_gen_mul_i64(t2
, t2
, t3
);
4993 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4994 tcg_gen_sub_i64(t2
, t3
, t2
);
4995 tcg_temp_free_i64(t3
);
4996 gen_move_low32(cpu_LO
[acc
], t2
);
4997 gen_move_high32(cpu_HI
[acc
], t2
);
4998 tcg_temp_free_i64(t2
);
5003 TCGv_i64 t2
= tcg_temp_new_i64();
5004 TCGv_i64 t3
= tcg_temp_new_i64();
5006 tcg_gen_ext32u_tl(t0
, t0
);
5007 tcg_gen_ext32u_tl(t1
, t1
);
5008 tcg_gen_extu_tl_i64(t2
, t0
);
5009 tcg_gen_extu_tl_i64(t3
, t1
);
5010 tcg_gen_mul_i64(t2
, t2
, t3
);
5011 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5012 tcg_gen_sub_i64(t2
, t3
, t2
);
5013 tcg_temp_free_i64(t3
);
5014 gen_move_low32(cpu_LO
[acc
], t2
);
5015 gen_move_high32(cpu_HI
[acc
], t2
);
5016 tcg_temp_free_i64(t2
);
5020 MIPS_INVAL("mul/div");
5021 generate_exception_end(ctx
, EXCP_RI
);
5030 * These MULT and MULTU instructions implemented in for example the
5031 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5032 * architectures are special three-operand variants with the syntax
5034 * MULT[U][1] rd, rs, rt
5038 * (rd, LO, HI) <- rs * rt
5040 * where the low-order 32-bits of the result is placed into both the
5041 * GPR rd and the special register LO. The high-order 32-bits of the
5042 * result is placed into the special register HI.
5044 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5045 * which is the zero register that always reads as 0.
5047 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5048 int rd
, int rs
, int rt
)
5050 TCGv t0
= tcg_temp_new();
5051 TCGv t1
= tcg_temp_new();
5054 gen_load_gpr(t0
, rs
);
5055 gen_load_gpr(t1
, rt
);
5063 TCGv_i32 t2
= tcg_temp_new_i32();
5064 TCGv_i32 t3
= tcg_temp_new_i32();
5065 tcg_gen_trunc_tl_i32(t2
, t0
);
5066 tcg_gen_trunc_tl_i32(t3
, t1
);
5067 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5069 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5071 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5072 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5073 tcg_temp_free_i32(t2
);
5074 tcg_temp_free_i32(t3
);
5077 case MMI_OPC_MULTU1
:
5082 TCGv_i32 t2
= tcg_temp_new_i32();
5083 TCGv_i32 t3
= tcg_temp_new_i32();
5084 tcg_gen_trunc_tl_i32(t2
, t0
);
5085 tcg_gen_trunc_tl_i32(t3
, t1
);
5086 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5088 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5090 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5091 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5092 tcg_temp_free_i32(t2
);
5093 tcg_temp_free_i32(t3
);
5097 MIPS_INVAL("mul TXx9");
5098 generate_exception_end(ctx
, EXCP_RI
);
5107 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
5108 int rd
, int rs
, int rt
)
5110 TCGv t0
= tcg_temp_new();
5111 TCGv t1
= tcg_temp_new();
5113 gen_load_gpr(t0
, rs
);
5114 gen_load_gpr(t1
, rt
);
5117 case OPC_VR54XX_MULS
:
5118 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5120 case OPC_VR54XX_MULSU
:
5121 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5123 case OPC_VR54XX_MACC
:
5124 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5126 case OPC_VR54XX_MACCU
:
5127 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5129 case OPC_VR54XX_MSAC
:
5130 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5132 case OPC_VR54XX_MSACU
:
5133 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5135 case OPC_VR54XX_MULHI
:
5136 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5138 case OPC_VR54XX_MULHIU
:
5139 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5141 case OPC_VR54XX_MULSHI
:
5142 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5144 case OPC_VR54XX_MULSHIU
:
5145 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5147 case OPC_VR54XX_MACCHI
:
5148 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5150 case OPC_VR54XX_MACCHIU
:
5151 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5153 case OPC_VR54XX_MSACHI
:
5154 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5156 case OPC_VR54XX_MSACHIU
:
5157 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5160 MIPS_INVAL("mul vr54xx");
5161 generate_exception_end(ctx
, EXCP_RI
);
5164 gen_store_gpr(t0
, rd
);
5171 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
5181 gen_load_gpr(t0
, rs
);
5186 #if defined(TARGET_MIPS64)
5190 tcg_gen_not_tl(t0
, t0
);
5199 tcg_gen_ext32u_tl(t0
, t0
);
5200 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5201 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5203 #if defined(TARGET_MIPS64)
5208 tcg_gen_clzi_i64(t0
, t0
, 64);
5214 /* Godson integer instructions */
5215 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5216 int rd
, int rs
, int rt
)
5228 case OPC_MULTU_G_2E
:
5229 case OPC_MULTU_G_2F
:
5230 #if defined(TARGET_MIPS64)
5231 case OPC_DMULT_G_2E
:
5232 case OPC_DMULT_G_2F
:
5233 case OPC_DMULTU_G_2E
:
5234 case OPC_DMULTU_G_2F
:
5236 t0
= tcg_temp_new();
5237 t1
= tcg_temp_new();
5240 t0
= tcg_temp_local_new();
5241 t1
= tcg_temp_local_new();
5245 gen_load_gpr(t0
, rs
);
5246 gen_load_gpr(t1
, rt
);
5251 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5252 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5254 case OPC_MULTU_G_2E
:
5255 case OPC_MULTU_G_2F
:
5256 tcg_gen_ext32u_tl(t0
, t0
);
5257 tcg_gen_ext32u_tl(t1
, t1
);
5258 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5259 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5264 TCGLabel
*l1
= gen_new_label();
5265 TCGLabel
*l2
= gen_new_label();
5266 TCGLabel
*l3
= gen_new_label();
5267 tcg_gen_ext32s_tl(t0
, t0
);
5268 tcg_gen_ext32s_tl(t1
, t1
);
5269 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5270 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5273 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5274 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5275 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5278 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5279 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5286 TCGLabel
*l1
= gen_new_label();
5287 TCGLabel
*l2
= gen_new_label();
5288 tcg_gen_ext32u_tl(t0
, t0
);
5289 tcg_gen_ext32u_tl(t1
, t1
);
5290 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5291 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5294 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5295 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5302 TCGLabel
*l1
= gen_new_label();
5303 TCGLabel
*l2
= gen_new_label();
5304 TCGLabel
*l3
= gen_new_label();
5305 tcg_gen_ext32u_tl(t0
, t0
);
5306 tcg_gen_ext32u_tl(t1
, t1
);
5307 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5308 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5309 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5311 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5314 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5315 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5322 TCGLabel
*l1
= gen_new_label();
5323 TCGLabel
*l2
= gen_new_label();
5324 tcg_gen_ext32u_tl(t0
, t0
);
5325 tcg_gen_ext32u_tl(t1
, t1
);
5326 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5327 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5330 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5331 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5335 #if defined(TARGET_MIPS64)
5336 case OPC_DMULT_G_2E
:
5337 case OPC_DMULT_G_2F
:
5338 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5340 case OPC_DMULTU_G_2E
:
5341 case OPC_DMULTU_G_2F
:
5342 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5347 TCGLabel
*l1
= gen_new_label();
5348 TCGLabel
*l2
= gen_new_label();
5349 TCGLabel
*l3
= gen_new_label();
5350 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5351 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5354 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5355 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5356 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5359 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5363 case OPC_DDIVU_G_2E
:
5364 case OPC_DDIVU_G_2F
:
5366 TCGLabel
*l1
= gen_new_label();
5367 TCGLabel
*l2
= gen_new_label();
5368 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5369 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5372 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5379 TCGLabel
*l1
= gen_new_label();
5380 TCGLabel
*l2
= gen_new_label();
5381 TCGLabel
*l3
= gen_new_label();
5382 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5383 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5384 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5386 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5389 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5393 case OPC_DMODU_G_2E
:
5394 case OPC_DMODU_G_2F
:
5396 TCGLabel
*l1
= gen_new_label();
5397 TCGLabel
*l2
= gen_new_label();
5398 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5399 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5402 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/* Loongson multimedia instructions */
/*
 * Translate one Loongson-2E/2F CP2 multimedia (64-bit SIMD) instruction.
 * Sources are FPR[rs]/FPR[rt]; the result is written back to FPR[rd].
 * Most packed ops dispatch to helpers via the LMI_* macros; a few simple
 * ones are emitted inline as plain i64 TCG ops.
 *
 * NOTE(review): structural lines (case labels, braces) were elided in the
 * excerpt this documentation pass worked from; they are restored here to
 * match the upstream layout — verify against the canonical source.
 */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
{
    uint32_t opc, shift_max;
    TCGv_i64 t0, t1;

    opc = MASK_LMI(ctx->opcode);
    switch (opc) {
    case OPC_ADD_CP2:
    case OPC_SUB_CP2:
    case OPC_DADD_CP2:
    case OPC_DSUB_CP2:
        /* These four emit a branch (overflow check), so the temps must
           survive across a label: use local temps. */
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        break;
    default:
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        break;
    }

    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);

/* Two-operand helper: FD = helper(FS, FT). */
#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
/* One-operand helper: FD = helper(FS). */
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
/* Direct TCG op: FD = FS <OP> FT, no helper call. */
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    switch (opc) {
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);

    case OPC_PANDN:
        /* PANDN: FD = ~FS & FT (note the operand order of andc). */
        tcg_gen_andc_i64(t0, t1, t0);
        break;

    case OPC_PINSRH_0:
        /* Insert low halfword of FT into halfword lane 0..3 of FS. */
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        break;
    case OPC_PINSRH_1:
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        break;
    case OPC_PINSRH_2:
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        break;
    case OPC_PINSRH_3:
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        break;

    case OPC_PEXTRH:
        /* Extract halfword lane selected by low 2 bits of FT:
           lane index * 16 gives the shift amount. */
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        break;

    case OPC_ADDU_CP2:
        /* 32-bit add without overflow trap; result sign-extended. */
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        break;
    case OPC_SUBU_CP2:
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        break;

    case OPC_SLL_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_SRL_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_SRA_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_DSLL_CP2:
        shift_max = 64;
        goto do_shift;
    case OPC_DSRL_CP2:
        shift_max = 64;
        goto do_shift;
    case OPC_DSRA_CP2:
        shift_max = 64;
        goto do_shift;
    do_shift:
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        switch (opc) {
        case OPC_SLL_CP2:
        case OPC_DSLL_CP2:
            tcg_gen_shl_i64(t0, t0, t1);
            break;
        case OPC_SRA_CP2:
        case OPC_DSRA_CP2:
            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same. */
            tcg_gen_sar_i64(t0, t0, t1);
            break;
        case OPC_SRL_CP2:
            /* We want to shift in zeros for SRL; zero-extend first. */
            tcg_gen_ext32u_i64(t0, t0);
            /* FALLTHRU */
        case OPC_DSRL_CP2:
            tcg_gen_shr_i64(t0, t0, t1);
            break;
        }

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
        break;

    case OPC_ADD_CP2:
    case OPC_DADD_CP2:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            /* Signed-overflow detection: overflow iff the operands have
               the same sign and the result's sign differs. */
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);
        }
        break;

    case OPC_SUB_CP2:
    case OPC_DSUB_CP2:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            /* Overflow iff operand signs differ and the result's sign
               differs from the minuend's. */
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);
        }
        break;

    case OPC_PMULUW:
        /* Unsigned 32x32 -> 64 multiply of the low words. */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        break;

    case OPC_SEQU_CP2:
    case OPC_SEQ_CP2:
    case OPC_SLTU_CP2:
    case OPC_SLT_CP2:
    case OPC_SLEU_CP2:
    case OPC_SLE_CP2:
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field? */
    default:
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/*
 * Translate a trap instruction (TEQ/TGE/TLT/TNE families, register and
 * immediate forms).  When both operands are statically known (same
 * register, or r0 vs 0) the condition is resolved at translation time:
 * either an unconditional EXCP_TRAP or a NOP.  Otherwise a runtime
 * compare-and-branch around the trap is emitted.
 */
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    int cond;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    cond = 0;
    /* Load needed operands */
    switch (opc) {
    case OPC_TEQ:
    case OPC_TGE:
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TNE:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            cond = 1;
        }
        break;
    case OPC_TEQI:
    case OPC_TGEI:
    case OPC_TGEIU:
    case OPC_TLTI:
    case OPC_TLTIU:
    case OPC_TNEI:
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
            cond = 1;
        }
        break;
    }
    if (cond == 0) {
        /* Condition is statically known (rs == rt, or r0 vs. 0). */
        switch (opc) {
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0 */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0 */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0 unsigned */
            /* Always trap */
            generate_exception_end(ctx, EXCP_TRAP);
            break;
        case OPC_TLT:   /* rs < rs */
        case OPC_TLTI:  /* r0 < 0 */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0 unsigned */
        case OPC_TNE:   /* rs != rs */
        case OPC_TNEI:  /* r0 != 0 */
            /* Never trap: treat as NOP. */
            break;
        }
    } else {
        TCGLabel *l1 = gen_new_label();

        /* Branch over the trap when the *inverse* condition holds. */
        switch (opc) {
        case OPC_TEQ:
        case OPC_TEQI:
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            break;
        case OPC_TGE:
        case OPC_TGEI:
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            break;
        case OPC_TGEU:
        case OPC_TGEIU:
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            break;
        case OPC_TLT:
        case OPC_TLTI:
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            break;
        case OPC_TLTU:
        case OPC_TLTIU:
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            break;
        case OPC_TNE:
        case OPC_TNEI:
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
            break;
        }
        generate_exception(ctx, EXCP_TRAP);
        gen_set_label(l1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 * Decide whether a direct TB-to-TB chain (goto_tb) may be used for a
 * branch to 'dest'.  Disallowed when single-stepping; in system mode
 * also restricted to targets on the same guest page as the current TB
 * (chaining across pages would bypass per-page protection checks).
 */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->base.singlestep_enabled)) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
/*
 * Emit a jump to 'dest' as translation-block exit 'n'.  Uses the fast
 * chained-TB exit when permitted; otherwise stores the new PC and either
 * raises the debug exception (single-step) or falls back to the indirect
 * TB lookup.
 */
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        gen_save_pc(dest);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
    }
}
/* Branches (before delay slot) */
/*
 * Translate a classic MIPS branch/jump (run before its delay slot).
 * Sets ctx->btarget and the MIPS_HFLAG_B* hflags that tell the main
 * translation loop how to handle the following delay-slot instruction.
 * Statically-decidable conditions (rs == rt, or compares of r0 against 0)
 * are folded at translation time into always-taken branches or NOPs;
 * "likely" variants with a statically-false condition skip the delay slot.
 *
 * NOTE(review): case labels/braces elided in the excerpt were restored
 * from the visible fragments — verify against the canonical source.
 */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes,
                                int rs, int rt, int32_t offset,
                                int delayslot_size)
{
    target_ulong btgt = -1;
    int blink = 0;                /* link register number, 0 = no link */
    int bcond_compute = 0;        /* 1 = condition must be computed at runtime */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->base.pc_next);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands */
    switch (opc) {
    case OPC_BEQ:
    case OPC_BEQL:
    case OPC_BNE:
    case OPC_BNEL:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            bcond_compute = 1;
        }
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_BGEZ:
    case OPC_BGEZAL:
    case OPC_BGEZALL:
    case OPC_BGEZL:
    case OPC_BGTZ:
    case OPC_BGTZL:
    case OPC_BLEZ:
    case OPC_BLEZL:
    case OPC_BLTZ:
    case OPC_BLTZAL:
    case OPC_BLTZALL:
    case OPC_BLTZL:
        /* Compare to zero */
        if (rs != 0) {
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
        }
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_BPOSGE32:
#if defined(TARGET_MIPS64)
    case OPC_BPOSGE64:
        /* DSP branch: test the pos field of DSPControl (7 bits on MIPS64). */
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
#else
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
#endif
        bcond_compute = 1;
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_J:
    case OPC_JAL:
    case OPC_JALX:
        /* Jump to immediate */
        btgt = ((ctx->base.pc_next + insn_bytes) & (int32_t)0xF0000000) |
            (uint32_t)offset;
        break;
    case OPC_JR:
    case OPC_JALR:
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
        gen_load_gpr(btarget, rs);
        break;
    default:
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }
    if (bcond_compute == 0) {
        /* No condition to be computed */
        switch (opc) {
        case OPC_BEQ:     /* rx == rx */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0 */
        case OPC_BGEZL:   /* 0 >= 0 likely */
        case OPC_BLEZ:    /* 0 <= 0 */
        case OPC_BLEZL:   /* 0 <= 0 likely */
            /* Always take */
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_BGEZAL:  /* 0 >= 0 */
        case OPC_BGEZALL: /* 0 >= 0 likely */
            /* Always take and link */
            blink = 31;
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_BNE:     /* rx != rx */
        case OPC_BGTZ:    /* 0 > 0 */
        case OPC_BLTZ:    /* 0 < 0 */
            /* Treat as NOP. */
            goto out;
        case OPC_BLTZAL:  /* 0 < 0 */
            /* Handle as an unconditional branch to get correct delay
               slot checking. */
            blink = 31;
            btgt = ctx->base.pc_next + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_BLTZALL: /* 0 < 0 likely */
            /* Not taken, but still links r31. */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            goto out;
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely */
        case OPC_BLTZL:   /* 0 < 0 likely */
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            goto out;
        case OPC_J:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_JALX:
            /* JALX additionally toggles the ISA mode in the delay slot. */
            ctx->hflags |= MIPS_HFLAG_BX;
            /* Fallthrough */
        case OPC_JAL:
            blink = 31;
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_JALR:
            blink = rt;
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_JR:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        default:
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
    } else {
        /* Runtime condition: materialize it into 'bcond'. */
        switch (opc) {
        case OPC_BEQ:
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            goto not_likely;
        case OPC_BEQL:
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            goto likely;
        case OPC_BNE:
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            goto not_likely;
        case OPC_BNEL:
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            goto likely;
        case OPC_BGEZ:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            goto not_likely;
        case OPC_BGEZL:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            goto likely;
        case OPC_BGEZAL:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            blink = 31;
            goto not_likely;
        case OPC_BGEZALL:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            blink = 31;
            goto likely;
        case OPC_BGTZ:
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            goto not_likely;
        case OPC_BGTZL:
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            goto likely;
        case OPC_BLEZ:
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            goto not_likely;
        case OPC_BLEZL:
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            goto likely;
        case OPC_BLTZ:
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            goto not_likely;
        case OPC_BLTZL:
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            goto likely;
        case OPC_BPOSGE32:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
            goto not_likely;
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            goto not_likely;
#endif
        case OPC_BLTZAL:
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            blink = 31;
        not_likely:
            ctx->hflags |= MIPS_HFLAG_BC;
            break;
        case OPC_BLTZALL:
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            blink = 31;
        likely:
            ctx->hflags |= MIPS_HFLAG_BL;
            break;
        default:
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
    }

    ctx->btarget = btgt;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

    if (blink > 0) {
        int post_delay = insn_bytes + delayslot_size;
        /* lowbit marks the ISA mode (microMIPS/MIPS16) in the link value. */
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink],
                        ctx->base.pc_next + post_delay + lowbit);
    }

 out:
    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* nanoMIPS Branches */
/*
 * Translate a nanoMIPS branch/jump.  Simplified sibling of
 * gen_compute_branch(): fewer opcodes, no "likely" variants, and the
 * link value is PC of the next instruction (nanoMIPS has no delay-slot
 * adjustment in the link address beyond insn_bytes).
 *
 * NOTE(review): elided case labels/braces restored from the visible
 * fragments — verify against the canonical source.
 */
static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
                                  int insn_bytes,
                                  int rs, int rt, int32_t offset)
{
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
    switch (opc) {
    case OPC_BEQ:
    case OPC_BNE:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            bcond_compute = 1;
        }
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_BGEZAL:
        /* Compare to zero */
        if (rs != 0) {
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
        }
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_BPOSGE32:
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        bcond_compute = 1;
        btgt = ctx->base.pc_next + insn_bytes + offset;
        break;
    case OPC_JR:
    case OPC_JALR:
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
        gen_load_gpr(btarget, rs);
        break;
    default:
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }
    if (bcond_compute == 0) {
        /* No condition to be computed */
        switch (opc) {
        case OPC_BEQ:     /* rx == rx */
            /* Always take */
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_BGEZAL:  /* 0 >= 0 */
            /* Always take and link */
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        case OPC_BNE:     /* rx != rx */
            /* Never taken, but still links and skips the delay slot. */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            goto out;
        case OPC_JR:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_JALR:
            if (rt > 0) {
                tcg_gen_movi_tl(cpu_gpr[rt],
                                ctx->base.pc_next + insn_bytes);
            }
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        default:
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
    } else {
        /* Runtime condition: materialize into 'bcond'. */
        switch (opc) {
        case OPC_BEQ:
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            goto not_likely;
        case OPC_BNE:
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            goto not_likely;
        case OPC_BGEZAL:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            goto not_likely;
        case OPC_BPOSGE32:
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
        not_likely:
            ctx->hflags |= MIPS_HFLAG_BC;
            break;
        default:
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }
    }

    ctx->btarget = btgt;

 out:
    if (insn_bytes == 2) {
        ctx->hflags |= MIPS_HFLAG_B16;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* special3 bitfield operations */
/*
 * Translate EXT/INS and their 64-bit variants (DEXT/DEXTM/DEXTU,
 * DINS/DINSM/DINSU).  'lsb'/'msb' come straight from the instruction
 * fields; the D*M/D*U variants bias them by 32 via deliberate
 * fallthrough.  Invalid field combinations raise EXCP_RI.
 */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
    switch (opc) {
    case OPC_EXT:
        if (lsb + msb > 31) {
            goto fail;
        }
        if (msb != 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        } else {
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension. */
            tcg_gen_ext32s_tl(t0, t1);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTU:
        lsb += 32;
        /* FALLTHRU */
    case OPC_DEXTM:
        msb += 32;
        /* FALLTHRU */
    case OPC_DEXT:
        if (lsb + msb > 63) {
            goto fail;
        }
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        break;
#endif
    case OPC_INS:
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DINSU:
        lsb += 32;
        /* FALLTHRU */
    case OPC_DINSM:
        msb += 32;
        /* FALLTHRU */
    case OPC_DINS:
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        break;
#endif
    default:
fail:
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
        return;
    }
    gen_store_gpr(t0, rt);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 * Translate byte/halfword shuffle ops: WSBH (swap bytes within
 * halfwords), SEB/SEH (sign-extend byte/halfword) and, on MIPS64,
 * DSBH/DSHD.  The byte swaps are built from shift/mask/or sequences.
 */
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    /* If no destination, treat it as a NOP. */
    if (rd == 0) {
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            /* Swap the two bytes of each halfword. */
            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            /* Swap bytes within each of the four halfwords. */
            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            /* Reverse the four halfwords: swap within words, then swap
               the two words. */
            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
        tcg_temp_free(t0);
        return;
    }
    tcg_temp_free(t0);
}
/*
 * Translate LSA/DLSA: rd = (rs << (imm2 + 1)) + rt.  LSA additionally
 * sign-extends the 32-bit result.  rd == 0 is a NOP.
 */
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    TCGv t0;
    TCGv t1;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    /* Encoded shift amount is imm2 + 1 (range 1..4). */
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);

    return;
}
/*
 * Common worker for ALIGN/DALIGN (and the EXT forms below): concatenate
 * rs:rt and extract a word/doubleword shifted left by 'bits'.
 * bits == 0 selects rt unchanged, bits == wordsz selects rs unchanged.
 * rd == 0 is a NOP.
 */
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        /* Degenerate cases: the result is just one source register. */
        if (bits == 0) {
            gen_load_gpr(t0, rt);
        } else {
            gen_load_gpr(t0, rs);
        }
        switch (wordsz) {
        case 32:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        switch (wordsz) {
        case 32:
            {
                /* Form the 64-bit value rs:rt and pick the aligned word. */
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }

    tcg_temp_free(t0);
}
/* ALIGN/DALIGN: byte-granular funnel shift; 'bp' is the byte position. */
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}
/* EXTW/EXTD-style extract: complement of 'shift' within the word size. */
static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
}
/*
 * Translate BITSWAP/DBITSWAP (reverse the bits of each byte) via
 * helpers.  rd == 0 is a NOP.
 */
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
6421 #ifndef CONFIG_USER_ONLY
6422 /* CP0 (MMU and control) */
/* CP0 (MMU and control) */
/*
 * MTHC0 to EntryLo0/1: merge 'arg' (the high PA bits) into the upper
 * part of the 64-bit EntryLo register at env offset 'off'.  On MIPS64
 * the field starts at bit 30 (below the RI/XI bits); on MIPS32 the
 * whole upper word is replaced.
 */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * Generic MTHC0 store: write 'arg' into the upper 32 bits of the 64-bit
 * CP0 register at env offset 'off', preserving the lower 32 bits.
 */
static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * MFHC0 from EntryLo0/1: read the high PA bits of the 64-bit EntryLo
 * register at env offset 'off' into 'arg'.  Shift of 30 on MIPS64
 * mirrors the bit-30 deposit done by gen_mthc0_entrylo().
 */
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/*
 * Generic MFHC0 load: read bits [63+shift:32+shift] of the 64-bit CP0
 * register at env offset 'off' into 'arg'.
 */
static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/* Load a 32-bit CP0 register at env offset 'off', sign-extended into arg. */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}
/* Load a target_ulong-wide CP0 register, truncated/sign-extended to 32 bits. */
static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}
/* Store the low 32 bits of 'arg' into a 32-bit CP0 register at 'off'. */
static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
/* Bail out to the enclosing function's 'cp0_unimplemented' label when
   condition 'c' (a feature/ASE availability test) does not hold. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
/*
 * Translate MFHC0: read the upper half of a 64-bit CP0 register
 * (selected by reg/sel) into 'arg'.  Unimplemented registers log an
 * UNIMP message and read as zero.
 *
 * NOTE(review): the register-number case labels were elided in the
 * excerpt and restored here — verify against the canonical source.
 */
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    switch (reg) {
    case 2:
        switch (sel) {
        case 0:
            /* EntryLo0 high bits exist only with large phys addr (LPA). */
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            rn = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 3:
        switch (sel) {
        case 0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            rn = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 17:
        switch (sel) {
        case 0:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            rn = "LLAddr";
            break;
        case 1:
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            rn = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 28:
        switch (sel) {
        case 0:
        case 2:
        case 4:
        case 6:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            rn = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mfhc0", rn, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* Unimplemented MFHC0 sources read as zero. */
    tcg_gen_movi_tl(arg, 0);
}
/*
 * Translate MTHC0: write 'arg' into the upper half of a 64-bit CP0
 * register (selected by reg/sel).  'mask' restricts the written bits to
 * the implemented physical-address range (PAMask >> 36).
 *
 * NOTE(review): the register-number case labels were elided in the
 * excerpt and restored here — verify against the canonical source.
 * Also note: unlike gen_mfhc0(), the original-line numbering shows no
 * 'return' before the cp0_unimplemented label, so successful writes
 * fall through into the LOG_UNIMP message — looks like an upstream
 * quirk; confirm before changing.
 */
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    switch (reg) {
    case 2:
        switch (sel) {
        case 0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            rn = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 3:
        switch (sel) {
        case 0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            rn = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 17:
        switch (sel) {
        case 0:
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            rn = "LLAddr";
            break;
        case 1:
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            rn = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case 28:
        switch (sel) {
        case 0:
        case 2:
        case 4:
        case 6:
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            rn = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mthc0", rn, reg, sel);

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
}
/*
 * Value returned by MFC0 for an unimplemented register: 0 on R6
 * (architecturally defined), all-ones on pre-R6.
 */
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
6655 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6657 const char *rn
= "invalid";
6660 check_insn(ctx
, ISA_MIPS32
);
6666 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6670 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6671 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6675 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6676 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6680 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6681 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6686 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6690 goto cp0_unimplemented
;
6696 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6697 gen_helper_mfc0_random(arg
, cpu_env
);
6701 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6706 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6707 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6711 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6712 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6716 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6717 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6721 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6722 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6726 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6727 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6728 rn
= "VPEScheFBack";
6731 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6732 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6736 goto cp0_unimplemented
;
6743 TCGv_i64 tmp
= tcg_temp_new_i64();
6744 tcg_gen_ld_i64(tmp
, cpu_env
,
6745 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6746 #if defined(TARGET_MIPS64)
6748 /* Move RI/XI fields to bits 31:30 */
6749 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6750 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6753 gen_move_low32(arg
, tmp
);
6754 tcg_temp_free_i64(tmp
);
6759 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6760 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6764 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6765 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6769 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6770 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6774 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6775 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6779 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6780 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6784 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6785 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6789 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6790 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6794 goto cp0_unimplemented
;
6801 TCGv_i64 tmp
= tcg_temp_new_i64();
6802 tcg_gen_ld_i64(tmp
, cpu_env
,
6803 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6804 #if defined(TARGET_MIPS64)
6806 /* Move RI/XI fields to bits 31:30 */
6807 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6808 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6811 gen_move_low32(arg
, tmp
);
6812 tcg_temp_free_i64(tmp
);
6818 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6819 rn
= "GlobalNumber";
6822 goto cp0_unimplemented
;
6828 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6829 tcg_gen_ext32s_tl(arg
, arg
);
6833 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6834 rn
= "ContextConfig";
6835 goto cp0_unimplemented
;
6837 CP0_CHECK(ctx
->ulri
);
6838 tcg_gen_ld_tl(arg
, cpu_env
,
6839 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6840 tcg_gen_ext32s_tl(arg
, arg
);
6844 goto cp0_unimplemented
;
6850 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6854 check_insn(ctx
, ISA_MIPS32R2
);
6855 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6860 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6861 tcg_gen_ext32s_tl(arg
, arg
);
6866 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6867 tcg_gen_ext32s_tl(arg
, arg
);
6872 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6873 tcg_gen_ext32s_tl(arg
, arg
);
6878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6883 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6892 goto cp0_unimplemented
;
6898 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6902 check_insn(ctx
, ISA_MIPS32R2
);
6903 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6907 check_insn(ctx
, ISA_MIPS32R2
);
6908 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6912 check_insn(ctx
, ISA_MIPS32R2
);
6913 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6917 check_insn(ctx
, ISA_MIPS32R2
);
6918 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6922 check_insn(ctx
, ISA_MIPS32R2
);
6923 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6928 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6932 goto cp0_unimplemented
;
6938 check_insn(ctx
, ISA_MIPS32R2
);
6939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6943 goto cp0_unimplemented
;
6949 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6950 tcg_gen_ext32s_tl(arg
, arg
);
6955 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6960 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6966 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6970 goto cp0_unimplemented
;
6976 /* Mark as an IO operation because we read the time. */
6977 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6980 gen_helper_mfc0_count(arg
, cpu_env
);
6981 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6984 /* Break the TB to be able to take timer interrupts immediately
6985 after reading count. DISAS_STOP isn't sufficient, we need to
6986 ensure we break completely out of translated code. */
6987 gen_save_pc(ctx
->base
.pc_next
+ 4);
6988 ctx
->base
.is_jmp
= DISAS_EXIT
;
6991 /* 6,7 are implementation dependent */
6993 goto cp0_unimplemented
;
6999 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7000 tcg_gen_ext32s_tl(arg
, arg
);
7004 goto cp0_unimplemented
;
7010 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7013 /* 6,7 are implementation dependent */
7015 goto cp0_unimplemented
;
7021 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7025 check_insn(ctx
, ISA_MIPS32R2
);
7026 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7030 check_insn(ctx
, ISA_MIPS32R2
);
7031 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7035 check_insn(ctx
, ISA_MIPS32R2
);
7036 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7040 goto cp0_unimplemented
;
7046 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7050 goto cp0_unimplemented
;
7056 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7057 tcg_gen_ext32s_tl(arg
, arg
);
7061 goto cp0_unimplemented
;
7067 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7071 check_insn(ctx
, ISA_MIPS32R2
);
7072 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7073 tcg_gen_ext32s_tl(arg
, arg
);
7077 check_insn(ctx
, ISA_MIPS32R2
);
7078 CP0_CHECK(ctx
->cmgcr
);
7079 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7080 tcg_gen_ext32s_tl(arg
, arg
);
7084 goto cp0_unimplemented
;
7090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7094 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7098 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7102 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7106 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7110 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7113 /* 6,7 are implementation dependent */
7115 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7119 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7123 goto cp0_unimplemented
;
7129 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7133 CP0_CHECK(ctx
->mrp
);
7134 gen_helper_mfc0_maar(arg
, cpu_env
);
7138 CP0_CHECK(ctx
->mrp
);
7139 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7143 goto cp0_unimplemented
;
7156 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7157 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7161 goto cp0_unimplemented
;
7174 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7175 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7179 goto cp0_unimplemented
;
7185 #if defined(TARGET_MIPS64)
7186 check_insn(ctx
, ISA_MIPS3
);
7187 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7188 tcg_gen_ext32s_tl(arg
, arg
);
7193 goto cp0_unimplemented
;
7197 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7198 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7201 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7205 goto cp0_unimplemented
;
7209 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7210 rn
= "'Diagnostic"; /* implementation dependent */
7215 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7219 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7220 rn
= "TraceControl";
7221 goto cp0_unimplemented
;
7223 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7224 rn
= "TraceControl2";
7225 goto cp0_unimplemented
;
7227 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7228 rn
= "UserTraceData";
7229 goto cp0_unimplemented
;
7231 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7233 goto cp0_unimplemented
;
7235 goto cp0_unimplemented
;
7242 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7243 tcg_gen_ext32s_tl(arg
, arg
);
7247 goto cp0_unimplemented
;
7253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7254 rn
= "Performance0";
7257 // gen_helper_mfc0_performance1(arg);
7258 rn
= "Performance1";
7259 goto cp0_unimplemented
;
7261 // gen_helper_mfc0_performance2(arg);
7262 rn
= "Performance2";
7263 goto cp0_unimplemented
;
7265 // gen_helper_mfc0_performance3(arg);
7266 rn
= "Performance3";
7267 goto cp0_unimplemented
;
7269 // gen_helper_mfc0_performance4(arg);
7270 rn
= "Performance4";
7271 goto cp0_unimplemented
;
7273 // gen_helper_mfc0_performance5(arg);
7274 rn
= "Performance5";
7275 goto cp0_unimplemented
;
7277 // gen_helper_mfc0_performance6(arg);
7278 rn
= "Performance6";
7279 goto cp0_unimplemented
;
7281 // gen_helper_mfc0_performance7(arg);
7282 rn
= "Performance7";
7283 goto cp0_unimplemented
;
7285 goto cp0_unimplemented
;
7291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7295 goto cp0_unimplemented
;
7304 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7308 goto cp0_unimplemented
;
7318 TCGv_i64 tmp
= tcg_temp_new_i64();
7319 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7320 gen_move_low32(arg
, tmp
);
7321 tcg_temp_free_i64(tmp
);
7329 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7333 goto cp0_unimplemented
;
7342 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7353 goto cp0_unimplemented
;
7359 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7360 tcg_gen_ext32s_tl(arg
, arg
);
7364 goto cp0_unimplemented
;
7371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7380 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7381 tcg_gen_ld_tl(arg
, cpu_env
,
7382 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7383 tcg_gen_ext32s_tl(arg
, arg
);
7387 goto cp0_unimplemented
;
7391 goto cp0_unimplemented
;
7393 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7397 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7398 gen_mfc0_unimplemented(ctx
, arg
);
7401 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7403 const char *rn
= "invalid";
7406 check_insn(ctx
, ISA_MIPS32
);
7408 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7416 gen_helper_mtc0_index(cpu_env
, arg
);
7420 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7421 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7425 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7430 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7440 goto cp0_unimplemented
;
7450 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7451 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7455 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7456 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7460 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7461 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7465 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7466 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7470 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7471 tcg_gen_st_tl(arg
, cpu_env
,
7472 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7476 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7477 tcg_gen_st_tl(arg
, cpu_env
,
7478 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7479 rn
= "VPEScheFBack";
7482 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7483 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7487 goto cp0_unimplemented
;
7493 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7497 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7498 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7502 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7503 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7507 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7508 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7512 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7513 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7517 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7518 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7522 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7523 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7527 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7528 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7532 goto cp0_unimplemented
;
7538 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7544 rn
= "GlobalNumber";
7547 goto cp0_unimplemented
;
7553 gen_helper_mtc0_context(cpu_env
, arg
);
7557 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7558 rn
= "ContextConfig";
7559 goto cp0_unimplemented
;
7561 CP0_CHECK(ctx
->ulri
);
7562 tcg_gen_st_tl(arg
, cpu_env
,
7563 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7567 goto cp0_unimplemented
;
7573 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7577 check_insn(ctx
, ISA_MIPS32R2
);
7578 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7580 ctx
->base
.is_jmp
= DISAS_STOP
;
7584 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7589 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7594 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7599 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7604 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7609 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7613 goto cp0_unimplemented
;
7619 gen_helper_mtc0_wired(cpu_env
, arg
);
7623 check_insn(ctx
, ISA_MIPS32R2
);
7624 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7628 check_insn(ctx
, ISA_MIPS32R2
);
7629 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7633 check_insn(ctx
, ISA_MIPS32R2
);
7634 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7638 check_insn(ctx
, ISA_MIPS32R2
);
7639 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7643 check_insn(ctx
, ISA_MIPS32R2
);
7644 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7649 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7653 goto cp0_unimplemented
;
7659 check_insn(ctx
, ISA_MIPS32R2
);
7660 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7661 ctx
->base
.is_jmp
= DISAS_STOP
;
7665 goto cp0_unimplemented
;
7687 goto cp0_unimplemented
;
7693 gen_helper_mtc0_count(cpu_env
, arg
);
7696 /* 6,7 are implementation dependent */
7698 goto cp0_unimplemented
;
7704 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7708 goto cp0_unimplemented
;
7714 gen_helper_mtc0_compare(cpu_env
, arg
);
7717 /* 6,7 are implementation dependent */
7719 goto cp0_unimplemented
;
7725 save_cpu_state(ctx
, 1);
7726 gen_helper_mtc0_status(cpu_env
, arg
);
7727 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7728 gen_save_pc(ctx
->base
.pc_next
+ 4);
7729 ctx
->base
.is_jmp
= DISAS_EXIT
;
7733 check_insn(ctx
, ISA_MIPS32R2
);
7734 gen_helper_mtc0_intctl(cpu_env
, arg
);
7735 /* Stop translation as we may have switched the execution mode */
7736 ctx
->base
.is_jmp
= DISAS_STOP
;
7740 check_insn(ctx
, ISA_MIPS32R2
);
7741 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7742 /* Stop translation as we may have switched the execution mode */
7743 ctx
->base
.is_jmp
= DISAS_STOP
;
7747 check_insn(ctx
, ISA_MIPS32R2
);
7748 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7749 /* Stop translation as we may have switched the execution mode */
7750 ctx
->base
.is_jmp
= DISAS_STOP
;
7754 goto cp0_unimplemented
;
7760 save_cpu_state(ctx
, 1);
7761 gen_helper_mtc0_cause(cpu_env
, arg
);
7762 /* Stop translation as we may have triggered an interrupt.
7763 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7764 * translated code to check for pending interrupts. */
7765 gen_save_pc(ctx
->base
.pc_next
+ 4);
7766 ctx
->base
.is_jmp
= DISAS_EXIT
;
7770 goto cp0_unimplemented
;
7776 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7780 goto cp0_unimplemented
;
7790 check_insn(ctx
, ISA_MIPS32R2
);
7791 gen_helper_mtc0_ebase(cpu_env
, arg
);
7795 goto cp0_unimplemented
;
7801 gen_helper_mtc0_config0(cpu_env
, arg
);
7803 /* Stop translation as we may have switched the execution mode */
7804 ctx
->base
.is_jmp
= DISAS_STOP
;
7807 /* ignored, read only */
7811 gen_helper_mtc0_config2(cpu_env
, arg
);
7813 /* Stop translation as we may have switched the execution mode */
7814 ctx
->base
.is_jmp
= DISAS_STOP
;
7817 gen_helper_mtc0_config3(cpu_env
, arg
);
7819 /* Stop translation as we may have switched the execution mode */
7820 ctx
->base
.is_jmp
= DISAS_STOP
;
7823 gen_helper_mtc0_config4(cpu_env
, arg
);
7825 ctx
->base
.is_jmp
= DISAS_STOP
;
7828 gen_helper_mtc0_config5(cpu_env
, arg
);
7830 /* Stop translation as we may have switched the execution mode */
7831 ctx
->base
.is_jmp
= DISAS_STOP
;
7833 /* 6,7 are implementation dependent */
7843 rn
= "Invalid config selector";
7844 goto cp0_unimplemented
;
7850 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7854 CP0_CHECK(ctx
->mrp
);
7855 gen_helper_mtc0_maar(cpu_env
, arg
);
7859 CP0_CHECK(ctx
->mrp
);
7860 gen_helper_mtc0_maari(cpu_env
, arg
);
7864 goto cp0_unimplemented
;
7877 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7878 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7882 goto cp0_unimplemented
;
7895 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7896 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7900 goto cp0_unimplemented
;
7906 #if defined(TARGET_MIPS64)
7907 check_insn(ctx
, ISA_MIPS3
);
7908 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7913 goto cp0_unimplemented
;
7917 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7918 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7921 gen_helper_mtc0_framemask(cpu_env
, arg
);
7925 goto cp0_unimplemented
;
7930 rn
= "Diagnostic"; /* implementation dependent */
7935 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7936 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7937 gen_save_pc(ctx
->base
.pc_next
+ 4);
7938 ctx
->base
.is_jmp
= DISAS_EXIT
;
7942 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7943 rn
= "TraceControl";
7944 /* Stop translation as we may have switched the execution mode */
7945 ctx
->base
.is_jmp
= DISAS_STOP
;
7946 goto cp0_unimplemented
;
7948 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7949 rn
= "TraceControl2";
7950 /* Stop translation as we may have switched the execution mode */
7951 ctx
->base
.is_jmp
= DISAS_STOP
;
7952 goto cp0_unimplemented
;
7954 /* Stop translation as we may have switched the execution mode */
7955 ctx
->base
.is_jmp
= DISAS_STOP
;
7956 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7957 rn
= "UserTraceData";
7958 /* Stop translation as we may have switched the execution mode */
7959 ctx
->base
.is_jmp
= DISAS_STOP
;
7960 goto cp0_unimplemented
;
7962 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7963 /* Stop translation as we may have switched the execution mode */
7964 ctx
->base
.is_jmp
= DISAS_STOP
;
7966 goto cp0_unimplemented
;
7968 goto cp0_unimplemented
;
7975 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7979 goto cp0_unimplemented
;
7985 gen_helper_mtc0_performance0(cpu_env
, arg
);
7986 rn
= "Performance0";
7989 // gen_helper_mtc0_performance1(arg);
7990 rn
= "Performance1";
7991 goto cp0_unimplemented
;
7993 // gen_helper_mtc0_performance2(arg);
7994 rn
= "Performance2";
7995 goto cp0_unimplemented
;
7997 // gen_helper_mtc0_performance3(arg);
7998 rn
= "Performance3";
7999 goto cp0_unimplemented
;
8001 // gen_helper_mtc0_performance4(arg);
8002 rn
= "Performance4";
8003 goto cp0_unimplemented
;
8005 // gen_helper_mtc0_performance5(arg);
8006 rn
= "Performance5";
8007 goto cp0_unimplemented
;
8009 // gen_helper_mtc0_performance6(arg);
8010 rn
= "Performance6";
8011 goto cp0_unimplemented
;
8013 // gen_helper_mtc0_performance7(arg);
8014 rn
= "Performance7";
8015 goto cp0_unimplemented
;
8017 goto cp0_unimplemented
;
8023 gen_helper_mtc0_errctl(cpu_env
, arg
);
8024 ctx
->base
.is_jmp
= DISAS_STOP
;
8028 goto cp0_unimplemented
;
8041 goto cp0_unimplemented
;
8050 gen_helper_mtc0_taglo(cpu_env
, arg
);
8057 gen_helper_mtc0_datalo(cpu_env
, arg
);
8061 goto cp0_unimplemented
;
8070 gen_helper_mtc0_taghi(cpu_env
, arg
);
8077 gen_helper_mtc0_datahi(cpu_env
, arg
);
8082 goto cp0_unimplemented
;
8088 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8092 goto cp0_unimplemented
;
8099 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8108 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8109 tcg_gen_st_tl(arg
, cpu_env
,
8110 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8114 goto cp0_unimplemented
;
8118 goto cp0_unimplemented
;
8120 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
8122 /* For simplicity assume that all writes can cause interrupts. */
8123 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8125 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8126 * translated code to check for pending interrupts. */
8127 gen_save_pc(ctx
->base
.pc_next
+ 4);
8128 ctx
->base
.is_jmp
= DISAS_EXIT
;
8133 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8136 #if defined(TARGET_MIPS64)
8137 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8139 const char *rn
= "invalid";
8142 check_insn(ctx
, ISA_MIPS64
);
8148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8152 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8153 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8157 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8158 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8163 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8168 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8172 goto cp0_unimplemented
;
8178 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8179 gen_helper_mfc0_random(arg
, cpu_env
);
8183 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8184 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8188 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8193 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8198 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8199 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8203 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8204 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8208 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8209 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8210 rn
= "VPEScheFBack";
8213 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8214 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8218 goto cp0_unimplemented
;
8224 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8228 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8229 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8233 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8234 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8238 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8239 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8243 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8244 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8248 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8249 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8254 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8258 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8259 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8263 goto cp0_unimplemented
;
8269 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8275 rn
= "GlobalNumber";
8278 goto cp0_unimplemented
;
8284 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8288 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8289 rn
= "ContextConfig";
8290 goto cp0_unimplemented
;
8292 CP0_CHECK(ctx
->ulri
);
8293 tcg_gen_ld_tl(arg
, cpu_env
,
8294 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8298 goto cp0_unimplemented
;
8304 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8308 check_insn(ctx
, ISA_MIPS32R2
);
8309 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8314 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8319 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8324 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8329 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8334 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8339 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8343 goto cp0_unimplemented
;
8349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8353 check_insn(ctx
, ISA_MIPS32R2
);
8354 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8358 check_insn(ctx
, ISA_MIPS32R2
);
8359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8363 check_insn(ctx
, ISA_MIPS32R2
);
8364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8368 check_insn(ctx
, ISA_MIPS32R2
);
8369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8373 check_insn(ctx
, ISA_MIPS32R2
);
8374 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8379 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8383 goto cp0_unimplemented
;
8389 check_insn(ctx
, ISA_MIPS32R2
);
8390 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8394 goto cp0_unimplemented
;
8400 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8405 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8410 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8415 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8416 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8420 goto cp0_unimplemented
;
8426 /* Mark as an IO operation because we read the time. */
8427 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8430 gen_helper_mfc0_count(arg
, cpu_env
);
8431 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8434 /* Break the TB to be able to take timer interrupts immediately
8435 after reading count. DISAS_STOP isn't sufficient, we need to
8436 ensure we break completely out of translated code. */
8437 gen_save_pc(ctx
->base
.pc_next
+ 4);
8438 ctx
->base
.is_jmp
= DISAS_EXIT
;
8441 /* 6,7 are implementation dependent */
8443 goto cp0_unimplemented
;
8449 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8453 goto cp0_unimplemented
;
8459 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8462 /* 6,7 are implementation dependent */
8464 goto cp0_unimplemented
;
8470 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8474 check_insn(ctx
, ISA_MIPS32R2
);
8475 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8479 check_insn(ctx
, ISA_MIPS32R2
);
8480 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8484 check_insn(ctx
, ISA_MIPS32R2
);
8485 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8489 goto cp0_unimplemented
;
8495 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8499 goto cp0_unimplemented
;
8505 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8509 goto cp0_unimplemented
;
8515 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8519 check_insn(ctx
, ISA_MIPS32R2
);
8520 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8524 check_insn(ctx
, ISA_MIPS32R2
);
8525 CP0_CHECK(ctx
->cmgcr
);
8526 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8530 goto cp0_unimplemented
;
8536 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8556 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8559 /* 6,7 are implementation dependent */
8561 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8569 goto cp0_unimplemented
;
8575 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8579 CP0_CHECK(ctx
->mrp
);
8580 gen_helper_dmfc0_maar(arg
, cpu_env
);
8584 CP0_CHECK(ctx
->mrp
);
8585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8589 goto cp0_unimplemented
;
8602 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8603 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8607 goto cp0_unimplemented
;
8620 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8621 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8625 goto cp0_unimplemented
;
8631 check_insn(ctx
, ISA_MIPS3
);
8632 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8636 goto cp0_unimplemented
;
8640 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8641 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8644 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8648 goto cp0_unimplemented
;
8652 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8653 rn
= "'Diagnostic"; /* implementation dependent */
8658 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8662 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8663 rn
= "TraceControl";
8664 goto cp0_unimplemented
;
8666 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8667 rn
= "TraceControl2";
8668 goto cp0_unimplemented
;
8670 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8671 rn
= "UserTraceData";
8672 goto cp0_unimplemented
;
8674 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8676 goto cp0_unimplemented
;
8678 goto cp0_unimplemented
;
8685 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8689 goto cp0_unimplemented
;
8695 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8696 rn
= "Performance0";
8699 // gen_helper_dmfc0_performance1(arg);
8700 rn
= "Performance1";
8701 goto cp0_unimplemented
;
8703 // gen_helper_dmfc0_performance2(arg);
8704 rn
= "Performance2";
8705 goto cp0_unimplemented
;
8707 // gen_helper_dmfc0_performance3(arg);
8708 rn
= "Performance3";
8709 goto cp0_unimplemented
;
8711 // gen_helper_dmfc0_performance4(arg);
8712 rn
= "Performance4";
8713 goto cp0_unimplemented
;
8715 // gen_helper_dmfc0_performance5(arg);
8716 rn
= "Performance5";
8717 goto cp0_unimplemented
;
8719 // gen_helper_dmfc0_performance6(arg);
8720 rn
= "Performance6";
8721 goto cp0_unimplemented
;
8723 // gen_helper_dmfc0_performance7(arg);
8724 rn
= "Performance7";
8725 goto cp0_unimplemented
;
8727 goto cp0_unimplemented
;
8733 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8737 goto cp0_unimplemented
;
8747 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8751 goto cp0_unimplemented
;
8760 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8767 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8771 goto cp0_unimplemented
;
8780 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8787 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8791 goto cp0_unimplemented
;
8797 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8801 goto cp0_unimplemented
;
8808 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8817 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8818 tcg_gen_ld_tl(arg
, cpu_env
,
8819 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8823 goto cp0_unimplemented
;
8827 goto cp0_unimplemented
;
8829 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8833 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8834 gen_mfc0_unimplemented(ctx
, arg
);
8837 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8839 const char *rn
= "invalid";
8842 check_insn(ctx
, ISA_MIPS64
);
8844 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8852 gen_helper_mtc0_index(cpu_env
, arg
);
8856 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8857 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8861 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8866 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8876 goto cp0_unimplemented
;
8886 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8887 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8891 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8892 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8896 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8897 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8901 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8902 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8906 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8907 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8911 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8912 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8913 rn
= "VPEScheFBack";
8916 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8917 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8921 goto cp0_unimplemented
;
8927 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8931 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8932 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8936 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8937 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8941 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8942 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8947 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8952 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8957 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8961 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8962 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8966 goto cp0_unimplemented
;
8972 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8978 rn
= "GlobalNumber";
8981 goto cp0_unimplemented
;
8987 gen_helper_mtc0_context(cpu_env
, arg
);
8991 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8992 rn
= "ContextConfig";
8993 goto cp0_unimplemented
;
8995 CP0_CHECK(ctx
->ulri
);
8996 tcg_gen_st_tl(arg
, cpu_env
,
8997 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9001 goto cp0_unimplemented
;
9007 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9011 check_insn(ctx
, ISA_MIPS32R2
);
9012 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9017 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9022 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9027 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9032 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9037 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9042 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9046 goto cp0_unimplemented
;
9052 gen_helper_mtc0_wired(cpu_env
, arg
);
9056 check_insn(ctx
, ISA_MIPS32R2
);
9057 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9061 check_insn(ctx
, ISA_MIPS32R2
);
9062 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9066 check_insn(ctx
, ISA_MIPS32R2
);
9067 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9071 check_insn(ctx
, ISA_MIPS32R2
);
9072 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9076 check_insn(ctx
, ISA_MIPS32R2
);
9077 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9082 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9086 goto cp0_unimplemented
;
9092 check_insn(ctx
, ISA_MIPS32R2
);
9093 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9094 ctx
->base
.is_jmp
= DISAS_STOP
;
9098 goto cp0_unimplemented
;
9120 goto cp0_unimplemented
;
9126 gen_helper_mtc0_count(cpu_env
, arg
);
9129 /* 6,7 are implementation dependent */
9131 goto cp0_unimplemented
;
9133 /* Stop translation as we may have switched the execution mode */
9134 ctx
->base
.is_jmp
= DISAS_STOP
;
9139 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9143 goto cp0_unimplemented
;
9149 gen_helper_mtc0_compare(cpu_env
, arg
);
9152 /* 6,7 are implementation dependent */
9154 goto cp0_unimplemented
;
9156 /* Stop translation as we may have switched the execution mode */
9157 ctx
->base
.is_jmp
= DISAS_STOP
;
9162 save_cpu_state(ctx
, 1);
9163 gen_helper_mtc0_status(cpu_env
, arg
);
9164 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9165 gen_save_pc(ctx
->base
.pc_next
+ 4);
9166 ctx
->base
.is_jmp
= DISAS_EXIT
;
9170 check_insn(ctx
, ISA_MIPS32R2
);
9171 gen_helper_mtc0_intctl(cpu_env
, arg
);
9172 /* Stop translation as we may have switched the execution mode */
9173 ctx
->base
.is_jmp
= DISAS_STOP
;
9177 check_insn(ctx
, ISA_MIPS32R2
);
9178 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9179 /* Stop translation as we may have switched the execution mode */
9180 ctx
->base
.is_jmp
= DISAS_STOP
;
9184 check_insn(ctx
, ISA_MIPS32R2
);
9185 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9186 /* Stop translation as we may have switched the execution mode */
9187 ctx
->base
.is_jmp
= DISAS_STOP
;
9191 goto cp0_unimplemented
;
9197 save_cpu_state(ctx
, 1);
9198 gen_helper_mtc0_cause(cpu_env
, arg
);
9199 /* Stop translation as we may have triggered an interrupt.
9200 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9201 * translated code to check for pending interrupts. */
9202 gen_save_pc(ctx
->base
.pc_next
+ 4);
9203 ctx
->base
.is_jmp
= DISAS_EXIT
;
9207 goto cp0_unimplemented
;
9213 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9217 goto cp0_unimplemented
;
9227 check_insn(ctx
, ISA_MIPS32R2
);
9228 gen_helper_mtc0_ebase(cpu_env
, arg
);
9232 goto cp0_unimplemented
;
9238 gen_helper_mtc0_config0(cpu_env
, arg
);
9240 /* Stop translation as we may have switched the execution mode */
9241 ctx
->base
.is_jmp
= DISAS_STOP
;
9244 /* ignored, read only */
9248 gen_helper_mtc0_config2(cpu_env
, arg
);
9250 /* Stop translation as we may have switched the execution mode */
9251 ctx
->base
.is_jmp
= DISAS_STOP
;
9254 gen_helper_mtc0_config3(cpu_env
, arg
);
9256 /* Stop translation as we may have switched the execution mode */
9257 ctx
->base
.is_jmp
= DISAS_STOP
;
9260 /* currently ignored */
9264 gen_helper_mtc0_config5(cpu_env
, arg
);
9266 /* Stop translation as we may have switched the execution mode */
9267 ctx
->base
.is_jmp
= DISAS_STOP
;
9269 /* 6,7 are implementation dependent */
9271 rn
= "Invalid config selector";
9272 goto cp0_unimplemented
;
9278 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9282 CP0_CHECK(ctx
->mrp
);
9283 gen_helper_mtc0_maar(cpu_env
, arg
);
9287 CP0_CHECK(ctx
->mrp
);
9288 gen_helper_mtc0_maari(cpu_env
, arg
);
9292 goto cp0_unimplemented
;
9305 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9306 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9310 goto cp0_unimplemented
;
9323 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9324 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9328 goto cp0_unimplemented
;
9334 check_insn(ctx
, ISA_MIPS3
);
9335 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9339 goto cp0_unimplemented
;
9343 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9344 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9347 gen_helper_mtc0_framemask(cpu_env
, arg
);
9351 goto cp0_unimplemented
;
9356 rn
= "Diagnostic"; /* implementation dependent */
9361 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9362 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9363 gen_save_pc(ctx
->base
.pc_next
+ 4);
9364 ctx
->base
.is_jmp
= DISAS_EXIT
;
9368 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9369 /* Stop translation as we may have switched the execution mode */
9370 ctx
->base
.is_jmp
= DISAS_STOP
;
9371 rn
= "TraceControl";
9372 goto cp0_unimplemented
;
9374 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9375 /* Stop translation as we may have switched the execution mode */
9376 ctx
->base
.is_jmp
= DISAS_STOP
;
9377 rn
= "TraceControl2";
9378 goto cp0_unimplemented
;
9380 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9381 /* Stop translation as we may have switched the execution mode */
9382 ctx
->base
.is_jmp
= DISAS_STOP
;
9383 rn
= "UserTraceData";
9384 goto cp0_unimplemented
;
9386 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9387 /* Stop translation as we may have switched the execution mode */
9388 ctx
->base
.is_jmp
= DISAS_STOP
;
9390 goto cp0_unimplemented
;
9392 goto cp0_unimplemented
;
9399 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9403 goto cp0_unimplemented
;
9409 gen_helper_mtc0_performance0(cpu_env
, arg
);
9410 rn
= "Performance0";
9413 // gen_helper_mtc0_performance1(cpu_env, arg);
9414 rn
= "Performance1";
9415 goto cp0_unimplemented
;
9417 // gen_helper_mtc0_performance2(cpu_env, arg);
9418 rn
= "Performance2";
9419 goto cp0_unimplemented
;
9421 // gen_helper_mtc0_performance3(cpu_env, arg);
9422 rn
= "Performance3";
9423 goto cp0_unimplemented
;
9425 // gen_helper_mtc0_performance4(cpu_env, arg);
9426 rn
= "Performance4";
9427 goto cp0_unimplemented
;
9429 // gen_helper_mtc0_performance5(cpu_env, arg);
9430 rn
= "Performance5";
9431 goto cp0_unimplemented
;
9433 // gen_helper_mtc0_performance6(cpu_env, arg);
9434 rn
= "Performance6";
9435 goto cp0_unimplemented
;
9437 // gen_helper_mtc0_performance7(cpu_env, arg);
9438 rn
= "Performance7";
9439 goto cp0_unimplemented
;
9441 goto cp0_unimplemented
;
9447 gen_helper_mtc0_errctl(cpu_env
, arg
);
9448 ctx
->base
.is_jmp
= DISAS_STOP
;
9452 goto cp0_unimplemented
;
9465 goto cp0_unimplemented
;
9474 gen_helper_mtc0_taglo(cpu_env
, arg
);
9481 gen_helper_mtc0_datalo(cpu_env
, arg
);
9485 goto cp0_unimplemented
;
9494 gen_helper_mtc0_taghi(cpu_env
, arg
);
9501 gen_helper_mtc0_datahi(cpu_env
, arg
);
9506 goto cp0_unimplemented
;
9512 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9516 goto cp0_unimplemented
;
9523 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9532 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9533 tcg_gen_st_tl(arg
, cpu_env
,
9534 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9538 goto cp0_unimplemented
;
9542 goto cp0_unimplemented
;
9544 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9546 /* For simplicity assume that all writes can cause interrupts. */
9547 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9549 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9550 * translated code to check for pending interrupts. */
9551 gen_save_pc(ctx
->base
.pc_next
+ 4);
9552 ctx
->base
.is_jmp
= DISAS_EXIT
;
9557 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9559 #endif /* TARGET_MIPS64 */
9561 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9562 int u
, int sel
, int h
)
9564 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9565 TCGv t0
= tcg_temp_local_new();
9567 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9568 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9569 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9570 tcg_gen_movi_tl(t0
, -1);
9571 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9572 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9573 tcg_gen_movi_tl(t0
, -1);
9579 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9582 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9592 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9595 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9598 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9601 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9604 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9607 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9610 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9613 gen_mfc0(ctx
, t0
, rt
, sel
);
9620 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9623 gen_mfc0(ctx
, t0
, rt
, sel
);
9629 gen_helper_mftc0_status(t0
, cpu_env
);
9632 gen_mfc0(ctx
, t0
, rt
, sel
);
9638 gen_helper_mftc0_cause(t0
, cpu_env
);
9648 gen_helper_mftc0_epc(t0
, cpu_env
);
9658 gen_helper_mftc0_ebase(t0
, cpu_env
);
9675 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9685 gen_helper_mftc0_debug(t0
, cpu_env
);
9688 gen_mfc0(ctx
, t0
, rt
, sel
);
9693 gen_mfc0(ctx
, t0
, rt
, sel
);
9695 } else switch (sel
) {
9696 /* GPR registers. */
9698 gen_helper_1e0i(mftgpr
, t0
, rt
);
9700 /* Auxiliary CPU registers */
9704 gen_helper_1e0i(mftlo
, t0
, 0);
9707 gen_helper_1e0i(mfthi
, t0
, 0);
9710 gen_helper_1e0i(mftacx
, t0
, 0);
9713 gen_helper_1e0i(mftlo
, t0
, 1);
9716 gen_helper_1e0i(mfthi
, t0
, 1);
9719 gen_helper_1e0i(mftacx
, t0
, 1);
9722 gen_helper_1e0i(mftlo
, t0
, 2);
9725 gen_helper_1e0i(mfthi
, t0
, 2);
9728 gen_helper_1e0i(mftacx
, t0
, 2);
9731 gen_helper_1e0i(mftlo
, t0
, 3);
9734 gen_helper_1e0i(mfthi
, t0
, 3);
9737 gen_helper_1e0i(mftacx
, t0
, 3);
9740 gen_helper_mftdsp(t0
, cpu_env
);
9746 /* Floating point (COP1). */
9748 /* XXX: For now we support only a single FPU context. */
9750 TCGv_i32 fp0
= tcg_temp_new_i32();
9752 gen_load_fpr32(ctx
, fp0
, rt
);
9753 tcg_gen_ext_i32_tl(t0
, fp0
);
9754 tcg_temp_free_i32(fp0
);
9756 TCGv_i32 fp0
= tcg_temp_new_i32();
9758 gen_load_fpr32h(ctx
, fp0
, rt
);
9759 tcg_gen_ext_i32_tl(t0
, fp0
);
9760 tcg_temp_free_i32(fp0
);
9764 /* XXX: For now we support only a single FPU context. */
9765 gen_helper_1e0i(cfc1
, t0
, rt
);
9767 /* COP2: Not implemented. */
9774 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9775 gen_store_gpr(t0
, rd
);
9781 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9782 generate_exception_end(ctx
, EXCP_RI
);
9785 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9786 int u
, int sel
, int h
)
9788 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9789 TCGv t0
= tcg_temp_local_new();
9791 gen_load_gpr(t0
, rt
);
9792 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9793 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9794 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9796 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9797 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9804 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9807 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9817 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9820 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9823 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9826 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9829 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9832 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9835 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9838 gen_mtc0(ctx
, t0
, rd
, sel
);
9845 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9848 gen_mtc0(ctx
, t0
, rd
, sel
);
9854 gen_helper_mttc0_status(cpu_env
, t0
);
9857 gen_mtc0(ctx
, t0
, rd
, sel
);
9863 gen_helper_mttc0_cause(cpu_env
, t0
);
9873 gen_helper_mttc0_ebase(cpu_env
, t0
);
9883 gen_helper_mttc0_debug(cpu_env
, t0
);
9886 gen_mtc0(ctx
, t0
, rd
, sel
);
9891 gen_mtc0(ctx
, t0
, rd
, sel
);
9893 } else switch (sel
) {
9894 /* GPR registers. */
9896 gen_helper_0e1i(mttgpr
, t0
, rd
);
9898 /* Auxiliary CPU registers */
9902 gen_helper_0e1i(mttlo
, t0
, 0);
9905 gen_helper_0e1i(mtthi
, t0
, 0);
9908 gen_helper_0e1i(mttacx
, t0
, 0);
9911 gen_helper_0e1i(mttlo
, t0
, 1);
9914 gen_helper_0e1i(mtthi
, t0
, 1);
9917 gen_helper_0e1i(mttacx
, t0
, 1);
9920 gen_helper_0e1i(mttlo
, t0
, 2);
9923 gen_helper_0e1i(mtthi
, t0
, 2);
9926 gen_helper_0e1i(mttacx
, t0
, 2);
9929 gen_helper_0e1i(mttlo
, t0
, 3);
9932 gen_helper_0e1i(mtthi
, t0
, 3);
9935 gen_helper_0e1i(mttacx
, t0
, 3);
9938 gen_helper_mttdsp(cpu_env
, t0
);
9944 /* Floating point (COP1). */
9946 /* XXX: For now we support only a single FPU context. */
9948 TCGv_i32 fp0
= tcg_temp_new_i32();
9950 tcg_gen_trunc_tl_i32(fp0
, t0
);
9951 gen_store_fpr32(ctx
, fp0
, rd
);
9952 tcg_temp_free_i32(fp0
);
9954 TCGv_i32 fp0
= tcg_temp_new_i32();
9956 tcg_gen_trunc_tl_i32(fp0
, t0
);
9957 gen_store_fpr32h(ctx
, fp0
, rd
);
9958 tcg_temp_free_i32(fp0
);
9962 /* XXX: For now we support only a single FPU context. */
9964 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9966 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9967 tcg_temp_free_i32(fs_tmp
);
9969 /* Stop translation as we may have changed hflags */
9970 ctx
->base
.is_jmp
= DISAS_STOP
;
9972 /* COP2: Not implemented. */
9979 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9985 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9986 generate_exception_end(ctx
, EXCP_RI
);
9989 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9991 const char *opn
= "ldst";
9993 check_cp0_enabled(ctx
);
10000 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10005 TCGv t0
= tcg_temp_new();
10007 gen_load_gpr(t0
, rt
);
10008 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10013 #if defined(TARGET_MIPS64)
10015 check_insn(ctx
, ISA_MIPS3
);
10017 /* Treat as NOP. */
10020 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10024 check_insn(ctx
, ISA_MIPS3
);
10026 TCGv t0
= tcg_temp_new();
10028 gen_load_gpr(t0
, rt
);
10029 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10038 /* Treat as NOP. */
10041 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10047 TCGv t0
= tcg_temp_new();
10048 gen_load_gpr(t0
, rt
);
10049 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10055 check_cp0_enabled(ctx
);
10057 /* Treat as NOP. */
10060 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10061 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10065 check_cp0_enabled(ctx
);
10066 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10067 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10072 if (!env
->tlb
->helper_tlbwi
)
10074 gen_helper_tlbwi(cpu_env
);
10078 if (ctx
->ie
>= 2) {
10079 if (!env
->tlb
->helper_tlbinv
) {
10082 gen_helper_tlbinv(cpu_env
);
10083 } /* treat as nop if TLBINV not supported */
10087 if (ctx
->ie
>= 2) {
10088 if (!env
->tlb
->helper_tlbinvf
) {
10091 gen_helper_tlbinvf(cpu_env
);
10092 } /* treat as nop if TLBINV not supported */
10096 if (!env
->tlb
->helper_tlbwr
)
10098 gen_helper_tlbwr(cpu_env
);
10102 if (!env
->tlb
->helper_tlbp
)
10104 gen_helper_tlbp(cpu_env
);
10108 if (!env
->tlb
->helper_tlbr
)
10110 gen_helper_tlbr(cpu_env
);
10112 case OPC_ERET
: /* OPC_ERETNC */
10113 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10114 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10117 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10118 if (ctx
->opcode
& (1 << bit_shift
)) {
10121 check_insn(ctx
, ISA_MIPS32R5
);
10122 gen_helper_eretnc(cpu_env
);
10126 check_insn(ctx
, ISA_MIPS2
);
10127 gen_helper_eret(cpu_env
);
10129 ctx
->base
.is_jmp
= DISAS_EXIT
;
10134 check_insn(ctx
, ISA_MIPS32
);
10135 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10136 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10139 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10141 generate_exception_end(ctx
, EXCP_RI
);
10143 gen_helper_deret(cpu_env
);
10144 ctx
->base
.is_jmp
= DISAS_EXIT
;
10149 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10150 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10151 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10154 /* If we get an exception, we want to restart at next instruction */
10155 ctx
->base
.pc_next
+= 4;
10156 save_cpu_state(ctx
, 1);
10157 ctx
->base
.pc_next
-= 4;
10158 gen_helper_wait(cpu_env
);
10159 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10164 generate_exception_end(ctx
, EXCP_RI
);
10167 (void)opn
; /* avoid a compiler warning */
10169 #endif /* !CONFIG_USER_ONLY */
10171 /* CP1 Branches (before delay slot) */
10172 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10173 int32_t cc
, int32_t offset
)
10175 target_ulong btarget
;
10176 TCGv_i32 t0
= tcg_temp_new_i32();
10178 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10179 generate_exception_end(ctx
, EXCP_RI
);
10184 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
10186 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
10190 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10191 tcg_gen_not_i32(t0
, t0
);
10192 tcg_gen_andi_i32(t0
, t0
, 1);
10193 tcg_gen_extu_i32_tl(bcond
, t0
);
10196 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10197 tcg_gen_not_i32(t0
, t0
);
10198 tcg_gen_andi_i32(t0
, t0
, 1);
10199 tcg_gen_extu_i32_tl(bcond
, t0
);
10202 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10203 tcg_gen_andi_i32(t0
, t0
, 1);
10204 tcg_gen_extu_i32_tl(bcond
, t0
);
10207 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10208 tcg_gen_andi_i32(t0
, t0
, 1);
10209 tcg_gen_extu_i32_tl(bcond
, t0
);
10211 ctx
->hflags
|= MIPS_HFLAG_BL
;
10215 TCGv_i32 t1
= tcg_temp_new_i32();
10216 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10217 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10218 tcg_gen_nand_i32(t0
, t0
, t1
);
10219 tcg_temp_free_i32(t1
);
10220 tcg_gen_andi_i32(t0
, t0
, 1);
10221 tcg_gen_extu_i32_tl(bcond
, t0
);
10226 TCGv_i32 t1
= tcg_temp_new_i32();
10227 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10228 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10229 tcg_gen_or_i32(t0
, t0
, t1
);
10230 tcg_temp_free_i32(t1
);
10231 tcg_gen_andi_i32(t0
, t0
, 1);
10232 tcg_gen_extu_i32_tl(bcond
, t0
);
10237 TCGv_i32 t1
= tcg_temp_new_i32();
10238 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10239 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10240 tcg_gen_and_i32(t0
, t0
, t1
);
10241 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10242 tcg_gen_and_i32(t0
, t0
, t1
);
10243 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10244 tcg_gen_nand_i32(t0
, t0
, t1
);
10245 tcg_temp_free_i32(t1
);
10246 tcg_gen_andi_i32(t0
, t0
, 1);
10247 tcg_gen_extu_i32_tl(bcond
, t0
);
10252 TCGv_i32 t1
= tcg_temp_new_i32();
10253 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10254 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10255 tcg_gen_or_i32(t0
, t0
, t1
);
10256 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10257 tcg_gen_or_i32(t0
, t0
, t1
);
10258 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10259 tcg_gen_or_i32(t0
, t0
, t1
);
10260 tcg_temp_free_i32(t1
);
10261 tcg_gen_andi_i32(t0
, t0
, 1);
10262 tcg_gen_extu_i32_tl(bcond
, t0
);
10265 ctx
->hflags
|= MIPS_HFLAG_BC
;
10268 MIPS_INVAL("cp1 cond branch");
10269 generate_exception_end(ctx
, EXCP_RI
);
10272 ctx
->btarget
= btarget
;
10273 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10275 tcg_temp_free_i32(t0
);
10278 /* R6 CP1 Branches */
10279 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10280 int32_t ft
, int32_t offset
,
10281 int delayslot_size
)
10283 target_ulong btarget
;
10284 TCGv_i64 t0
= tcg_temp_new_i64();
10286 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10287 #ifdef MIPS_DEBUG_DISAS
10288 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10289 "\n", ctx
->base
.pc_next
);
10291 generate_exception_end(ctx
, EXCP_RI
);
10295 gen_load_fpr64(ctx
, t0
, ft
);
10296 tcg_gen_andi_i64(t0
, t0
, 1);
10298 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10302 tcg_gen_xori_i64(t0
, t0
, 1);
10303 ctx
->hflags
|= MIPS_HFLAG_BC
;
10306 /* t0 already set */
10307 ctx
->hflags
|= MIPS_HFLAG_BC
;
10310 MIPS_INVAL("cp1 cond branch");
10311 generate_exception_end(ctx
, EXCP_RI
);
10315 tcg_gen_trunc_i64_tl(bcond
, t0
);
10317 ctx
->btarget
= btarget
;
10319 switch (delayslot_size
) {
10321 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10324 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10329 tcg_temp_free_i64(t0
);
10332 /* Coprocessor 1 (FPU) */
10334 #define FOP(func, fmt) (((fmt) << 21) | (func))
10337 OPC_ADD_S
= FOP(0, FMT_S
),
10338 OPC_SUB_S
= FOP(1, FMT_S
),
10339 OPC_MUL_S
= FOP(2, FMT_S
),
10340 OPC_DIV_S
= FOP(3, FMT_S
),
10341 OPC_SQRT_S
= FOP(4, FMT_S
),
10342 OPC_ABS_S
= FOP(5, FMT_S
),
10343 OPC_MOV_S
= FOP(6, FMT_S
),
10344 OPC_NEG_S
= FOP(7, FMT_S
),
10345 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10346 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10347 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10348 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10349 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10350 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10351 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10352 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10353 OPC_SEL_S
= FOP(16, FMT_S
),
10354 OPC_MOVCF_S
= FOP(17, FMT_S
),
10355 OPC_MOVZ_S
= FOP(18, FMT_S
),
10356 OPC_MOVN_S
= FOP(19, FMT_S
),
10357 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10358 OPC_RECIP_S
= FOP(21, FMT_S
),
10359 OPC_RSQRT_S
= FOP(22, FMT_S
),
10360 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10361 OPC_MADDF_S
= FOP(24, FMT_S
),
10362 OPC_MSUBF_S
= FOP(25, FMT_S
),
10363 OPC_RINT_S
= FOP(26, FMT_S
),
10364 OPC_CLASS_S
= FOP(27, FMT_S
),
10365 OPC_MIN_S
= FOP(28, FMT_S
),
10366 OPC_RECIP2_S
= FOP(28, FMT_S
),
10367 OPC_MINA_S
= FOP(29, FMT_S
),
10368 OPC_RECIP1_S
= FOP(29, FMT_S
),
10369 OPC_MAX_S
= FOP(30, FMT_S
),
10370 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10371 OPC_MAXA_S
= FOP(31, FMT_S
),
10372 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10373 OPC_CVT_D_S
= FOP(33, FMT_S
),
10374 OPC_CVT_W_S
= FOP(36, FMT_S
),
10375 OPC_CVT_L_S
= FOP(37, FMT_S
),
10376 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10377 OPC_CMP_F_S
= FOP (48, FMT_S
),
10378 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10379 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10380 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10381 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10382 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10383 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10384 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10385 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10386 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10387 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10388 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10389 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10390 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10391 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10392 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10394 OPC_ADD_D
= FOP(0, FMT_D
),
10395 OPC_SUB_D
= FOP(1, FMT_D
),
10396 OPC_MUL_D
= FOP(2, FMT_D
),
10397 OPC_DIV_D
= FOP(3, FMT_D
),
10398 OPC_SQRT_D
= FOP(4, FMT_D
),
10399 OPC_ABS_D
= FOP(5, FMT_D
),
10400 OPC_MOV_D
= FOP(6, FMT_D
),
10401 OPC_NEG_D
= FOP(7, FMT_D
),
10402 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10403 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10404 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10405 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10406 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10407 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10408 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10409 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10410 OPC_SEL_D
= FOP(16, FMT_D
),
10411 OPC_MOVCF_D
= FOP(17, FMT_D
),
10412 OPC_MOVZ_D
= FOP(18, FMT_D
),
10413 OPC_MOVN_D
= FOP(19, FMT_D
),
10414 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10415 OPC_RECIP_D
= FOP(21, FMT_D
),
10416 OPC_RSQRT_D
= FOP(22, FMT_D
),
10417 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10418 OPC_MADDF_D
= FOP(24, FMT_D
),
10419 OPC_MSUBF_D
= FOP(25, FMT_D
),
10420 OPC_RINT_D
= FOP(26, FMT_D
),
10421 OPC_CLASS_D
= FOP(27, FMT_D
),
10422 OPC_MIN_D
= FOP(28, FMT_D
),
10423 OPC_RECIP2_D
= FOP(28, FMT_D
),
10424 OPC_MINA_D
= FOP(29, FMT_D
),
10425 OPC_RECIP1_D
= FOP(29, FMT_D
),
10426 OPC_MAX_D
= FOP(30, FMT_D
),
10427 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10428 OPC_MAXA_D
= FOP(31, FMT_D
),
10429 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10430 OPC_CVT_S_D
= FOP(32, FMT_D
),
10431 OPC_CVT_W_D
= FOP(36, FMT_D
),
10432 OPC_CVT_L_D
= FOP(37, FMT_D
),
10433 OPC_CMP_F_D
= FOP (48, FMT_D
),
10434 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10435 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10436 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10437 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10438 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10439 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10440 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10441 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10442 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10443 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10444 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10445 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10446 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10447 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10448 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10450 OPC_CVT_S_W
= FOP(32, FMT_W
),
10451 OPC_CVT_D_W
= FOP(33, FMT_W
),
10452 OPC_CVT_S_L
= FOP(32, FMT_L
),
10453 OPC_CVT_D_L
= FOP(33, FMT_L
),
10454 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10456 OPC_ADD_PS
= FOP(0, FMT_PS
),
10457 OPC_SUB_PS
= FOP(1, FMT_PS
),
10458 OPC_MUL_PS
= FOP(2, FMT_PS
),
10459 OPC_DIV_PS
= FOP(3, FMT_PS
),
10460 OPC_ABS_PS
= FOP(5, FMT_PS
),
10461 OPC_MOV_PS
= FOP(6, FMT_PS
),
10462 OPC_NEG_PS
= FOP(7, FMT_PS
),
10463 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10464 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10465 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10466 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10467 OPC_MULR_PS
= FOP(26, FMT_PS
),
10468 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10469 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10470 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10471 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10473 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10474 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10475 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10476 OPC_PLL_PS
= FOP(44, FMT_PS
),
10477 OPC_PLU_PS
= FOP(45, FMT_PS
),
10478 OPC_PUL_PS
= FOP(46, FMT_PS
),
10479 OPC_PUU_PS
= FOP(47, FMT_PS
),
10480 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10481 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10482 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10483 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10484 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10485 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10486 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10487 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10488 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10489 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10490 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10491 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10492 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10493 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10494 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10495 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10499 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10500 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10501 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10502 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10503 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10504 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10505 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10506 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10507 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10508 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10509 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10510 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10511 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10512 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10513 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10514 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10515 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10516 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10517 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10518 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10519 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10520 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10522 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10523 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10524 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10525 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10526 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10527 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10528 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10529 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10530 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10531 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10532 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10533 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10534 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10535 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10536 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10537 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10538 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10539 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10540 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10541 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10542 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10543 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10545 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10547 TCGv t0
= tcg_temp_new();
10552 TCGv_i32 fp0
= tcg_temp_new_i32();
10554 gen_load_fpr32(ctx
, fp0
, fs
);
10555 tcg_gen_ext_i32_tl(t0
, fp0
);
10556 tcg_temp_free_i32(fp0
);
10558 gen_store_gpr(t0
, rt
);
10561 gen_load_gpr(t0
, rt
);
10563 TCGv_i32 fp0
= tcg_temp_new_i32();
10565 tcg_gen_trunc_tl_i32(fp0
, t0
);
10566 gen_store_fpr32(ctx
, fp0
, fs
);
10567 tcg_temp_free_i32(fp0
);
10571 gen_helper_1e0i(cfc1
, t0
, fs
);
10572 gen_store_gpr(t0
, rt
);
10575 gen_load_gpr(t0
, rt
);
10576 save_cpu_state(ctx
, 0);
10578 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10580 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10581 tcg_temp_free_i32(fs_tmp
);
10583 /* Stop translation as we may have changed hflags */
10584 ctx
->base
.is_jmp
= DISAS_STOP
;
10586 #if defined(TARGET_MIPS64)
10588 gen_load_fpr64(ctx
, t0
, fs
);
10589 gen_store_gpr(t0
, rt
);
10592 gen_load_gpr(t0
, rt
);
10593 gen_store_fpr64(ctx
, t0
, fs
);
10598 TCGv_i32 fp0
= tcg_temp_new_i32();
10600 gen_load_fpr32h(ctx
, fp0
, fs
);
10601 tcg_gen_ext_i32_tl(t0
, fp0
);
10602 tcg_temp_free_i32(fp0
);
10604 gen_store_gpr(t0
, rt
);
10607 gen_load_gpr(t0
, rt
);
10609 TCGv_i32 fp0
= tcg_temp_new_i32();
10611 tcg_gen_trunc_tl_i32(fp0
, t0
);
10612 gen_store_fpr32h(ctx
, fp0
, fs
);
10613 tcg_temp_free_i32(fp0
);
10617 MIPS_INVAL("cp1 move");
10618 generate_exception_end(ctx
, EXCP_RI
);
10626 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10633 /* Treat as NOP. */
10638 cond
= TCG_COND_EQ
;
10640 cond
= TCG_COND_NE
;
10642 l1
= gen_new_label();
10643 t0
= tcg_temp_new_i32();
10644 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10645 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10646 tcg_temp_free_i32(t0
);
10648 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10650 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10655 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10659 TCGv_i32 t0
= tcg_temp_new_i32();
10660 TCGLabel
*l1
= gen_new_label();
10663 cond
= TCG_COND_EQ
;
10665 cond
= TCG_COND_NE
;
10667 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10668 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10669 gen_load_fpr32(ctx
, t0
, fs
);
10670 gen_store_fpr32(ctx
, t0
, fd
);
10672 tcg_temp_free_i32(t0
);
10675 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10678 TCGv_i32 t0
= tcg_temp_new_i32();
10680 TCGLabel
*l1
= gen_new_label();
10683 cond
= TCG_COND_EQ
;
10685 cond
= TCG_COND_NE
;
10687 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10688 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10689 tcg_temp_free_i32(t0
);
10690 fp0
= tcg_temp_new_i64();
10691 gen_load_fpr64(ctx
, fp0
, fs
);
10692 gen_store_fpr64(ctx
, fp0
, fd
);
10693 tcg_temp_free_i64(fp0
);
10697 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10701 TCGv_i32 t0
= tcg_temp_new_i32();
10702 TCGLabel
*l1
= gen_new_label();
10703 TCGLabel
*l2
= gen_new_label();
10706 cond
= TCG_COND_EQ
;
10708 cond
= TCG_COND_NE
;
10710 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10711 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10712 gen_load_fpr32(ctx
, t0
, fs
);
10713 gen_store_fpr32(ctx
, t0
, fd
);
10716 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10717 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10718 gen_load_fpr32h(ctx
, t0
, fs
);
10719 gen_store_fpr32h(ctx
, t0
, fd
);
10720 tcg_temp_free_i32(t0
);
10724 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10727 TCGv_i32 t1
= tcg_const_i32(0);
10728 TCGv_i32 fp0
= tcg_temp_new_i32();
10729 TCGv_i32 fp1
= tcg_temp_new_i32();
10730 TCGv_i32 fp2
= tcg_temp_new_i32();
10731 gen_load_fpr32(ctx
, fp0
, fd
);
10732 gen_load_fpr32(ctx
, fp1
, ft
);
10733 gen_load_fpr32(ctx
, fp2
, fs
);
10737 tcg_gen_andi_i32(fp0
, fp0
, 1);
10738 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10741 tcg_gen_andi_i32(fp1
, fp1
, 1);
10742 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10745 tcg_gen_andi_i32(fp1
, fp1
, 1);
10746 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10749 MIPS_INVAL("gen_sel_s");
10750 generate_exception_end(ctx
, EXCP_RI
);
10754 gen_store_fpr32(ctx
, fp0
, fd
);
10755 tcg_temp_free_i32(fp2
);
10756 tcg_temp_free_i32(fp1
);
10757 tcg_temp_free_i32(fp0
);
10758 tcg_temp_free_i32(t1
);
10761 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10764 TCGv_i64 t1
= tcg_const_i64(0);
10765 TCGv_i64 fp0
= tcg_temp_new_i64();
10766 TCGv_i64 fp1
= tcg_temp_new_i64();
10767 TCGv_i64 fp2
= tcg_temp_new_i64();
10768 gen_load_fpr64(ctx
, fp0
, fd
);
10769 gen_load_fpr64(ctx
, fp1
, ft
);
10770 gen_load_fpr64(ctx
, fp2
, fs
);
10774 tcg_gen_andi_i64(fp0
, fp0
, 1);
10775 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10778 tcg_gen_andi_i64(fp1
, fp1
, 1);
10779 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10782 tcg_gen_andi_i64(fp1
, fp1
, 1);
10783 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10786 MIPS_INVAL("gen_sel_d");
10787 generate_exception_end(ctx
, EXCP_RI
);
10791 gen_store_fpr64(ctx
, fp0
, fd
);
10792 tcg_temp_free_i64(fp2
);
10793 tcg_temp_free_i64(fp1
);
10794 tcg_temp_free_i64(fp0
);
10795 tcg_temp_free_i64(t1
);
10798 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10799 int ft
, int fs
, int fd
, int cc
)
10801 uint32_t func
= ctx
->opcode
& 0x3f;
10805 TCGv_i32 fp0
= tcg_temp_new_i32();
10806 TCGv_i32 fp1
= tcg_temp_new_i32();
10808 gen_load_fpr32(ctx
, fp0
, fs
);
10809 gen_load_fpr32(ctx
, fp1
, ft
);
10810 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10811 tcg_temp_free_i32(fp1
);
10812 gen_store_fpr32(ctx
, fp0
, fd
);
10813 tcg_temp_free_i32(fp0
);
10818 TCGv_i32 fp0
= tcg_temp_new_i32();
10819 TCGv_i32 fp1
= tcg_temp_new_i32();
10821 gen_load_fpr32(ctx
, fp0
, fs
);
10822 gen_load_fpr32(ctx
, fp1
, ft
);
10823 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10824 tcg_temp_free_i32(fp1
);
10825 gen_store_fpr32(ctx
, fp0
, fd
);
10826 tcg_temp_free_i32(fp0
);
10831 TCGv_i32 fp0
= tcg_temp_new_i32();
10832 TCGv_i32 fp1
= tcg_temp_new_i32();
10834 gen_load_fpr32(ctx
, fp0
, fs
);
10835 gen_load_fpr32(ctx
, fp1
, ft
);
10836 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10837 tcg_temp_free_i32(fp1
);
10838 gen_store_fpr32(ctx
, fp0
, fd
);
10839 tcg_temp_free_i32(fp0
);
10844 TCGv_i32 fp0
= tcg_temp_new_i32();
10845 TCGv_i32 fp1
= tcg_temp_new_i32();
10847 gen_load_fpr32(ctx
, fp0
, fs
);
10848 gen_load_fpr32(ctx
, fp1
, ft
);
10849 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10850 tcg_temp_free_i32(fp1
);
10851 gen_store_fpr32(ctx
, fp0
, fd
);
10852 tcg_temp_free_i32(fp0
);
10857 TCGv_i32 fp0
= tcg_temp_new_i32();
10859 gen_load_fpr32(ctx
, fp0
, fs
);
10860 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10861 gen_store_fpr32(ctx
, fp0
, fd
);
10862 tcg_temp_free_i32(fp0
);
10867 TCGv_i32 fp0
= tcg_temp_new_i32();
10869 gen_load_fpr32(ctx
, fp0
, fs
);
10870 if (ctx
->abs2008
) {
10871 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10873 gen_helper_float_abs_s(fp0
, fp0
);
10875 gen_store_fpr32(ctx
, fp0
, fd
);
10876 tcg_temp_free_i32(fp0
);
10881 TCGv_i32 fp0
= tcg_temp_new_i32();
10883 gen_load_fpr32(ctx
, fp0
, fs
);
10884 gen_store_fpr32(ctx
, fp0
, fd
);
10885 tcg_temp_free_i32(fp0
);
10890 TCGv_i32 fp0
= tcg_temp_new_i32();
10892 gen_load_fpr32(ctx
, fp0
, fs
);
10893 if (ctx
->abs2008
) {
10894 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10896 gen_helper_float_chs_s(fp0
, fp0
);
10898 gen_store_fpr32(ctx
, fp0
, fd
);
10899 tcg_temp_free_i32(fp0
);
10902 case OPC_ROUND_L_S
:
10903 check_cp1_64bitmode(ctx
);
10905 TCGv_i32 fp32
= tcg_temp_new_i32();
10906 TCGv_i64 fp64
= tcg_temp_new_i64();
10908 gen_load_fpr32(ctx
, fp32
, fs
);
10909 if (ctx
->nan2008
) {
10910 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10912 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10914 tcg_temp_free_i32(fp32
);
10915 gen_store_fpr64(ctx
, fp64
, fd
);
10916 tcg_temp_free_i64(fp64
);
10919 case OPC_TRUNC_L_S
:
10920 check_cp1_64bitmode(ctx
);
10922 TCGv_i32 fp32
= tcg_temp_new_i32();
10923 TCGv_i64 fp64
= tcg_temp_new_i64();
10925 gen_load_fpr32(ctx
, fp32
, fs
);
10926 if (ctx
->nan2008
) {
10927 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10929 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10931 tcg_temp_free_i32(fp32
);
10932 gen_store_fpr64(ctx
, fp64
, fd
);
10933 tcg_temp_free_i64(fp64
);
10937 check_cp1_64bitmode(ctx
);
10939 TCGv_i32 fp32
= tcg_temp_new_i32();
10940 TCGv_i64 fp64
= tcg_temp_new_i64();
10942 gen_load_fpr32(ctx
, fp32
, fs
);
10943 if (ctx
->nan2008
) {
10944 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10946 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10948 tcg_temp_free_i32(fp32
);
10949 gen_store_fpr64(ctx
, fp64
, fd
);
10950 tcg_temp_free_i64(fp64
);
10953 case OPC_FLOOR_L_S
:
10954 check_cp1_64bitmode(ctx
);
10956 TCGv_i32 fp32
= tcg_temp_new_i32();
10957 TCGv_i64 fp64
= tcg_temp_new_i64();
10959 gen_load_fpr32(ctx
, fp32
, fs
);
10960 if (ctx
->nan2008
) {
10961 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10963 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10965 tcg_temp_free_i32(fp32
);
10966 gen_store_fpr64(ctx
, fp64
, fd
);
10967 tcg_temp_free_i64(fp64
);
10970 case OPC_ROUND_W_S
:
10972 TCGv_i32 fp0
= tcg_temp_new_i32();
10974 gen_load_fpr32(ctx
, fp0
, fs
);
10975 if (ctx
->nan2008
) {
10976 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10978 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10980 gen_store_fpr32(ctx
, fp0
, fd
);
10981 tcg_temp_free_i32(fp0
);
10984 case OPC_TRUNC_W_S
:
10986 TCGv_i32 fp0
= tcg_temp_new_i32();
10988 gen_load_fpr32(ctx
, fp0
, fs
);
10989 if (ctx
->nan2008
) {
10990 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10992 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10994 gen_store_fpr32(ctx
, fp0
, fd
);
10995 tcg_temp_free_i32(fp0
);
11000 TCGv_i32 fp0
= tcg_temp_new_i32();
11002 gen_load_fpr32(ctx
, fp0
, fs
);
11003 if (ctx
->nan2008
) {
11004 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11006 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11008 gen_store_fpr32(ctx
, fp0
, fd
);
11009 tcg_temp_free_i32(fp0
);
11012 case OPC_FLOOR_W_S
:
11014 TCGv_i32 fp0
= tcg_temp_new_i32();
11016 gen_load_fpr32(ctx
, fp0
, fs
);
11017 if (ctx
->nan2008
) {
11018 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11020 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11022 gen_store_fpr32(ctx
, fp0
, fd
);
11023 tcg_temp_free_i32(fp0
);
11027 check_insn(ctx
, ISA_MIPS32R6
);
11028 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11031 check_insn(ctx
, ISA_MIPS32R6
);
11032 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11035 check_insn(ctx
, ISA_MIPS32R6
);
11036 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11039 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11040 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11043 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11045 TCGLabel
*l1
= gen_new_label();
11049 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11051 fp0
= tcg_temp_new_i32();
11052 gen_load_fpr32(ctx
, fp0
, fs
);
11053 gen_store_fpr32(ctx
, fp0
, fd
);
11054 tcg_temp_free_i32(fp0
);
11059 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11061 TCGLabel
*l1
= gen_new_label();
11065 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11066 fp0
= tcg_temp_new_i32();
11067 gen_load_fpr32(ctx
, fp0
, fs
);
11068 gen_store_fpr32(ctx
, fp0
, fd
);
11069 tcg_temp_free_i32(fp0
);
11076 TCGv_i32 fp0
= tcg_temp_new_i32();
11078 gen_load_fpr32(ctx
, fp0
, fs
);
11079 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11080 gen_store_fpr32(ctx
, fp0
, fd
);
11081 tcg_temp_free_i32(fp0
);
11086 TCGv_i32 fp0
= tcg_temp_new_i32();
11088 gen_load_fpr32(ctx
, fp0
, fs
);
11089 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11090 gen_store_fpr32(ctx
, fp0
, fd
);
11091 tcg_temp_free_i32(fp0
);
11095 check_insn(ctx
, ISA_MIPS32R6
);
11097 TCGv_i32 fp0
= tcg_temp_new_i32();
11098 TCGv_i32 fp1
= tcg_temp_new_i32();
11099 TCGv_i32 fp2
= tcg_temp_new_i32();
11100 gen_load_fpr32(ctx
, fp0
, fs
);
11101 gen_load_fpr32(ctx
, fp1
, ft
);
11102 gen_load_fpr32(ctx
, fp2
, fd
);
11103 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11104 gen_store_fpr32(ctx
, fp2
, fd
);
11105 tcg_temp_free_i32(fp2
);
11106 tcg_temp_free_i32(fp1
);
11107 tcg_temp_free_i32(fp0
);
11111 check_insn(ctx
, ISA_MIPS32R6
);
11113 TCGv_i32 fp0
= tcg_temp_new_i32();
11114 TCGv_i32 fp1
= tcg_temp_new_i32();
11115 TCGv_i32 fp2
= tcg_temp_new_i32();
11116 gen_load_fpr32(ctx
, fp0
, fs
);
11117 gen_load_fpr32(ctx
, fp1
, ft
);
11118 gen_load_fpr32(ctx
, fp2
, fd
);
11119 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11120 gen_store_fpr32(ctx
, fp2
, fd
);
11121 tcg_temp_free_i32(fp2
);
11122 tcg_temp_free_i32(fp1
);
11123 tcg_temp_free_i32(fp0
);
11127 check_insn(ctx
, ISA_MIPS32R6
);
11129 TCGv_i32 fp0
= tcg_temp_new_i32();
11130 gen_load_fpr32(ctx
, fp0
, fs
);
11131 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11132 gen_store_fpr32(ctx
, fp0
, fd
);
11133 tcg_temp_free_i32(fp0
);
11137 check_insn(ctx
, ISA_MIPS32R6
);
11139 TCGv_i32 fp0
= tcg_temp_new_i32();
11140 gen_load_fpr32(ctx
, fp0
, fs
);
11141 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11142 gen_store_fpr32(ctx
, fp0
, fd
);
11143 tcg_temp_free_i32(fp0
);
11146 case OPC_MIN_S
: /* OPC_RECIP2_S */
11147 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11149 TCGv_i32 fp0
= tcg_temp_new_i32();
11150 TCGv_i32 fp1
= tcg_temp_new_i32();
11151 TCGv_i32 fp2
= tcg_temp_new_i32();
11152 gen_load_fpr32(ctx
, fp0
, fs
);
11153 gen_load_fpr32(ctx
, fp1
, ft
);
11154 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11155 gen_store_fpr32(ctx
, fp2
, fd
);
11156 tcg_temp_free_i32(fp2
);
11157 tcg_temp_free_i32(fp1
);
11158 tcg_temp_free_i32(fp0
);
11161 check_cp1_64bitmode(ctx
);
11163 TCGv_i32 fp0
= tcg_temp_new_i32();
11164 TCGv_i32 fp1
= tcg_temp_new_i32();
11166 gen_load_fpr32(ctx
, fp0
, fs
);
11167 gen_load_fpr32(ctx
, fp1
, ft
);
11168 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11169 tcg_temp_free_i32(fp1
);
11170 gen_store_fpr32(ctx
, fp0
, fd
);
11171 tcg_temp_free_i32(fp0
);
11175 case OPC_MINA_S
: /* OPC_RECIP1_S */
11176 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11178 TCGv_i32 fp0
= tcg_temp_new_i32();
11179 TCGv_i32 fp1
= tcg_temp_new_i32();
11180 TCGv_i32 fp2
= tcg_temp_new_i32();
11181 gen_load_fpr32(ctx
, fp0
, fs
);
11182 gen_load_fpr32(ctx
, fp1
, ft
);
11183 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11184 gen_store_fpr32(ctx
, fp2
, fd
);
11185 tcg_temp_free_i32(fp2
);
11186 tcg_temp_free_i32(fp1
);
11187 tcg_temp_free_i32(fp0
);
11190 check_cp1_64bitmode(ctx
);
11192 TCGv_i32 fp0
= tcg_temp_new_i32();
11194 gen_load_fpr32(ctx
, fp0
, fs
);
11195 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11196 gen_store_fpr32(ctx
, fp0
, fd
);
11197 tcg_temp_free_i32(fp0
);
11201 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11202 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11204 TCGv_i32 fp0
= tcg_temp_new_i32();
11205 TCGv_i32 fp1
= tcg_temp_new_i32();
11206 gen_load_fpr32(ctx
, fp0
, fs
);
11207 gen_load_fpr32(ctx
, fp1
, ft
);
11208 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11209 gen_store_fpr32(ctx
, fp1
, fd
);
11210 tcg_temp_free_i32(fp1
);
11211 tcg_temp_free_i32(fp0
);
11214 check_cp1_64bitmode(ctx
);
11216 TCGv_i32 fp0
= tcg_temp_new_i32();
11218 gen_load_fpr32(ctx
, fp0
, fs
);
11219 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11220 gen_store_fpr32(ctx
, fp0
, fd
);
11221 tcg_temp_free_i32(fp0
);
11225 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11226 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11228 TCGv_i32 fp0
= tcg_temp_new_i32();
11229 TCGv_i32 fp1
= tcg_temp_new_i32();
11230 gen_load_fpr32(ctx
, fp0
, fs
);
11231 gen_load_fpr32(ctx
, fp1
, ft
);
11232 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11233 gen_store_fpr32(ctx
, fp1
, fd
);
11234 tcg_temp_free_i32(fp1
);
11235 tcg_temp_free_i32(fp0
);
11238 check_cp1_64bitmode(ctx
);
11240 TCGv_i32 fp0
= tcg_temp_new_i32();
11241 TCGv_i32 fp1
= tcg_temp_new_i32();
11243 gen_load_fpr32(ctx
, fp0
, fs
);
11244 gen_load_fpr32(ctx
, fp1
, ft
);
11245 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11246 tcg_temp_free_i32(fp1
);
11247 gen_store_fpr32(ctx
, fp0
, fd
);
11248 tcg_temp_free_i32(fp0
);
11253 check_cp1_registers(ctx
, fd
);
11255 TCGv_i32 fp32
= tcg_temp_new_i32();
11256 TCGv_i64 fp64
= tcg_temp_new_i64();
11258 gen_load_fpr32(ctx
, fp32
, fs
);
11259 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11260 tcg_temp_free_i32(fp32
);
11261 gen_store_fpr64(ctx
, fp64
, fd
);
11262 tcg_temp_free_i64(fp64
);
11267 TCGv_i32 fp0
= tcg_temp_new_i32();
11269 gen_load_fpr32(ctx
, fp0
, fs
);
11270 if (ctx
->nan2008
) {
11271 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11273 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11275 gen_store_fpr32(ctx
, fp0
, fd
);
11276 tcg_temp_free_i32(fp0
);
11280 check_cp1_64bitmode(ctx
);
11282 TCGv_i32 fp32
= tcg_temp_new_i32();
11283 TCGv_i64 fp64
= tcg_temp_new_i64();
11285 gen_load_fpr32(ctx
, fp32
, fs
);
11286 if (ctx
->nan2008
) {
11287 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11289 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11291 tcg_temp_free_i32(fp32
);
11292 gen_store_fpr64(ctx
, fp64
, fd
);
11293 tcg_temp_free_i64(fp64
);
11299 TCGv_i64 fp64
= tcg_temp_new_i64();
11300 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11301 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11303 gen_load_fpr32(ctx
, fp32_0
, fs
);
11304 gen_load_fpr32(ctx
, fp32_1
, ft
);
11305 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11306 tcg_temp_free_i32(fp32_1
);
11307 tcg_temp_free_i32(fp32_0
);
11308 gen_store_fpr64(ctx
, fp64
, fd
);
11309 tcg_temp_free_i64(fp64
);
11315 case OPC_CMP_UEQ_S
:
11316 case OPC_CMP_OLT_S
:
11317 case OPC_CMP_ULT_S
:
11318 case OPC_CMP_OLE_S
:
11319 case OPC_CMP_ULE_S
:
11321 case OPC_CMP_NGLE_S
:
11322 case OPC_CMP_SEQ_S
:
11323 case OPC_CMP_NGL_S
:
11325 case OPC_CMP_NGE_S
:
11327 case OPC_CMP_NGT_S
:
11328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11329 if (ctx
->opcode
& (1 << 6)) {
11330 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11332 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11336 check_cp1_registers(ctx
, fs
| ft
| fd
);
11338 TCGv_i64 fp0
= tcg_temp_new_i64();
11339 TCGv_i64 fp1
= tcg_temp_new_i64();
11341 gen_load_fpr64(ctx
, fp0
, fs
);
11342 gen_load_fpr64(ctx
, fp1
, ft
);
11343 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11344 tcg_temp_free_i64(fp1
);
11345 gen_store_fpr64(ctx
, fp0
, fd
);
11346 tcg_temp_free_i64(fp0
);
11350 check_cp1_registers(ctx
, fs
| ft
| fd
);
11352 TCGv_i64 fp0
= tcg_temp_new_i64();
11353 TCGv_i64 fp1
= tcg_temp_new_i64();
11355 gen_load_fpr64(ctx
, fp0
, fs
);
11356 gen_load_fpr64(ctx
, fp1
, ft
);
11357 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11358 tcg_temp_free_i64(fp1
);
11359 gen_store_fpr64(ctx
, fp0
, fd
);
11360 tcg_temp_free_i64(fp0
);
11364 check_cp1_registers(ctx
, fs
| ft
| fd
);
11366 TCGv_i64 fp0
= tcg_temp_new_i64();
11367 TCGv_i64 fp1
= tcg_temp_new_i64();
11369 gen_load_fpr64(ctx
, fp0
, fs
);
11370 gen_load_fpr64(ctx
, fp1
, ft
);
11371 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11372 tcg_temp_free_i64(fp1
);
11373 gen_store_fpr64(ctx
, fp0
, fd
);
11374 tcg_temp_free_i64(fp0
);
11378 check_cp1_registers(ctx
, fs
| ft
| fd
);
11380 TCGv_i64 fp0
= tcg_temp_new_i64();
11381 TCGv_i64 fp1
= tcg_temp_new_i64();
11383 gen_load_fpr64(ctx
, fp0
, fs
);
11384 gen_load_fpr64(ctx
, fp1
, ft
);
11385 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11386 tcg_temp_free_i64(fp1
);
11387 gen_store_fpr64(ctx
, fp0
, fd
);
11388 tcg_temp_free_i64(fp0
);
11392 check_cp1_registers(ctx
, fs
| fd
);
11394 TCGv_i64 fp0
= tcg_temp_new_i64();
11396 gen_load_fpr64(ctx
, fp0
, fs
);
11397 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11398 gen_store_fpr64(ctx
, fp0
, fd
);
11399 tcg_temp_free_i64(fp0
);
11403 check_cp1_registers(ctx
, fs
| fd
);
11405 TCGv_i64 fp0
= tcg_temp_new_i64();
11407 gen_load_fpr64(ctx
, fp0
, fs
);
11408 if (ctx
->abs2008
) {
11409 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11411 gen_helper_float_abs_d(fp0
, fp0
);
11413 gen_store_fpr64(ctx
, fp0
, fd
);
11414 tcg_temp_free_i64(fp0
);
11418 check_cp1_registers(ctx
, fs
| fd
);
11420 TCGv_i64 fp0
= tcg_temp_new_i64();
11422 gen_load_fpr64(ctx
, fp0
, fs
);
11423 gen_store_fpr64(ctx
, fp0
, fd
);
11424 tcg_temp_free_i64(fp0
);
11428 check_cp1_registers(ctx
, fs
| fd
);
11430 TCGv_i64 fp0
= tcg_temp_new_i64();
11432 gen_load_fpr64(ctx
, fp0
, fs
);
11433 if (ctx
->abs2008
) {
11434 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11436 gen_helper_float_chs_d(fp0
, fp0
);
11438 gen_store_fpr64(ctx
, fp0
, fd
);
11439 tcg_temp_free_i64(fp0
);
11442 case OPC_ROUND_L_D
:
11443 check_cp1_64bitmode(ctx
);
11445 TCGv_i64 fp0
= tcg_temp_new_i64();
11447 gen_load_fpr64(ctx
, fp0
, fs
);
11448 if (ctx
->nan2008
) {
11449 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11451 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11453 gen_store_fpr64(ctx
, fp0
, fd
);
11454 tcg_temp_free_i64(fp0
);
11457 case OPC_TRUNC_L_D
:
11458 check_cp1_64bitmode(ctx
);
11460 TCGv_i64 fp0
= tcg_temp_new_i64();
11462 gen_load_fpr64(ctx
, fp0
, fs
);
11463 if (ctx
->nan2008
) {
11464 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11466 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11468 gen_store_fpr64(ctx
, fp0
, fd
);
11469 tcg_temp_free_i64(fp0
);
11473 check_cp1_64bitmode(ctx
);
11475 TCGv_i64 fp0
= tcg_temp_new_i64();
11477 gen_load_fpr64(ctx
, fp0
, fs
);
11478 if (ctx
->nan2008
) {
11479 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11481 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11483 gen_store_fpr64(ctx
, fp0
, fd
);
11484 tcg_temp_free_i64(fp0
);
11487 case OPC_FLOOR_L_D
:
11488 check_cp1_64bitmode(ctx
);
11490 TCGv_i64 fp0
= tcg_temp_new_i64();
11492 gen_load_fpr64(ctx
, fp0
, fs
);
11493 if (ctx
->nan2008
) {
11494 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11496 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11498 gen_store_fpr64(ctx
, fp0
, fd
);
11499 tcg_temp_free_i64(fp0
);
11502 case OPC_ROUND_W_D
:
11503 check_cp1_registers(ctx
, fs
);
11505 TCGv_i32 fp32
= tcg_temp_new_i32();
11506 TCGv_i64 fp64
= tcg_temp_new_i64();
11508 gen_load_fpr64(ctx
, fp64
, fs
);
11509 if (ctx
->nan2008
) {
11510 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11512 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11514 tcg_temp_free_i64(fp64
);
11515 gen_store_fpr32(ctx
, fp32
, fd
);
11516 tcg_temp_free_i32(fp32
);
11519 case OPC_TRUNC_W_D
:
11520 check_cp1_registers(ctx
, fs
);
11522 TCGv_i32 fp32
= tcg_temp_new_i32();
11523 TCGv_i64 fp64
= tcg_temp_new_i64();
11525 gen_load_fpr64(ctx
, fp64
, fs
);
11526 if (ctx
->nan2008
) {
11527 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11529 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11531 tcg_temp_free_i64(fp64
);
11532 gen_store_fpr32(ctx
, fp32
, fd
);
11533 tcg_temp_free_i32(fp32
);
11537 check_cp1_registers(ctx
, fs
);
11539 TCGv_i32 fp32
= tcg_temp_new_i32();
11540 TCGv_i64 fp64
= tcg_temp_new_i64();
11542 gen_load_fpr64(ctx
, fp64
, fs
);
11543 if (ctx
->nan2008
) {
11544 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11546 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11548 tcg_temp_free_i64(fp64
);
11549 gen_store_fpr32(ctx
, fp32
, fd
);
11550 tcg_temp_free_i32(fp32
);
11553 case OPC_FLOOR_W_D
:
11554 check_cp1_registers(ctx
, fs
);
11556 TCGv_i32 fp32
= tcg_temp_new_i32();
11557 TCGv_i64 fp64
= tcg_temp_new_i64();
11559 gen_load_fpr64(ctx
, fp64
, fs
);
11560 if (ctx
->nan2008
) {
11561 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11563 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11565 tcg_temp_free_i64(fp64
);
11566 gen_store_fpr32(ctx
, fp32
, fd
);
11567 tcg_temp_free_i32(fp32
);
11571 check_insn(ctx
, ISA_MIPS32R6
);
11572 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11575 check_insn(ctx
, ISA_MIPS32R6
);
11576 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11579 check_insn(ctx
, ISA_MIPS32R6
);
11580 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11583 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11584 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11587 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11589 TCGLabel
*l1
= gen_new_label();
11593 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11595 fp0
= tcg_temp_new_i64();
11596 gen_load_fpr64(ctx
, fp0
, fs
);
11597 gen_store_fpr64(ctx
, fp0
, fd
);
11598 tcg_temp_free_i64(fp0
);
11603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11605 TCGLabel
*l1
= gen_new_label();
11609 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11610 fp0
= tcg_temp_new_i64();
11611 gen_load_fpr64(ctx
, fp0
, fs
);
11612 gen_store_fpr64(ctx
, fp0
, fd
);
11613 tcg_temp_free_i64(fp0
);
11619 check_cp1_registers(ctx
, fs
| fd
);
11621 TCGv_i64 fp0
= tcg_temp_new_i64();
11623 gen_load_fpr64(ctx
, fp0
, fs
);
11624 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11625 gen_store_fpr64(ctx
, fp0
, fd
);
11626 tcg_temp_free_i64(fp0
);
11630 check_cp1_registers(ctx
, fs
| fd
);
11632 TCGv_i64 fp0
= tcg_temp_new_i64();
11634 gen_load_fpr64(ctx
, fp0
, fs
);
11635 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11636 gen_store_fpr64(ctx
, fp0
, fd
);
11637 tcg_temp_free_i64(fp0
);
11641 check_insn(ctx
, ISA_MIPS32R6
);
11643 TCGv_i64 fp0
= tcg_temp_new_i64();
11644 TCGv_i64 fp1
= tcg_temp_new_i64();
11645 TCGv_i64 fp2
= tcg_temp_new_i64();
11646 gen_load_fpr64(ctx
, fp0
, fs
);
11647 gen_load_fpr64(ctx
, fp1
, ft
);
11648 gen_load_fpr64(ctx
, fp2
, fd
);
11649 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11650 gen_store_fpr64(ctx
, fp2
, fd
);
11651 tcg_temp_free_i64(fp2
);
11652 tcg_temp_free_i64(fp1
);
11653 tcg_temp_free_i64(fp0
);
11657 check_insn(ctx
, ISA_MIPS32R6
);
11659 TCGv_i64 fp0
= tcg_temp_new_i64();
11660 TCGv_i64 fp1
= tcg_temp_new_i64();
11661 TCGv_i64 fp2
= tcg_temp_new_i64();
11662 gen_load_fpr64(ctx
, fp0
, fs
);
11663 gen_load_fpr64(ctx
, fp1
, ft
);
11664 gen_load_fpr64(ctx
, fp2
, fd
);
11665 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11666 gen_store_fpr64(ctx
, fp2
, fd
);
11667 tcg_temp_free_i64(fp2
);
11668 tcg_temp_free_i64(fp1
);
11669 tcg_temp_free_i64(fp0
);
11673 check_insn(ctx
, ISA_MIPS32R6
);
11675 TCGv_i64 fp0
= tcg_temp_new_i64();
11676 gen_load_fpr64(ctx
, fp0
, fs
);
11677 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11678 gen_store_fpr64(ctx
, fp0
, fd
);
11679 tcg_temp_free_i64(fp0
);
11683 check_insn(ctx
, ISA_MIPS32R6
);
11685 TCGv_i64 fp0
= tcg_temp_new_i64();
11686 gen_load_fpr64(ctx
, fp0
, fs
);
11687 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11688 gen_store_fpr64(ctx
, fp0
, fd
);
11689 tcg_temp_free_i64(fp0
);
11692 case OPC_MIN_D
: /* OPC_RECIP2_D */
11693 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11695 TCGv_i64 fp0
= tcg_temp_new_i64();
11696 TCGv_i64 fp1
= tcg_temp_new_i64();
11697 gen_load_fpr64(ctx
, fp0
, fs
);
11698 gen_load_fpr64(ctx
, fp1
, ft
);
11699 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11700 gen_store_fpr64(ctx
, fp1
, fd
);
11701 tcg_temp_free_i64(fp1
);
11702 tcg_temp_free_i64(fp0
);
11705 check_cp1_64bitmode(ctx
);
11707 TCGv_i64 fp0
= tcg_temp_new_i64();
11708 TCGv_i64 fp1
= tcg_temp_new_i64();
11710 gen_load_fpr64(ctx
, fp0
, fs
);
11711 gen_load_fpr64(ctx
, fp1
, ft
);
11712 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11713 tcg_temp_free_i64(fp1
);
11714 gen_store_fpr64(ctx
, fp0
, fd
);
11715 tcg_temp_free_i64(fp0
);
11719 case OPC_MINA_D
: /* OPC_RECIP1_D */
11720 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11722 TCGv_i64 fp0
= tcg_temp_new_i64();
11723 TCGv_i64 fp1
= tcg_temp_new_i64();
11724 gen_load_fpr64(ctx
, fp0
, fs
);
11725 gen_load_fpr64(ctx
, fp1
, ft
);
11726 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11727 gen_store_fpr64(ctx
, fp1
, fd
);
11728 tcg_temp_free_i64(fp1
);
11729 tcg_temp_free_i64(fp0
);
11732 check_cp1_64bitmode(ctx
);
11734 TCGv_i64 fp0
= tcg_temp_new_i64();
11736 gen_load_fpr64(ctx
, fp0
, fs
);
11737 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11738 gen_store_fpr64(ctx
, fp0
, fd
);
11739 tcg_temp_free_i64(fp0
);
11743 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11744 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11746 TCGv_i64 fp0
= tcg_temp_new_i64();
11747 TCGv_i64 fp1
= tcg_temp_new_i64();
11748 gen_load_fpr64(ctx
, fp0
, fs
);
11749 gen_load_fpr64(ctx
, fp1
, ft
);
11750 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11751 gen_store_fpr64(ctx
, fp1
, fd
);
11752 tcg_temp_free_i64(fp1
);
11753 tcg_temp_free_i64(fp0
);
11756 check_cp1_64bitmode(ctx
);
11758 TCGv_i64 fp0
= tcg_temp_new_i64();
11760 gen_load_fpr64(ctx
, fp0
, fs
);
11761 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11762 gen_store_fpr64(ctx
, fp0
, fd
);
11763 tcg_temp_free_i64(fp0
);
11767 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11768 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11770 TCGv_i64 fp0
= tcg_temp_new_i64();
11771 TCGv_i64 fp1
= tcg_temp_new_i64();
11772 gen_load_fpr64(ctx
, fp0
, fs
);
11773 gen_load_fpr64(ctx
, fp1
, ft
);
11774 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11775 gen_store_fpr64(ctx
, fp1
, fd
);
11776 tcg_temp_free_i64(fp1
);
11777 tcg_temp_free_i64(fp0
);
11780 check_cp1_64bitmode(ctx
);
11782 TCGv_i64 fp0
= tcg_temp_new_i64();
11783 TCGv_i64 fp1
= tcg_temp_new_i64();
11785 gen_load_fpr64(ctx
, fp0
, fs
);
11786 gen_load_fpr64(ctx
, fp1
, ft
);
11787 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11788 tcg_temp_free_i64(fp1
);
11789 gen_store_fpr64(ctx
, fp0
, fd
);
11790 tcg_temp_free_i64(fp0
);
11797 case OPC_CMP_UEQ_D
:
11798 case OPC_CMP_OLT_D
:
11799 case OPC_CMP_ULT_D
:
11800 case OPC_CMP_OLE_D
:
11801 case OPC_CMP_ULE_D
:
11803 case OPC_CMP_NGLE_D
:
11804 case OPC_CMP_SEQ_D
:
11805 case OPC_CMP_NGL_D
:
11807 case OPC_CMP_NGE_D
:
11809 case OPC_CMP_NGT_D
:
11810 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11811 if (ctx
->opcode
& (1 << 6)) {
11812 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11814 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11818 check_cp1_registers(ctx
, fs
);
11820 TCGv_i32 fp32
= tcg_temp_new_i32();
11821 TCGv_i64 fp64
= tcg_temp_new_i64();
11823 gen_load_fpr64(ctx
, fp64
, fs
);
11824 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11825 tcg_temp_free_i64(fp64
);
11826 gen_store_fpr32(ctx
, fp32
, fd
);
11827 tcg_temp_free_i32(fp32
);
11831 check_cp1_registers(ctx
, fs
);
11833 TCGv_i32 fp32
= tcg_temp_new_i32();
11834 TCGv_i64 fp64
= tcg_temp_new_i64();
11836 gen_load_fpr64(ctx
, fp64
, fs
);
11837 if (ctx
->nan2008
) {
11838 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11840 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11842 tcg_temp_free_i64(fp64
);
11843 gen_store_fpr32(ctx
, fp32
, fd
);
11844 tcg_temp_free_i32(fp32
);
11848 check_cp1_64bitmode(ctx
);
11850 TCGv_i64 fp0
= tcg_temp_new_i64();
11852 gen_load_fpr64(ctx
, fp0
, fs
);
11853 if (ctx
->nan2008
) {
11854 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11856 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11858 gen_store_fpr64(ctx
, fp0
, fd
);
11859 tcg_temp_free_i64(fp0
);
11864 TCGv_i32 fp0
= tcg_temp_new_i32();
11866 gen_load_fpr32(ctx
, fp0
, fs
);
11867 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11868 gen_store_fpr32(ctx
, fp0
, fd
);
11869 tcg_temp_free_i32(fp0
);
11873 check_cp1_registers(ctx
, fd
);
11875 TCGv_i32 fp32
= tcg_temp_new_i32();
11876 TCGv_i64 fp64
= tcg_temp_new_i64();
11878 gen_load_fpr32(ctx
, fp32
, fs
);
11879 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11880 tcg_temp_free_i32(fp32
);
11881 gen_store_fpr64(ctx
, fp64
, fd
);
11882 tcg_temp_free_i64(fp64
);
11886 check_cp1_64bitmode(ctx
);
11888 TCGv_i32 fp32
= tcg_temp_new_i32();
11889 TCGv_i64 fp64
= tcg_temp_new_i64();
11891 gen_load_fpr64(ctx
, fp64
, fs
);
11892 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11893 tcg_temp_free_i64(fp64
);
11894 gen_store_fpr32(ctx
, fp32
, fd
);
11895 tcg_temp_free_i32(fp32
);
11899 check_cp1_64bitmode(ctx
);
11901 TCGv_i64 fp0
= tcg_temp_new_i64();
11903 gen_load_fpr64(ctx
, fp0
, fs
);
11904 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11905 gen_store_fpr64(ctx
, fp0
, fd
);
11906 tcg_temp_free_i64(fp0
);
11909 case OPC_CVT_PS_PW
:
11912 TCGv_i64 fp0
= tcg_temp_new_i64();
11914 gen_load_fpr64(ctx
, fp0
, fs
);
11915 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11916 gen_store_fpr64(ctx
, fp0
, fd
);
11917 tcg_temp_free_i64(fp0
);
11923 TCGv_i64 fp0
= tcg_temp_new_i64();
11924 TCGv_i64 fp1
= tcg_temp_new_i64();
11926 gen_load_fpr64(ctx
, fp0
, fs
);
11927 gen_load_fpr64(ctx
, fp1
, ft
);
11928 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11929 tcg_temp_free_i64(fp1
);
11930 gen_store_fpr64(ctx
, fp0
, fd
);
11931 tcg_temp_free_i64(fp0
);
11937 TCGv_i64 fp0
= tcg_temp_new_i64();
11938 TCGv_i64 fp1
= tcg_temp_new_i64();
11940 gen_load_fpr64(ctx
, fp0
, fs
);
11941 gen_load_fpr64(ctx
, fp1
, ft
);
11942 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11943 tcg_temp_free_i64(fp1
);
11944 gen_store_fpr64(ctx
, fp0
, fd
);
11945 tcg_temp_free_i64(fp0
);
11951 TCGv_i64 fp0
= tcg_temp_new_i64();
11952 TCGv_i64 fp1
= tcg_temp_new_i64();
11954 gen_load_fpr64(ctx
, fp0
, fs
);
11955 gen_load_fpr64(ctx
, fp1
, ft
);
11956 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11957 tcg_temp_free_i64(fp1
);
11958 gen_store_fpr64(ctx
, fp0
, fd
);
11959 tcg_temp_free_i64(fp0
);
11965 TCGv_i64 fp0
= tcg_temp_new_i64();
11967 gen_load_fpr64(ctx
, fp0
, fs
);
11968 gen_helper_float_abs_ps(fp0
, fp0
);
11969 gen_store_fpr64(ctx
, fp0
, fd
);
11970 tcg_temp_free_i64(fp0
);
11976 TCGv_i64 fp0
= tcg_temp_new_i64();
11978 gen_load_fpr64(ctx
, fp0
, fs
);
11979 gen_store_fpr64(ctx
, fp0
, fd
);
11980 tcg_temp_free_i64(fp0
);
11986 TCGv_i64 fp0
= tcg_temp_new_i64();
11988 gen_load_fpr64(ctx
, fp0
, fs
);
11989 gen_helper_float_chs_ps(fp0
, fp0
);
11990 gen_store_fpr64(ctx
, fp0
, fd
);
11991 tcg_temp_free_i64(fp0
);
11996 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12001 TCGLabel
*l1
= gen_new_label();
12005 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12006 fp0
= tcg_temp_new_i64();
12007 gen_load_fpr64(ctx
, fp0
, fs
);
12008 gen_store_fpr64(ctx
, fp0
, fd
);
12009 tcg_temp_free_i64(fp0
);
12016 TCGLabel
*l1
= gen_new_label();
12020 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12021 fp0
= tcg_temp_new_i64();
12022 gen_load_fpr64(ctx
, fp0
, fs
);
12023 gen_store_fpr64(ctx
, fp0
, fd
);
12024 tcg_temp_free_i64(fp0
);
12032 TCGv_i64 fp0
= tcg_temp_new_i64();
12033 TCGv_i64 fp1
= tcg_temp_new_i64();
12035 gen_load_fpr64(ctx
, fp0
, ft
);
12036 gen_load_fpr64(ctx
, fp1
, fs
);
12037 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12038 tcg_temp_free_i64(fp1
);
12039 gen_store_fpr64(ctx
, fp0
, fd
);
12040 tcg_temp_free_i64(fp0
);
12046 TCGv_i64 fp0
= tcg_temp_new_i64();
12047 TCGv_i64 fp1
= tcg_temp_new_i64();
12049 gen_load_fpr64(ctx
, fp0
, ft
);
12050 gen_load_fpr64(ctx
, fp1
, fs
);
12051 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12052 tcg_temp_free_i64(fp1
);
12053 gen_store_fpr64(ctx
, fp0
, fd
);
12054 tcg_temp_free_i64(fp0
);
12057 case OPC_RECIP2_PS
:
12060 TCGv_i64 fp0
= tcg_temp_new_i64();
12061 TCGv_i64 fp1
= tcg_temp_new_i64();
12063 gen_load_fpr64(ctx
, fp0
, fs
);
12064 gen_load_fpr64(ctx
, fp1
, ft
);
12065 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12066 tcg_temp_free_i64(fp1
);
12067 gen_store_fpr64(ctx
, fp0
, fd
);
12068 tcg_temp_free_i64(fp0
);
12071 case OPC_RECIP1_PS
:
12074 TCGv_i64 fp0
= tcg_temp_new_i64();
12076 gen_load_fpr64(ctx
, fp0
, fs
);
12077 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12078 gen_store_fpr64(ctx
, fp0
, fd
);
12079 tcg_temp_free_i64(fp0
);
12082 case OPC_RSQRT1_PS
:
12085 TCGv_i64 fp0
= tcg_temp_new_i64();
12087 gen_load_fpr64(ctx
, fp0
, fs
);
12088 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12089 gen_store_fpr64(ctx
, fp0
, fd
);
12090 tcg_temp_free_i64(fp0
);
12093 case OPC_RSQRT2_PS
:
12096 TCGv_i64 fp0
= tcg_temp_new_i64();
12097 TCGv_i64 fp1
= tcg_temp_new_i64();
12099 gen_load_fpr64(ctx
, fp0
, fs
);
12100 gen_load_fpr64(ctx
, fp1
, ft
);
12101 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12102 tcg_temp_free_i64(fp1
);
12103 gen_store_fpr64(ctx
, fp0
, fd
);
12104 tcg_temp_free_i64(fp0
);
12108 check_cp1_64bitmode(ctx
);
12110 TCGv_i32 fp0
= tcg_temp_new_i32();
12112 gen_load_fpr32h(ctx
, fp0
, fs
);
12113 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12114 gen_store_fpr32(ctx
, fp0
, fd
);
12115 tcg_temp_free_i32(fp0
);
12118 case OPC_CVT_PW_PS
:
12121 TCGv_i64 fp0
= tcg_temp_new_i64();
12123 gen_load_fpr64(ctx
, fp0
, fs
);
12124 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12125 gen_store_fpr64(ctx
, fp0
, fd
);
12126 tcg_temp_free_i64(fp0
);
12130 check_cp1_64bitmode(ctx
);
12132 TCGv_i32 fp0
= tcg_temp_new_i32();
12134 gen_load_fpr32(ctx
, fp0
, fs
);
12135 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12136 gen_store_fpr32(ctx
, fp0
, fd
);
12137 tcg_temp_free_i32(fp0
);
12143 TCGv_i32 fp0
= tcg_temp_new_i32();
12144 TCGv_i32 fp1
= tcg_temp_new_i32();
12146 gen_load_fpr32(ctx
, fp0
, fs
);
12147 gen_load_fpr32(ctx
, fp1
, ft
);
12148 gen_store_fpr32h(ctx
, fp0
, fd
);
12149 gen_store_fpr32(ctx
, fp1
, fd
);
12150 tcg_temp_free_i32(fp0
);
12151 tcg_temp_free_i32(fp1
);
12157 TCGv_i32 fp0
= tcg_temp_new_i32();
12158 TCGv_i32 fp1
= tcg_temp_new_i32();
12160 gen_load_fpr32(ctx
, fp0
, fs
);
12161 gen_load_fpr32h(ctx
, fp1
, ft
);
12162 gen_store_fpr32(ctx
, fp1
, fd
);
12163 gen_store_fpr32h(ctx
, fp0
, fd
);
12164 tcg_temp_free_i32(fp0
);
12165 tcg_temp_free_i32(fp1
);
12171 TCGv_i32 fp0
= tcg_temp_new_i32();
12172 TCGv_i32 fp1
= tcg_temp_new_i32();
12174 gen_load_fpr32h(ctx
, fp0
, fs
);
12175 gen_load_fpr32(ctx
, fp1
, ft
);
12176 gen_store_fpr32(ctx
, fp1
, fd
);
12177 gen_store_fpr32h(ctx
, fp0
, fd
);
12178 tcg_temp_free_i32(fp0
);
12179 tcg_temp_free_i32(fp1
);
12185 TCGv_i32 fp0
= tcg_temp_new_i32();
12186 TCGv_i32 fp1
= tcg_temp_new_i32();
12188 gen_load_fpr32h(ctx
, fp0
, fs
);
12189 gen_load_fpr32h(ctx
, fp1
, ft
);
12190 gen_store_fpr32(ctx
, fp1
, fd
);
12191 gen_store_fpr32h(ctx
, fp0
, fd
);
12192 tcg_temp_free_i32(fp0
);
12193 tcg_temp_free_i32(fp1
);
12197 case OPC_CMP_UN_PS
:
12198 case OPC_CMP_EQ_PS
:
12199 case OPC_CMP_UEQ_PS
:
12200 case OPC_CMP_OLT_PS
:
12201 case OPC_CMP_ULT_PS
:
12202 case OPC_CMP_OLE_PS
:
12203 case OPC_CMP_ULE_PS
:
12204 case OPC_CMP_SF_PS
:
12205 case OPC_CMP_NGLE_PS
:
12206 case OPC_CMP_SEQ_PS
:
12207 case OPC_CMP_NGL_PS
:
12208 case OPC_CMP_LT_PS
:
12209 case OPC_CMP_NGE_PS
:
12210 case OPC_CMP_LE_PS
:
12211 case OPC_CMP_NGT_PS
:
12212 if (ctx
->opcode
& (1 << 6)) {
12213 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12215 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12219 MIPS_INVAL("farith");
12220 generate_exception_end(ctx
, EXCP_RI
);
12225 /* Coprocessor 3 (FPU) */
/*
 * NOTE(review): this region looks like a corrupted extraction of a MIPS
 * TCG translator: each logical statement is split across multiple lines
 * and the original file's line numbers are fused into the text (the
 * leading "12226", "12227", ... tokens).  Structural lines -- the
 * "switch (opc)" header, its case labels (presumably the indexed FPU
 * load/store opcodes), "break" statements, "else" keywords and several
 * braces -- are missing from this view, so the code below cannot compile
 * as-is.  The comments mark the apparent case boundaries; confirm every
 * annotation against the original file before relying on it.
 */
/*
 * Translate an indexed (register + register addressing) FPU load/store.
 * fd/fs are FPR numbers, base/index are GPR numbers; opc selects which
 * of the load/store variants below is emitted.
 */
12226 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12227 int fd
, int fs
, int base
, int index
)
/* t0 holds the computed effective address. */
12229 TCGv t0
= tcg_temp_new();
/* NOTE(review): a leading "if (base == 0) {" test is apparently missing
 * here -- with base == r0 the address is just the index register. */
12232 gen_load_gpr(t0
, index
);
12233 } else if (index
== 0) {
/* index == r0: the effective address is just the base register. */
12234 gen_load_gpr(t0
, base
);
/* Both registers non-zero: effective address = base + index.
 * NOTE(review): the "} else {" introducing this branch is not visible. */
12236 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12238 /* Don't do NOP if destination is zero: we must perform the actual
         memory access. */
/* NOTE(review): a "switch (opc)" and its first case label (a 32-bit
 * indexed FPU load, to judge from the MO_TESL access) are missing here. */
12244 TCGv_i32 fp0
= tcg_temp_new_i32();
/* Load 32 bits (sign-extended into the target-long temp), truncate to
 * i32, and store into single-precision FPR fd. */
12246 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12247 tcg_gen_trunc_tl_i32(fp0
, t0
);
12248 gen_store_fpr32(ctx
, fp0
, fd
);
12249 tcg_temp_free_i32(fp0
);
/* Apparent next case: 64-bit indexed FPU load.  fd must name a valid
 * 64-bit FPR (pair) -- hence the register check. */
12254 check_cp1_registers(ctx
, fd
);
12256 TCGv_i64 fp0
= tcg_temp_new_i64();
12257 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12258 gen_store_fpr64(ctx
, fp0
, fd
);
12259 tcg_temp_free_i64(fp0
);
/* Apparent next case: "unaligned" 64-bit indexed load -- requires
 * 64-bit FPU mode and forces the address to an 8-byte boundary first. */
12263 check_cp1_64bitmode(ctx
);
12264 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12266 TCGv_i64 fp0
= tcg_temp_new_i64();
12268 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12269 gen_store_fpr64(ctx
, fp0
, fd
);
12270 tcg_temp_free_i64(fp0
);
/* Apparent next case: 32-bit indexed FPU store -- read single-precision
 * FPR fs and store it to the computed address. */
12276 TCGv_i32 fp0
= tcg_temp_new_i32();
12277 gen_load_fpr32(ctx
, fp0
, fs
);
12278 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12279 tcg_temp_free_i32(fp0
);
/* Apparent next case: 64-bit indexed FPU store.  fs must name a valid
 * 64-bit FPR (pair). */
12284 check_cp1_registers(ctx
, fs
);
12286 TCGv_i64 fp0
= tcg_temp_new_i64();
12287 gen_load_fpr64(ctx
, fp0
, fs
);
12288 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12289 tcg_temp_free_i64(fp0
);
/* Apparent last case: "unaligned" 64-bit indexed store -- 64-bit FPU
 * mode required, address forced to an 8-byte boundary.
 * NOTE(review): the closing of the switch and the matching
 * tcg_temp_free(t0) for the address temp are not visible in this view;
 * verify against the original file. */
12293 check_cp1_64bitmode(ctx
);
12294 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12296 TCGv_i64 fp0
= tcg_temp_new_i64();
12297 gen_load_fpr64(ctx
, fp0
, fs
);
12298 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12299 tcg_temp_free_i64(fp0
);
12306 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12307 int fd
, int fr
, int fs
, int ft
)
12313 TCGv t0
= tcg_temp_local_new();
12314 TCGv_i32 fp
= tcg_temp_new_i32();
12315 TCGv_i32 fph
= tcg_temp_new_i32();
12316 TCGLabel
*l1
= gen_new_label();
12317 TCGLabel
*l2
= gen_new_label();
12319 gen_load_gpr(t0
, fr
);
12320 tcg_gen_andi_tl(t0
, t0
, 0x7);
12322 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12323 gen_load_fpr32(ctx
, fp
, fs
);
12324 gen_load_fpr32h(ctx
, fph
, fs
);
12325 gen_store_fpr32(ctx
, fp
, fd
);
12326 gen_store_fpr32h(ctx
, fph
, fd
);
12329 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12331 #ifdef TARGET_WORDS_BIGENDIAN
12332 gen_load_fpr32(ctx
, fp
, fs
);
12333 gen_load_fpr32h(ctx
, fph
, ft
);
12334 gen_store_fpr32h(ctx
, fp
, fd
);
12335 gen_store_fpr32(ctx
, fph
, fd
);
12337 gen_load_fpr32h(ctx
, fph
, fs
);
12338 gen_load_fpr32(ctx
, fp
, ft
);
12339 gen_store_fpr32(ctx
, fph
, fd
);
12340 gen_store_fpr32h(ctx
, fp
, fd
);
12343 tcg_temp_free_i32(fp
);
12344 tcg_temp_free_i32(fph
);
12350 TCGv_i32 fp0
= tcg_temp_new_i32();
12351 TCGv_i32 fp1
= tcg_temp_new_i32();
12352 TCGv_i32 fp2
= tcg_temp_new_i32();
12354 gen_load_fpr32(ctx
, fp0
, fs
);
12355 gen_load_fpr32(ctx
, fp1
, ft
);
12356 gen_load_fpr32(ctx
, fp2
, fr
);
12357 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12358 tcg_temp_free_i32(fp0
);
12359 tcg_temp_free_i32(fp1
);
12360 gen_store_fpr32(ctx
, fp2
, fd
);
12361 tcg_temp_free_i32(fp2
);
12366 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12368 TCGv_i64 fp0
= tcg_temp_new_i64();
12369 TCGv_i64 fp1
= tcg_temp_new_i64();
12370 TCGv_i64 fp2
= tcg_temp_new_i64();
12372 gen_load_fpr64(ctx
, fp0
, fs
);
12373 gen_load_fpr64(ctx
, fp1
, ft
);
12374 gen_load_fpr64(ctx
, fp2
, fr
);
12375 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12376 tcg_temp_free_i64(fp0
);
12377 tcg_temp_free_i64(fp1
);
12378 gen_store_fpr64(ctx
, fp2
, fd
);
12379 tcg_temp_free_i64(fp2
);
12385 TCGv_i64 fp0
= tcg_temp_new_i64();
12386 TCGv_i64 fp1
= tcg_temp_new_i64();
12387 TCGv_i64 fp2
= tcg_temp_new_i64();
12389 gen_load_fpr64(ctx
, fp0
, fs
);
12390 gen_load_fpr64(ctx
, fp1
, ft
);
12391 gen_load_fpr64(ctx
, fp2
, fr
);
12392 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12393 tcg_temp_free_i64(fp0
);
12394 tcg_temp_free_i64(fp1
);
12395 gen_store_fpr64(ctx
, fp2
, fd
);
12396 tcg_temp_free_i64(fp2
);
12402 TCGv_i32 fp0
= tcg_temp_new_i32();
12403 TCGv_i32 fp1
= tcg_temp_new_i32();
12404 TCGv_i32 fp2
= tcg_temp_new_i32();
12406 gen_load_fpr32(ctx
, fp0
, fs
);
12407 gen_load_fpr32(ctx
, fp1
, ft
);
12408 gen_load_fpr32(ctx
, fp2
, fr
);
12409 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12410 tcg_temp_free_i32(fp0
);
12411 tcg_temp_free_i32(fp1
);
12412 gen_store_fpr32(ctx
, fp2
, fd
);
12413 tcg_temp_free_i32(fp2
);
12418 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12420 TCGv_i64 fp0
= tcg_temp_new_i64();
12421 TCGv_i64 fp1
= tcg_temp_new_i64();
12422 TCGv_i64 fp2
= tcg_temp_new_i64();
12424 gen_load_fpr64(ctx
, fp0
, fs
);
12425 gen_load_fpr64(ctx
, fp1
, ft
);
12426 gen_load_fpr64(ctx
, fp2
, fr
);
12427 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12428 tcg_temp_free_i64(fp0
);
12429 tcg_temp_free_i64(fp1
);
12430 gen_store_fpr64(ctx
, fp2
, fd
);
12431 tcg_temp_free_i64(fp2
);
12437 TCGv_i64 fp0
= tcg_temp_new_i64();
12438 TCGv_i64 fp1
= tcg_temp_new_i64();
12439 TCGv_i64 fp2
= tcg_temp_new_i64();
12441 gen_load_fpr64(ctx
, fp0
, fs
);
12442 gen_load_fpr64(ctx
, fp1
, ft
);
12443 gen_load_fpr64(ctx
, fp2
, fr
);
12444 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12445 tcg_temp_free_i64(fp0
);
12446 tcg_temp_free_i64(fp1
);
12447 gen_store_fpr64(ctx
, fp2
, fd
);
12448 tcg_temp_free_i64(fp2
);
12454 TCGv_i32 fp0
= tcg_temp_new_i32();
12455 TCGv_i32 fp1
= tcg_temp_new_i32();
12456 TCGv_i32 fp2
= tcg_temp_new_i32();
12458 gen_load_fpr32(ctx
, fp0
, fs
);
12459 gen_load_fpr32(ctx
, fp1
, ft
);
12460 gen_load_fpr32(ctx
, fp2
, fr
);
12461 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12462 tcg_temp_free_i32(fp0
);
12463 tcg_temp_free_i32(fp1
);
12464 gen_store_fpr32(ctx
, fp2
, fd
);
12465 tcg_temp_free_i32(fp2
);
12470 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12472 TCGv_i64 fp0
= tcg_temp_new_i64();
12473 TCGv_i64 fp1
= tcg_temp_new_i64();
12474 TCGv_i64 fp2
= tcg_temp_new_i64();
12476 gen_load_fpr64(ctx
, fp0
, fs
);
12477 gen_load_fpr64(ctx
, fp1
, ft
);
12478 gen_load_fpr64(ctx
, fp2
, fr
);
12479 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12480 tcg_temp_free_i64(fp0
);
12481 tcg_temp_free_i64(fp1
);
12482 gen_store_fpr64(ctx
, fp2
, fd
);
12483 tcg_temp_free_i64(fp2
);
12489 TCGv_i64 fp0
= tcg_temp_new_i64();
12490 TCGv_i64 fp1
= tcg_temp_new_i64();
12491 TCGv_i64 fp2
= tcg_temp_new_i64();
12493 gen_load_fpr64(ctx
, fp0
, fs
);
12494 gen_load_fpr64(ctx
, fp1
, ft
);
12495 gen_load_fpr64(ctx
, fp2
, fr
);
12496 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12497 tcg_temp_free_i64(fp0
);
12498 tcg_temp_free_i64(fp1
);
12499 gen_store_fpr64(ctx
, fp2
, fd
);
12500 tcg_temp_free_i64(fp2
);
12506 TCGv_i32 fp0
= tcg_temp_new_i32();
12507 TCGv_i32 fp1
= tcg_temp_new_i32();
12508 TCGv_i32 fp2
= tcg_temp_new_i32();
12510 gen_load_fpr32(ctx
, fp0
, fs
);
12511 gen_load_fpr32(ctx
, fp1
, ft
);
12512 gen_load_fpr32(ctx
, fp2
, fr
);
12513 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12514 tcg_temp_free_i32(fp0
);
12515 tcg_temp_free_i32(fp1
);
12516 gen_store_fpr32(ctx
, fp2
, fd
);
12517 tcg_temp_free_i32(fp2
);
12522 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12524 TCGv_i64 fp0
= tcg_temp_new_i64();
12525 TCGv_i64 fp1
= tcg_temp_new_i64();
12526 TCGv_i64 fp2
= tcg_temp_new_i64();
12528 gen_load_fpr64(ctx
, fp0
, fs
);
12529 gen_load_fpr64(ctx
, fp1
, ft
);
12530 gen_load_fpr64(ctx
, fp2
, fr
);
12531 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12532 tcg_temp_free_i64(fp0
);
12533 tcg_temp_free_i64(fp1
);
12534 gen_store_fpr64(ctx
, fp2
, fd
);
12535 tcg_temp_free_i64(fp2
);
12541 TCGv_i64 fp0
= tcg_temp_new_i64();
12542 TCGv_i64 fp1
= tcg_temp_new_i64();
12543 TCGv_i64 fp2
= tcg_temp_new_i64();
12545 gen_load_fpr64(ctx
, fp0
, fs
);
12546 gen_load_fpr64(ctx
, fp1
, ft
);
12547 gen_load_fpr64(ctx
, fp2
, fr
);
12548 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12549 tcg_temp_free_i64(fp0
);
12550 tcg_temp_free_i64(fp1
);
12551 gen_store_fpr64(ctx
, fp2
, fd
);
12552 tcg_temp_free_i64(fp2
);
12556 MIPS_INVAL("flt3_arith");
12557 generate_exception_end(ctx
, EXCP_RI
);
12562 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12566 #if !defined(CONFIG_USER_ONLY)
12567 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12568 Therefore only check the ISA in system mode. */
12569 check_insn(ctx
, ISA_MIPS32R2
);
12571 t0
= tcg_temp_new();
12575 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12576 gen_store_gpr(t0
, rt
);
12579 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12580 gen_store_gpr(t0
, rt
);
12583 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12586 gen_helper_rdhwr_cc(t0
, cpu_env
);
12587 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12590 gen_store_gpr(t0
, rt
);
12591 /* Break the TB to be able to take timer interrupts immediately
12592 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12593 we break completely out of translated code. */
12594 gen_save_pc(ctx
->base
.pc_next
+ 4);
12595 ctx
->base
.is_jmp
= DISAS_EXIT
;
12598 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12599 gen_store_gpr(t0
, rt
);
12602 check_insn(ctx
, ISA_MIPS32R6
);
12604 /* Performance counter registers are not implemented other than
12605 * control register 0.
12607 generate_exception(ctx
, EXCP_RI
);
12609 gen_helper_rdhwr_performance(t0
, cpu_env
);
12610 gen_store_gpr(t0
, rt
);
12613 check_insn(ctx
, ISA_MIPS32R6
);
12614 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12615 gen_store_gpr(t0
, rt
);
12618 #if defined(CONFIG_USER_ONLY)
12619 tcg_gen_ld_tl(t0
, cpu_env
,
12620 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12621 gen_store_gpr(t0
, rt
);
12624 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12625 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12626 tcg_gen_ld_tl(t0
, cpu_env
,
12627 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12628 gen_store_gpr(t0
, rt
);
12630 generate_exception_end(ctx
, EXCP_RI
);
12634 default: /* Invalid */
12635 MIPS_INVAL("rdhwr");
12636 generate_exception_end(ctx
, EXCP_RI
);
12642 static inline void clear_branch_hflags(DisasContext
*ctx
)
12644 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12645 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12646 save_cpu_state(ctx
, 0);
12648 /* it is not safe to save ctx->hflags as hflags may be changed
12649 in execution time by the instruction in delay / forbidden slot. */
12650 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12654 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12656 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12657 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12658 /* Branches completion */
12659 clear_branch_hflags(ctx
);
12660 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12661 /* FIXME: Need to clear can_do_io. */
12662 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12663 case MIPS_HFLAG_FBNSLOT
:
12664 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12667 /* unconditional branch */
12668 if (proc_hflags
& MIPS_HFLAG_BX
) {
12669 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12671 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12673 case MIPS_HFLAG_BL
:
12674 /* blikely taken case */
12675 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12677 case MIPS_HFLAG_BC
:
12678 /* Conditional branch */
12680 TCGLabel
*l1
= gen_new_label();
12682 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12683 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12685 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12688 case MIPS_HFLAG_BR
:
12689 /* unconditional branch to register */
12690 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12691 TCGv t0
= tcg_temp_new();
12692 TCGv_i32 t1
= tcg_temp_new_i32();
12694 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12695 tcg_gen_trunc_tl_i32(t1
, t0
);
12697 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12698 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12699 tcg_gen_or_i32(hflags
, hflags
, t1
);
12700 tcg_temp_free_i32(t1
);
12702 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12704 tcg_gen_mov_tl(cpu_PC
, btarget
);
12706 if (ctx
->base
.singlestep_enabled
) {
12707 save_cpu_state(ctx
, 0);
12708 gen_helper_raise_exception_debug(cpu_env
);
12710 tcg_gen_lookup_and_goto_ptr();
12713 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12719 /* Compact Branches */
12720 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12721 int rs
, int rt
, int32_t offset
)
12723 int bcond_compute
= 0;
12724 TCGv t0
= tcg_temp_new();
12725 TCGv t1
= tcg_temp_new();
12726 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12728 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12729 #ifdef MIPS_DEBUG_DISAS
12730 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12731 "\n", ctx
->base
.pc_next
);
12733 generate_exception_end(ctx
, EXCP_RI
);
12737 /* Load needed operands and calculate btarget */
12739 /* compact branch */
12740 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12741 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12742 gen_load_gpr(t0
, rs
);
12743 gen_load_gpr(t1
, rt
);
12745 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12746 if (rs
<= rt
&& rs
== 0) {
12747 /* OPC_BEQZALC, OPC_BNEZALC */
12748 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12751 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12752 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12753 gen_load_gpr(t0
, rs
);
12754 gen_load_gpr(t1
, rt
);
12756 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12758 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12759 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12760 if (rs
== 0 || rs
== rt
) {
12761 /* OPC_BLEZALC, OPC_BGEZALC */
12762 /* OPC_BGTZALC, OPC_BLTZALC */
12763 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12765 gen_load_gpr(t0
, rs
);
12766 gen_load_gpr(t1
, rt
);
12768 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12772 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12777 /* OPC_BEQZC, OPC_BNEZC */
12778 gen_load_gpr(t0
, rs
);
12780 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12782 /* OPC_JIC, OPC_JIALC */
12783 TCGv tbase
= tcg_temp_new();
12784 TCGv toffset
= tcg_temp_new();
12786 gen_load_gpr(tbase
, rt
);
12787 tcg_gen_movi_tl(toffset
, offset
);
12788 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12789 tcg_temp_free(tbase
);
12790 tcg_temp_free(toffset
);
12794 MIPS_INVAL("Compact branch/jump");
12795 generate_exception_end(ctx
, EXCP_RI
);
12799 if (bcond_compute
== 0) {
12800 /* Uncoditional compact branch */
12803 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12806 ctx
->hflags
|= MIPS_HFLAG_BR
;
12809 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12812 ctx
->hflags
|= MIPS_HFLAG_B
;
12815 MIPS_INVAL("Compact branch/jump");
12816 generate_exception_end(ctx
, EXCP_RI
);
12820 /* Generating branch here as compact branches don't have delay slot */
12821 gen_branch(ctx
, 4);
12823 /* Conditional compact branch */
12824 TCGLabel
*fs
= gen_new_label();
12825 save_cpu_state(ctx
, 0);
12828 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12829 if (rs
== 0 && rt
!= 0) {
12831 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12832 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12834 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12837 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12840 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12841 if (rs
== 0 && rt
!= 0) {
12843 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12844 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12846 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12849 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12852 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12853 if (rs
== 0 && rt
!= 0) {
12855 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12856 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12858 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12861 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12864 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12865 if (rs
== 0 && rt
!= 0) {
12867 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12868 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12870 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12873 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12876 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12877 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12879 /* OPC_BOVC, OPC_BNVC */
12880 TCGv t2
= tcg_temp_new();
12881 TCGv t3
= tcg_temp_new();
12882 TCGv t4
= tcg_temp_new();
12883 TCGv input_overflow
= tcg_temp_new();
12885 gen_load_gpr(t0
, rs
);
12886 gen_load_gpr(t1
, rt
);
12887 tcg_gen_ext32s_tl(t2
, t0
);
12888 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12889 tcg_gen_ext32s_tl(t3
, t1
);
12890 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12891 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12893 tcg_gen_add_tl(t4
, t2
, t3
);
12894 tcg_gen_ext32s_tl(t4
, t4
);
12895 tcg_gen_xor_tl(t2
, t2
, t3
);
12896 tcg_gen_xor_tl(t3
, t4
, t3
);
12897 tcg_gen_andc_tl(t2
, t3
, t2
);
12898 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12899 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12900 if (opc
== OPC_BOVC
) {
12902 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12905 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12907 tcg_temp_free(input_overflow
);
12911 } else if (rs
< rt
&& rs
== 0) {
12912 /* OPC_BEQZALC, OPC_BNEZALC */
12913 if (opc
== OPC_BEQZALC
) {
12915 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12918 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12921 /* OPC_BEQC, OPC_BNEC */
12922 if (opc
== OPC_BEQC
) {
12924 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12927 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12932 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12935 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12938 MIPS_INVAL("Compact conditional branch/jump");
12939 generate_exception_end(ctx
, EXCP_RI
);
12943 /* Generating branch here as compact branches don't have delay slot */
12944 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12947 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12955 /* ISA extensions (ASEs) */
12956 /* MIPS16 extension to MIPS32 */
12958 /* MIPS16 major opcodes */
12960 M16_OPC_ADDIUSP
= 0x00,
12961 M16_OPC_ADDIUPC
= 0x01,
12963 M16_OPC_JAL
= 0x03,
12964 M16_OPC_BEQZ
= 0x04,
12965 M16_OPC_BNEQZ
= 0x05,
12966 M16_OPC_SHIFT
= 0x06,
12968 M16_OPC_RRIA
= 0x08,
12969 M16_OPC_ADDIU8
= 0x09,
12970 M16_OPC_SLTI
= 0x0a,
12971 M16_OPC_SLTIU
= 0x0b,
12974 M16_OPC_CMPI
= 0x0e,
12978 M16_OPC_LWSP
= 0x12,
12980 M16_OPC_LBU
= 0x14,
12981 M16_OPC_LHU
= 0x15,
12982 M16_OPC_LWPC
= 0x16,
12983 M16_OPC_LWU
= 0x17,
12986 M16_OPC_SWSP
= 0x1a,
12988 M16_OPC_RRR
= 0x1c,
12990 M16_OPC_EXTEND
= 0x1e,
12994 /* I8 funct field */
13013 /* RR funct field */
13047 /* I64 funct field */
13055 I64_DADDIUPC
= 0x6,
13059 /* RR ry field for CNVT */
13061 RR_RY_CNVT_ZEB
= 0x0,
13062 RR_RY_CNVT_ZEH
= 0x1,
13063 RR_RY_CNVT_ZEW
= 0x2,
13064 RR_RY_CNVT_SEB
= 0x4,
13065 RR_RY_CNVT_SEH
= 0x5,
13066 RR_RY_CNVT_SEW
= 0x6,
13069 static int xlat (int r
)
13071 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
13076 static void gen_mips16_save (DisasContext
*ctx
,
13077 int xsregs
, int aregs
,
13078 int do_ra
, int do_s0
, int do_s1
,
13081 TCGv t0
= tcg_temp_new();
13082 TCGv t1
= tcg_temp_new();
13083 TCGv t2
= tcg_temp_new();
13113 generate_exception_end(ctx
, EXCP_RI
);
13119 gen_base_offset_addr(ctx
, t0
, 29, 12);
13120 gen_load_gpr(t1
, 7);
13121 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13124 gen_base_offset_addr(ctx
, t0
, 29, 8);
13125 gen_load_gpr(t1
, 6);
13126 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13129 gen_base_offset_addr(ctx
, t0
, 29, 4);
13130 gen_load_gpr(t1
, 5);
13131 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13134 gen_base_offset_addr(ctx
, t0
, 29, 0);
13135 gen_load_gpr(t1
, 4);
13136 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13139 gen_load_gpr(t0
, 29);
13141 #define DECR_AND_STORE(reg) do { \
13142 tcg_gen_movi_tl(t2, -4); \
13143 gen_op_addr_add(ctx, t0, t0, t2); \
13144 gen_load_gpr(t1, reg); \
13145 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13149 DECR_AND_STORE(31);
13154 DECR_AND_STORE(30);
13157 DECR_AND_STORE(23);
13160 DECR_AND_STORE(22);
13163 DECR_AND_STORE(21);
13166 DECR_AND_STORE(20);
13169 DECR_AND_STORE(19);
13172 DECR_AND_STORE(18);
13176 DECR_AND_STORE(17);
13179 DECR_AND_STORE(16);
13209 generate_exception_end(ctx
, EXCP_RI
);
13225 #undef DECR_AND_STORE
13227 tcg_gen_movi_tl(t2
, -framesize
);
13228 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13234 static void gen_mips16_restore (DisasContext
*ctx
,
13235 int xsregs
, int aregs
,
13236 int do_ra
, int do_s0
, int do_s1
,
13240 TCGv t0
= tcg_temp_new();
13241 TCGv t1
= tcg_temp_new();
13242 TCGv t2
= tcg_temp_new();
13244 tcg_gen_movi_tl(t2
, framesize
);
13245 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13247 #define DECR_AND_LOAD(reg) do { \
13248 tcg_gen_movi_tl(t2, -4); \
13249 gen_op_addr_add(ctx, t0, t0, t2); \
13250 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13251 gen_store_gpr(t1, reg); \
13315 generate_exception_end(ctx
, EXCP_RI
);
13331 #undef DECR_AND_LOAD
13333 tcg_gen_movi_tl(t2
, framesize
);
13334 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13340 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13341 int is_64_bit
, int extended
)
13345 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13346 generate_exception_end(ctx
, EXCP_RI
);
13350 t0
= tcg_temp_new();
13352 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13353 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13355 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13361 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13364 TCGv_i32 t0
= tcg_const_i32(op
);
13365 TCGv t1
= tcg_temp_new();
13366 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13367 gen_helper_cache(cpu_env
, t1
, t0
);
13370 #if defined(TARGET_MIPS64)
13371 static void decode_i64_mips16 (DisasContext
*ctx
,
13372 int ry
, int funct
, int16_t offset
,
13377 check_insn(ctx
, ISA_MIPS3
);
13378 check_mips_64(ctx
);
13379 offset
= extended
? offset
: offset
<< 3;
13380 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13383 check_insn(ctx
, ISA_MIPS3
);
13384 check_mips_64(ctx
);
13385 offset
= extended
? offset
: offset
<< 3;
13386 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13389 check_insn(ctx
, ISA_MIPS3
);
13390 check_mips_64(ctx
);
13391 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13392 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13395 check_insn(ctx
, ISA_MIPS3
);
13396 check_mips_64(ctx
);
13397 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13398 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13401 check_insn(ctx
, ISA_MIPS3
);
13402 check_mips_64(ctx
);
13403 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13404 generate_exception_end(ctx
, EXCP_RI
);
13406 offset
= extended
? offset
: offset
<< 3;
13407 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13411 check_insn(ctx
, ISA_MIPS3
);
13412 check_mips_64(ctx
);
13413 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13414 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13417 check_insn(ctx
, ISA_MIPS3
);
13418 check_mips_64(ctx
);
13419 offset
= extended
? offset
: offset
<< 2;
13420 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13423 check_insn(ctx
, ISA_MIPS3
);
13424 check_mips_64(ctx
);
13425 offset
= extended
? offset
: offset
<< 2;
13426 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13432 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13434 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13435 int op
, rx
, ry
, funct
, sa
;
13436 int16_t imm
, offset
;
13438 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13439 op
= (ctx
->opcode
>> 11) & 0x1f;
13440 sa
= (ctx
->opcode
>> 22) & 0x1f;
13441 funct
= (ctx
->opcode
>> 8) & 0x7;
13442 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13443 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13444 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13445 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13446 | (ctx
->opcode
& 0x1f));
13448 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13451 case M16_OPC_ADDIUSP
:
13452 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13454 case M16_OPC_ADDIUPC
:
13455 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13458 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13459 /* No delay slot, so just process as a normal instruction */
13462 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13463 /* No delay slot, so just process as a normal instruction */
13465 case M16_OPC_BNEQZ
:
13466 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13467 /* No delay slot, so just process as a normal instruction */
13469 case M16_OPC_SHIFT
:
13470 switch (ctx
->opcode
& 0x3) {
13472 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13475 #if defined(TARGET_MIPS64)
13476 check_mips_64(ctx
);
13477 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13479 generate_exception_end(ctx
, EXCP_RI
);
13483 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13486 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13490 #if defined(TARGET_MIPS64)
13492 check_insn(ctx
, ISA_MIPS3
);
13493 check_mips_64(ctx
);
13494 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13498 imm
= ctx
->opcode
& 0xf;
13499 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13500 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13501 imm
= (int16_t) (imm
<< 1) >> 1;
13502 if ((ctx
->opcode
>> 4) & 0x1) {
13503 #if defined(TARGET_MIPS64)
13504 check_mips_64(ctx
);
13505 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13507 generate_exception_end(ctx
, EXCP_RI
);
13510 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13513 case M16_OPC_ADDIU8
:
13514 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13517 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13519 case M16_OPC_SLTIU
:
13520 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13525 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13528 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13531 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13534 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13537 check_insn(ctx
, ISA_MIPS32
);
13539 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13540 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13541 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13542 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13543 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13544 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13545 | (ctx
->opcode
& 0xf)) << 3;
13547 if (ctx
->opcode
& (1 << 7)) {
13548 gen_mips16_save(ctx
, xsregs
, aregs
,
13549 do_ra
, do_s0
, do_s1
,
13552 gen_mips16_restore(ctx
, xsregs
, aregs
,
13553 do_ra
, do_s0
, do_s1
,
13559 generate_exception_end(ctx
, EXCP_RI
);
13564 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13567 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13569 #if defined(TARGET_MIPS64)
13571 check_insn(ctx
, ISA_MIPS3
);
13572 check_mips_64(ctx
);
13573 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13577 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13580 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13583 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13586 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13589 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13592 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13595 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13597 #if defined(TARGET_MIPS64)
13599 check_insn(ctx
, ISA_MIPS3
);
13600 check_mips_64(ctx
);
13601 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13605 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13608 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13611 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13614 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13616 #if defined(TARGET_MIPS64)
13618 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13622 generate_exception_end(ctx
, EXCP_RI
);
13629 static inline bool is_uhi(int sdbbp_code
)
13631 #ifdef CONFIG_USER_ONLY
13634 return semihosting_enabled() && sdbbp_code
== 1;
13638 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13642 int op
, cnvt_op
, op1
, offset
;
13646 op
= (ctx
->opcode
>> 11) & 0x1f;
13647 sa
= (ctx
->opcode
>> 2) & 0x7;
13648 sa
= sa
== 0 ? 8 : sa
;
13649 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13650 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13651 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13652 op1
= offset
= ctx
->opcode
& 0x1f;
13657 case M16_OPC_ADDIUSP
:
13659 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13661 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13664 case M16_OPC_ADDIUPC
:
13665 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13668 offset
= (ctx
->opcode
& 0x7ff) << 1;
13669 offset
= (int16_t)(offset
<< 4) >> 4;
13670 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13671 /* No delay slot, so just process as a normal instruction */
13674 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13675 offset
= (((ctx
->opcode
& 0x1f) << 21)
13676 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13678 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13679 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13683 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13684 ((int8_t)ctx
->opcode
) << 1, 0);
13685 /* No delay slot, so just process as a normal instruction */
13687 case M16_OPC_BNEQZ
:
13688 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13689 ((int8_t)ctx
->opcode
) << 1, 0);
13690 /* No delay slot, so just process as a normal instruction */
13692 case M16_OPC_SHIFT
:
13693 switch (ctx
->opcode
& 0x3) {
13695 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13698 #if defined(TARGET_MIPS64)
13699 check_insn(ctx
, ISA_MIPS3
);
13700 check_mips_64(ctx
);
13701 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13703 generate_exception_end(ctx
, EXCP_RI
);
13707 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13710 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13714 #if defined(TARGET_MIPS64)
13716 check_insn(ctx
, ISA_MIPS3
);
13717 check_mips_64(ctx
);
13718 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13723 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13725 if ((ctx
->opcode
>> 4) & 1) {
13726 #if defined(TARGET_MIPS64)
13727 check_insn(ctx
, ISA_MIPS3
);
13728 check_mips_64(ctx
);
13729 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13731 generate_exception_end(ctx
, EXCP_RI
);
13734 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13738 case M16_OPC_ADDIU8
:
13740 int16_t imm
= (int8_t) ctx
->opcode
;
13742 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13747 int16_t imm
= (uint8_t) ctx
->opcode
;
13748 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13751 case M16_OPC_SLTIU
:
13753 int16_t imm
= (uint8_t) ctx
->opcode
;
13754 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13761 funct
= (ctx
->opcode
>> 8) & 0x7;
13764 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13765 ((int8_t)ctx
->opcode
) << 1, 0);
13768 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13769 ((int8_t)ctx
->opcode
) << 1, 0);
13772 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13775 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13776 ((int8_t)ctx
->opcode
) << 3);
13779 check_insn(ctx
, ISA_MIPS32
);
13781 int do_ra
= ctx
->opcode
& (1 << 6);
13782 int do_s0
= ctx
->opcode
& (1 << 5);
13783 int do_s1
= ctx
->opcode
& (1 << 4);
13784 int framesize
= ctx
->opcode
& 0xf;
13786 if (framesize
== 0) {
13789 framesize
= framesize
<< 3;
13792 if (ctx
->opcode
& (1 << 7)) {
13793 gen_mips16_save(ctx
, 0, 0,
13794 do_ra
, do_s0
, do_s1
, framesize
);
13796 gen_mips16_restore(ctx
, 0, 0,
13797 do_ra
, do_s0
, do_s1
, framesize
);
13803 int rz
= xlat(ctx
->opcode
& 0x7);
13805 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13806 ((ctx
->opcode
>> 5) & 0x7);
13807 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13811 reg32
= ctx
->opcode
& 0x1f;
13812 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13815 generate_exception_end(ctx
, EXCP_RI
);
13822 int16_t imm
= (uint8_t) ctx
->opcode
;
13824 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13829 int16_t imm
= (uint8_t) ctx
->opcode
;
13830 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13833 #if defined(TARGET_MIPS64)
13835 check_insn(ctx
, ISA_MIPS3
);
13836 check_mips_64(ctx
);
13837 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13841 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13844 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13847 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13850 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13853 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13856 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13859 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13861 #if defined (TARGET_MIPS64)
13863 check_insn(ctx
, ISA_MIPS3
);
13864 check_mips_64(ctx
);
13865 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13869 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13872 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13875 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13878 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13882 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13885 switch (ctx
->opcode
& 0x3) {
13887 mips32_op
= OPC_ADDU
;
13890 mips32_op
= OPC_SUBU
;
13892 #if defined(TARGET_MIPS64)
13894 mips32_op
= OPC_DADDU
;
13895 check_insn(ctx
, ISA_MIPS3
);
13896 check_mips_64(ctx
);
13899 mips32_op
= OPC_DSUBU
;
13900 check_insn(ctx
, ISA_MIPS3
);
13901 check_mips_64(ctx
);
13905 generate_exception_end(ctx
, EXCP_RI
);
13909 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13918 int nd
= (ctx
->opcode
>> 7) & 0x1;
13919 int link
= (ctx
->opcode
>> 6) & 0x1;
13920 int ra
= (ctx
->opcode
>> 5) & 0x1;
13923 check_insn(ctx
, ISA_MIPS32
);
13932 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13937 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13938 gen_helper_do_semihosting(cpu_env
);
13940 /* XXX: not clear which exception should be raised
13941 * when in debug mode...
13943 check_insn(ctx
, ISA_MIPS32
);
13944 generate_exception_end(ctx
, EXCP_DBp
);
13948 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13951 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13954 generate_exception_end(ctx
, EXCP_BREAK
);
13957 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13960 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13963 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13965 #if defined (TARGET_MIPS64)
13967 check_insn(ctx
, ISA_MIPS3
);
13968 check_mips_64(ctx
);
13969 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13973 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13976 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13979 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13982 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13985 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13988 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13991 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13994 check_insn(ctx
, ISA_MIPS32
);
13996 case RR_RY_CNVT_ZEB
:
13997 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13999 case RR_RY_CNVT_ZEH
:
14000 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14002 case RR_RY_CNVT_SEB
:
14003 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14005 case RR_RY_CNVT_SEH
:
14006 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14008 #if defined (TARGET_MIPS64)
14009 case RR_RY_CNVT_ZEW
:
14010 check_insn(ctx
, ISA_MIPS64
);
14011 check_mips_64(ctx
);
14012 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14014 case RR_RY_CNVT_SEW
:
14015 check_insn(ctx
, ISA_MIPS64
);
14016 check_mips_64(ctx
);
14017 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14021 generate_exception_end(ctx
, EXCP_RI
);
14026 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14028 #if defined (TARGET_MIPS64)
14030 check_insn(ctx
, ISA_MIPS3
);
14031 check_mips_64(ctx
);
14032 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14035 check_insn(ctx
, ISA_MIPS3
);
14036 check_mips_64(ctx
);
14037 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14040 check_insn(ctx
, ISA_MIPS3
);
14041 check_mips_64(ctx
);
14042 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14045 check_insn(ctx
, ISA_MIPS3
);
14046 check_mips_64(ctx
);
14047 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14051 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14054 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14057 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14060 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14062 #if defined (TARGET_MIPS64)
14064 check_insn(ctx
, ISA_MIPS3
);
14065 check_mips_64(ctx
);
14066 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14069 check_insn(ctx
, ISA_MIPS3
);
14070 check_mips_64(ctx
);
14071 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14074 check_insn(ctx
, ISA_MIPS3
);
14075 check_mips_64(ctx
);
14076 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14079 check_insn(ctx
, ISA_MIPS3
);
14080 check_mips_64(ctx
);
14081 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14085 generate_exception_end(ctx
, EXCP_RI
);
14089 case M16_OPC_EXTEND
:
14090 decode_extended_mips16_opc(env
, ctx
);
14093 #if defined(TARGET_MIPS64)
14095 funct
= (ctx
->opcode
>> 8) & 0x7;
14096 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14100 generate_exception_end(ctx
, EXCP_RI
);
14107 /* microMIPS extension to MIPS32/MIPS64 */
14110 * microMIPS32/microMIPS64 major opcodes
14112 * 1. MIPS Architecture for Programmers Volume II-B:
14113 * The microMIPS32 Instruction Set (Revision 3.05)
14115 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14117 * 2. MIPS Architecture For Programmers Volume II-A:
14118 * The MIPS64 Instruction Set (Revision 3.51)
14148 POOL32S
= 0x16, /* MIPS64 */
14149 DADDIU32
= 0x17, /* MIPS64 */
14178 /* 0x29 is reserved */
14191 /* 0x31 is reserved */
14204 SD32
= 0x36, /* MIPS64 */
14205 LD32
= 0x37, /* MIPS64 */
14207 /* 0x39 is reserved */
14223 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14245 /* POOL32A encoding of minor opcode field */
14248 /* These opcodes are distinguished only by bits 9..6; those bits are
14249 * what are recorded below. */
14286 /* The following can be distinguished by their lower 6 bits. */
14296 /* POOL32AXF encoding of minor opcode field extension */
14299 * 1. MIPS Architecture for Programmers Volume II-B:
14300 * The microMIPS32 Instruction Set (Revision 3.05)
14302 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14304 * 2. MIPS Architecture for Programmers VolumeIV-e:
14305 * The MIPS DSP Application-Specific Extension
14306 * to the microMIPS32 Architecture (Revision 2.34)
14308 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14323 /* begin of microMIPS32 DSP */
14325 /* bits 13..12 for 0x01 */
14331 /* bits 13..12 for 0x2a */
14337 /* bits 13..12 for 0x32 */
14341 /* end of microMIPS32 DSP */
14343 /* bits 15..12 for 0x2c */
14360 /* bits 15..12 for 0x34 */
14368 /* bits 15..12 for 0x3c */
14370 JR
= 0x0, /* alias */
14378 /* bits 15..12 for 0x05 */
14382 /* bits 15..12 for 0x0d */
14394 /* bits 15..12 for 0x15 */
14400 /* bits 15..12 for 0x1d */
14404 /* bits 15..12 for 0x2d */
14409 /* bits 15..12 for 0x35 */
14416 /* POOL32B encoding of minor opcode field (bits 15..12) */
14432 /* POOL32C encoding of minor opcode field (bits 15..12) */
14453 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14466 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14479 /* POOL32F encoding of minor opcode field (bits 5..0) */
14482 /* These are the bit 7..6 values */
14491 /* These are the bit 8..6 values */
14516 MOVZ_FMT_05
= 0x05,
14550 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14557 /* POOL32Fxf encoding of minor opcode extension field */
14595 /* POOL32I encoding of minor opcode field (bits 25..21) */
14625 /* These overlap and are distinguished by bit16 of the instruction */
14634 /* POOL16A encoding of minor opcode field */
14641 /* POOL16B encoding of minor opcode field */
14648 /* POOL16C encoding of minor opcode field */
14668 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14692 /* POOL16D encoding of minor opcode field */
14699 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit microMIPS register field to the architectural GPR number.
 * The 16-bit encodings can only name $16, $17 and $2..$7. */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Used for 16-bit store instructions. */
/* Same as mmreg() except encoding 0 selects $0 (zero) instead of $16,
 * since storing from $zero is useful while storing to it is not. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Register-field extraction helpers for 16-bit microMIPS encodings;
 * bit positions follow the microMIPS32 instruction format tables. */
#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate: extract 'width' bits at 'start', then sign-extend
 * by shifting the field up to bit 31 and arithmetic-shifting back. */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14736 static void gen_addiur1sp(DisasContext
*ctx
)
14738 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14740 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14743 static void gen_addiur2(DisasContext
*ctx
)
14745 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14746 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14747 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14749 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14752 static void gen_addiusp(DisasContext
*ctx
)
14754 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14757 if (encoded
<= 1) {
14758 decoded
= 256 + encoded
;
14759 } else if (encoded
<= 255) {
14761 } else if (encoded
<= 509) {
14762 decoded
= encoded
- 512;
14764 decoded
= encoded
- 768;
14767 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14770 static void gen_addius5(DisasContext
*ctx
)
14772 int imm
= SIMM(ctx
->opcode
, 1, 4);
14773 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14775 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14778 static void gen_andi16(DisasContext
*ctx
)
14780 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14781 31, 32, 63, 64, 255, 32768, 65535 };
14782 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14783 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14784 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14786 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14789 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14790 int base
, int16_t offset
)
14795 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14796 generate_exception_end(ctx
, EXCP_RI
);
14800 t0
= tcg_temp_new();
14802 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14804 t1
= tcg_const_tl(reglist
);
14805 t2
= tcg_const_i32(ctx
->mem_idx
);
14807 save_cpu_state(ctx
, 1);
14810 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14813 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14815 #ifdef TARGET_MIPS64
14817 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14820 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14826 tcg_temp_free_i32(t2
);
14830 static void gen_pool16c_insn(DisasContext
*ctx
)
14832 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14833 int rs
= mmreg(ctx
->opcode
& 0x7);
14835 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14840 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14846 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14852 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14858 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14865 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14866 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14868 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14877 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14878 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14880 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14887 int reg
= ctx
->opcode
& 0x1f;
14889 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14895 int reg
= ctx
->opcode
& 0x1f;
14896 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14897 /* Let normal delay slot handling in our caller take us
14898 to the branch target. */
14903 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14904 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14908 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14909 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14913 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14917 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14920 generate_exception_end(ctx
, EXCP_BREAK
);
14923 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14924 gen_helper_do_semihosting(cpu_env
);
14926 /* XXX: not clear which exception should be raised
14927 * when in debug mode...
14929 check_insn(ctx
, ISA_MIPS32
);
14930 generate_exception_end(ctx
, EXCP_DBp
);
14933 case JRADDIUSP
+ 0:
14934 case JRADDIUSP
+ 1:
14936 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14937 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14938 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14939 /* Let normal delay slot handling in our caller take us
14940 to the branch target. */
14944 generate_exception_end(ctx
, EXCP_RI
);
14949 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14952 int rd
, rs
, re
, rt
;
14953 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14954 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14955 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14956 rd
= rd_enc
[enc_dest
];
14957 re
= re_enc
[enc_dest
];
14958 rs
= rs_rt_enc
[enc_rs
];
14959 rt
= rs_rt_enc
[enc_rt
];
14961 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14963 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14966 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14968 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14972 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14974 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14975 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14977 switch (ctx
->opcode
& 0xf) {
14979 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14982 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14986 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14987 int offset
= extract32(ctx
->opcode
, 4, 4);
14988 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14991 case R6_JRC16
: /* JRCADDIUSP */
14992 if ((ctx
->opcode
>> 4) & 1) {
14994 int imm
= extract32(ctx
->opcode
, 5, 5);
14995 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14996 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14999 rs
= extract32(ctx
->opcode
, 5, 5);
15000 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15012 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15013 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15014 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15015 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15019 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15022 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15026 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15027 int offset
= extract32(ctx
->opcode
, 4, 4);
15028 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15031 case JALRC16
: /* BREAK16, SDBBP16 */
15032 switch (ctx
->opcode
& 0x3f) {
15034 case JALRC16
+ 0x20:
15036 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15041 generate_exception(ctx
, EXCP_BREAK
);
15045 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15046 gen_helper_do_semihosting(cpu_env
);
15048 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15049 generate_exception(ctx
, EXCP_RI
);
15051 generate_exception(ctx
, EXCP_DBp
);
15058 generate_exception(ctx
, EXCP_RI
);
15063 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
15065 TCGv t0
= tcg_temp_new();
15066 TCGv t1
= tcg_temp_new();
15068 gen_load_gpr(t0
, base
);
15071 gen_load_gpr(t1
, index
);
15072 tcg_gen_shli_tl(t1
, t1
, 2);
15073 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15076 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15077 gen_store_gpr(t1
, rd
);
15083 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
15084 int base
, int16_t offset
)
15088 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15089 generate_exception_end(ctx
, EXCP_RI
);
15093 t0
= tcg_temp_new();
15094 t1
= tcg_temp_new();
15096 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15101 generate_exception_end(ctx
, EXCP_RI
);
15104 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15105 gen_store_gpr(t1
, rd
);
15106 tcg_gen_movi_tl(t1
, 4);
15107 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15108 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15109 gen_store_gpr(t1
, rd
+1);
15112 gen_load_gpr(t1
, rd
);
15113 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15114 tcg_gen_movi_tl(t1
, 4);
15115 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15116 gen_load_gpr(t1
, rd
+1);
15117 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15119 #ifdef TARGET_MIPS64
15122 generate_exception_end(ctx
, EXCP_RI
);
15125 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15126 gen_store_gpr(t1
, rd
);
15127 tcg_gen_movi_tl(t1
, 8);
15128 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15129 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15130 gen_store_gpr(t1
, rd
+1);
15133 gen_load_gpr(t1
, rd
);
15134 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15135 tcg_gen_movi_tl(t1
, 8);
15136 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15137 gen_load_gpr(t1
, rd
+1);
15138 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15146 static void gen_sync(int stype
)
15148 TCGBar tcg_mo
= TCG_BAR_SC
;
15151 case 0x4: /* SYNC_WMB */
15152 tcg_mo
|= TCG_MO_ST_ST
;
15154 case 0x10: /* SYNC_MB */
15155 tcg_mo
|= TCG_MO_ALL
;
15157 case 0x11: /* SYNC_ACQUIRE */
15158 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15160 case 0x12: /* SYNC_RELEASE */
15161 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15163 case 0x13: /* SYNC_RMB */
15164 tcg_mo
|= TCG_MO_LD_LD
;
15167 tcg_mo
|= TCG_MO_ALL
;
15171 tcg_gen_mb(tcg_mo
);
15174 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15176 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15177 int minor
= (ctx
->opcode
>> 12) & 0xf;
15178 uint32_t mips32_op
;
15180 switch (extension
) {
15182 mips32_op
= OPC_TEQ
;
15185 mips32_op
= OPC_TGE
;
15188 mips32_op
= OPC_TGEU
;
15191 mips32_op
= OPC_TLT
;
15194 mips32_op
= OPC_TLTU
;
15197 mips32_op
= OPC_TNE
;
15199 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15201 #ifndef CONFIG_USER_ONLY
15204 check_cp0_enabled(ctx
);
15206 /* Treat as NOP. */
15209 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15213 check_cp0_enabled(ctx
);
15215 TCGv t0
= tcg_temp_new();
15217 gen_load_gpr(t0
, rt
);
15218 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15224 switch (minor
& 3) {
15226 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15229 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15232 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15235 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15238 goto pool32axf_invalid
;
15242 switch (minor
& 3) {
15244 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15247 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15250 goto pool32axf_invalid
;
15256 check_insn(ctx
, ISA_MIPS32R6
);
15257 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15260 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15263 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15266 mips32_op
= OPC_CLO
;
15269 mips32_op
= OPC_CLZ
;
15271 check_insn(ctx
, ISA_MIPS32
);
15272 gen_cl(ctx
, mips32_op
, rt
, rs
);
15275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15276 gen_rdhwr(ctx
, rt
, rs
, 0);
15279 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15282 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15283 mips32_op
= OPC_MULT
;
15286 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15287 mips32_op
= OPC_MULTU
;
15290 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15291 mips32_op
= OPC_DIV
;
15294 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15295 mips32_op
= OPC_DIVU
;
15298 check_insn(ctx
, ISA_MIPS32
);
15299 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15302 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15303 mips32_op
= OPC_MADD
;
15306 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15307 mips32_op
= OPC_MADDU
;
15310 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15311 mips32_op
= OPC_MSUB
;
15314 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15315 mips32_op
= OPC_MSUBU
;
15317 check_insn(ctx
, ISA_MIPS32
);
15318 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15321 goto pool32axf_invalid
;
15332 generate_exception_err(ctx
, EXCP_CpU
, 2);
15335 goto pool32axf_invalid
;
15340 case JALR
: /* JALRC */
15341 case JALR_HB
: /* JALRC_HB */
15342 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15343 /* JALRC, JALRC_HB */
15344 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15346 /* JALR, JALR_HB */
15347 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15348 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15354 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15355 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15358 goto pool32axf_invalid
;
15364 check_cp0_enabled(ctx
);
15365 check_insn(ctx
, ISA_MIPS32R2
);
15366 gen_load_srsgpr(rs
, rt
);
15369 check_cp0_enabled(ctx
);
15370 check_insn(ctx
, ISA_MIPS32R2
);
15371 gen_store_srsgpr(rs
, rt
);
15374 goto pool32axf_invalid
;
15377 #ifndef CONFIG_USER_ONLY
15381 mips32_op
= OPC_TLBP
;
15384 mips32_op
= OPC_TLBR
;
15387 mips32_op
= OPC_TLBWI
;
15390 mips32_op
= OPC_TLBWR
;
15393 mips32_op
= OPC_TLBINV
;
15396 mips32_op
= OPC_TLBINVF
;
15399 mips32_op
= OPC_WAIT
;
15402 mips32_op
= OPC_DERET
;
15405 mips32_op
= OPC_ERET
;
15407 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15410 goto pool32axf_invalid
;
15416 check_cp0_enabled(ctx
);
15418 TCGv t0
= tcg_temp_new();
15420 save_cpu_state(ctx
, 1);
15421 gen_helper_di(t0
, cpu_env
);
15422 gen_store_gpr(t0
, rs
);
15423 /* Stop translation as we may have switched the execution mode */
15424 ctx
->base
.is_jmp
= DISAS_STOP
;
15429 check_cp0_enabled(ctx
);
15431 TCGv t0
= tcg_temp_new();
15433 save_cpu_state(ctx
, 1);
15434 gen_helper_ei(t0
, cpu_env
);
15435 gen_store_gpr(t0
, rs
);
15436 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15437 of translated code to check for pending interrupts. */
15438 gen_save_pc(ctx
->base
.pc_next
+ 4);
15439 ctx
->base
.is_jmp
= DISAS_EXIT
;
15444 goto pool32axf_invalid
;
15451 gen_sync(extract32(ctx
->opcode
, 16, 5));
15454 generate_exception_end(ctx
, EXCP_SYSCALL
);
15457 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15458 gen_helper_do_semihosting(cpu_env
);
15460 check_insn(ctx
, ISA_MIPS32
);
15461 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15462 generate_exception_end(ctx
, EXCP_RI
);
15464 generate_exception_end(ctx
, EXCP_DBp
);
15469 goto pool32axf_invalid
;
15473 switch (minor
& 3) {
15475 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15478 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15481 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15484 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15487 goto pool32axf_invalid
;
15491 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15494 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15497 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15500 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15503 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15506 goto pool32axf_invalid
;
15511 MIPS_INVAL("pool32axf");
15512 generate_exception_end(ctx
, EXCP_RI
);
15517 /* Values for microMIPS fmt field. Variable-width, depending on which
15518 formats the instruction supports. */
15537 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15539 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15540 uint32_t mips32_op
;
15542 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15543 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15544 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15546 switch (extension
) {
15547 case FLOAT_1BIT_FMT(CFC1
, 0):
15548 mips32_op
= OPC_CFC1
;
15550 case FLOAT_1BIT_FMT(CTC1
, 0):
15551 mips32_op
= OPC_CTC1
;
15553 case FLOAT_1BIT_FMT(MFC1
, 0):
15554 mips32_op
= OPC_MFC1
;
15556 case FLOAT_1BIT_FMT(MTC1
, 0):
15557 mips32_op
= OPC_MTC1
;
15559 case FLOAT_1BIT_FMT(MFHC1
, 0):
15560 mips32_op
= OPC_MFHC1
;
15562 case FLOAT_1BIT_FMT(MTHC1
, 0):
15563 mips32_op
= OPC_MTHC1
;
15565 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15568 /* Reciprocal square root */
15569 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15570 mips32_op
= OPC_RSQRT_S
;
15572 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15573 mips32_op
= OPC_RSQRT_D
;
15577 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15578 mips32_op
= OPC_SQRT_S
;
15580 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15581 mips32_op
= OPC_SQRT_D
;
15585 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15586 mips32_op
= OPC_RECIP_S
;
15588 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15589 mips32_op
= OPC_RECIP_D
;
15593 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15594 mips32_op
= OPC_FLOOR_L_S
;
15596 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15597 mips32_op
= OPC_FLOOR_L_D
;
15599 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15600 mips32_op
= OPC_FLOOR_W_S
;
15602 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15603 mips32_op
= OPC_FLOOR_W_D
;
15607 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15608 mips32_op
= OPC_CEIL_L_S
;
15610 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15611 mips32_op
= OPC_CEIL_L_D
;
15613 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15614 mips32_op
= OPC_CEIL_W_S
;
15616 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15617 mips32_op
= OPC_CEIL_W_D
;
15621 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15622 mips32_op
= OPC_TRUNC_L_S
;
15624 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15625 mips32_op
= OPC_TRUNC_L_D
;
15627 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15628 mips32_op
= OPC_TRUNC_W_S
;
15630 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15631 mips32_op
= OPC_TRUNC_W_D
;
15635 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15636 mips32_op
= OPC_ROUND_L_S
;
15638 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15639 mips32_op
= OPC_ROUND_L_D
;
15641 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15642 mips32_op
= OPC_ROUND_W_S
;
15644 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15645 mips32_op
= OPC_ROUND_W_D
;
15648 /* Integer to floating-point conversion */
15649 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15650 mips32_op
= OPC_CVT_L_S
;
15652 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15653 mips32_op
= OPC_CVT_L_D
;
15655 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15656 mips32_op
= OPC_CVT_W_S
;
15658 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15659 mips32_op
= OPC_CVT_W_D
;
15662 /* Paired-foo conversions */
15663 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15664 mips32_op
= OPC_CVT_S_PL
;
15666 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15667 mips32_op
= OPC_CVT_S_PU
;
15669 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15670 mips32_op
= OPC_CVT_PW_PS
;
15672 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15673 mips32_op
= OPC_CVT_PS_PW
;
15676 /* Floating-point moves */
15677 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15678 mips32_op
= OPC_MOV_S
;
15680 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15681 mips32_op
= OPC_MOV_D
;
15683 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15684 mips32_op
= OPC_MOV_PS
;
15687 /* Absolute value */
15688 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15689 mips32_op
= OPC_ABS_S
;
15691 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15692 mips32_op
= OPC_ABS_D
;
15694 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15695 mips32_op
= OPC_ABS_PS
;
15699 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15700 mips32_op
= OPC_NEG_S
;
15702 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15703 mips32_op
= OPC_NEG_D
;
15705 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15706 mips32_op
= OPC_NEG_PS
;
15709 /* Reciprocal square root step */
15710 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15711 mips32_op
= OPC_RSQRT1_S
;
15713 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15714 mips32_op
= OPC_RSQRT1_D
;
15716 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15717 mips32_op
= OPC_RSQRT1_PS
;
15720 /* Reciprocal step */
15721 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15722 mips32_op
= OPC_RECIP1_S
;
15724 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15725 mips32_op
= OPC_RECIP1_S
;
15727 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15728 mips32_op
= OPC_RECIP1_PS
;
15731 /* Conversions from double */
15732 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15733 mips32_op
= OPC_CVT_D_S
;
15735 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15736 mips32_op
= OPC_CVT_D_W
;
15738 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15739 mips32_op
= OPC_CVT_D_L
;
15742 /* Conversions from single */
15743 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15744 mips32_op
= OPC_CVT_S_D
;
15746 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15747 mips32_op
= OPC_CVT_S_W
;
15749 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15750 mips32_op
= OPC_CVT_S_L
;
15752 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15755 /* Conditional moves on floating-point codes */
15756 case COND_FLOAT_MOV(MOVT
, 0):
15757 case COND_FLOAT_MOV(MOVT
, 1):
15758 case COND_FLOAT_MOV(MOVT
, 2):
15759 case COND_FLOAT_MOV(MOVT
, 3):
15760 case COND_FLOAT_MOV(MOVT
, 4):
15761 case COND_FLOAT_MOV(MOVT
, 5):
15762 case COND_FLOAT_MOV(MOVT
, 6):
15763 case COND_FLOAT_MOV(MOVT
, 7):
15764 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15765 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15767 case COND_FLOAT_MOV(MOVF
, 0):
15768 case COND_FLOAT_MOV(MOVF
, 1):
15769 case COND_FLOAT_MOV(MOVF
, 2):
15770 case COND_FLOAT_MOV(MOVF
, 3):
15771 case COND_FLOAT_MOV(MOVF
, 4):
15772 case COND_FLOAT_MOV(MOVF
, 5):
15773 case COND_FLOAT_MOV(MOVF
, 6):
15774 case COND_FLOAT_MOV(MOVF
, 7):
15775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15776 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15779 MIPS_INVAL("pool32fxf");
15780 generate_exception_end(ctx
, EXCP_RI
);
15785 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15789 int rt
, rs
, rd
, rr
;
15791 uint32_t op
, minor
, minor2
, mips32_op
;
15792 uint32_t cond
, fmt
, cc
;
15794 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15795 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15797 rt
= (ctx
->opcode
>> 21) & 0x1f;
15798 rs
= (ctx
->opcode
>> 16) & 0x1f;
15799 rd
= (ctx
->opcode
>> 11) & 0x1f;
15800 rr
= (ctx
->opcode
>> 6) & 0x1f;
15801 imm
= (int16_t) ctx
->opcode
;
15803 op
= (ctx
->opcode
>> 26) & 0x3f;
15806 minor
= ctx
->opcode
& 0x3f;
15809 minor
= (ctx
->opcode
>> 6) & 0xf;
15812 mips32_op
= OPC_SLL
;
15815 mips32_op
= OPC_SRA
;
15818 mips32_op
= OPC_SRL
;
15821 mips32_op
= OPC_ROTR
;
15823 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15826 check_insn(ctx
, ISA_MIPS32R6
);
15827 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15830 check_insn(ctx
, ISA_MIPS32R6
);
15831 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15834 check_insn(ctx
, ISA_MIPS32R6
);
15835 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15838 goto pool32a_invalid
;
15842 minor
= (ctx
->opcode
>> 6) & 0xf;
15846 mips32_op
= OPC_ADD
;
15849 mips32_op
= OPC_ADDU
;
15852 mips32_op
= OPC_SUB
;
15855 mips32_op
= OPC_SUBU
;
15858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15859 mips32_op
= OPC_MUL
;
15861 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15865 mips32_op
= OPC_SLLV
;
15868 mips32_op
= OPC_SRLV
;
15871 mips32_op
= OPC_SRAV
;
15874 mips32_op
= OPC_ROTRV
;
15876 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15878 /* Logical operations */
15880 mips32_op
= OPC_AND
;
15883 mips32_op
= OPC_OR
;
15886 mips32_op
= OPC_NOR
;
15889 mips32_op
= OPC_XOR
;
15891 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15893 /* Set less than */
15895 mips32_op
= OPC_SLT
;
15898 mips32_op
= OPC_SLTU
;
15900 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15903 goto pool32a_invalid
;
15907 minor
= (ctx
->opcode
>> 6) & 0xf;
15909 /* Conditional moves */
15910 case MOVN
: /* MUL */
15911 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15913 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15916 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15919 case MOVZ
: /* MUH */
15920 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15922 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15925 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15929 check_insn(ctx
, ISA_MIPS32R6
);
15930 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15933 check_insn(ctx
, ISA_MIPS32R6
);
15934 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15936 case LWXS
: /* DIV */
15937 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15939 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15942 gen_ldxs(ctx
, rs
, rt
, rd
);
15946 check_insn(ctx
, ISA_MIPS32R6
);
15947 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15950 check_insn(ctx
, ISA_MIPS32R6
);
15951 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15954 check_insn(ctx
, ISA_MIPS32R6
);
15955 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15958 goto pool32a_invalid
;
15962 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15965 check_insn(ctx
, ISA_MIPS32R6
);
15966 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15967 extract32(ctx
->opcode
, 9, 2));
15970 check_insn(ctx
, ISA_MIPS32R6
);
15971 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15974 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15977 gen_pool32axf(env
, ctx
, rt
, rs
);
15980 generate_exception_end(ctx
, EXCP_BREAK
);
15983 check_insn(ctx
, ISA_MIPS32R6
);
15984 generate_exception_end(ctx
, EXCP_RI
);
15988 MIPS_INVAL("pool32a");
15989 generate_exception_end(ctx
, EXCP_RI
);
15994 minor
= (ctx
->opcode
>> 12) & 0xf;
15997 check_cp0_enabled(ctx
);
15998 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15999 gen_cache_operation(ctx
, rt
, rs
, imm
);
16004 /* COP2: Not implemented. */
16005 generate_exception_err(ctx
, EXCP_CpU
, 2);
16007 #ifdef TARGET_MIPS64
16010 check_insn(ctx
, ISA_MIPS3
);
16011 check_mips_64(ctx
);
16016 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16018 #ifdef TARGET_MIPS64
16021 check_insn(ctx
, ISA_MIPS3
);
16022 check_mips_64(ctx
);
16027 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16030 MIPS_INVAL("pool32b");
16031 generate_exception_end(ctx
, EXCP_RI
);
16036 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16037 minor
= ctx
->opcode
& 0x3f;
16038 check_cp1_enabled(ctx
);
16041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16042 mips32_op
= OPC_ALNV_PS
;
16045 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16046 mips32_op
= OPC_MADD_S
;
16049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16050 mips32_op
= OPC_MADD_D
;
16053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16054 mips32_op
= OPC_MADD_PS
;
16057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16058 mips32_op
= OPC_MSUB_S
;
16061 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16062 mips32_op
= OPC_MSUB_D
;
16065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16066 mips32_op
= OPC_MSUB_PS
;
16069 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16070 mips32_op
= OPC_NMADD_S
;
16073 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16074 mips32_op
= OPC_NMADD_D
;
16077 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16078 mips32_op
= OPC_NMADD_PS
;
16081 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16082 mips32_op
= OPC_NMSUB_S
;
16085 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16086 mips32_op
= OPC_NMSUB_D
;
16089 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16090 mips32_op
= OPC_NMSUB_PS
;
16092 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16094 case CABS_COND_FMT
:
16095 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16096 cond
= (ctx
->opcode
>> 6) & 0xf;
16097 cc
= (ctx
->opcode
>> 13) & 0x7;
16098 fmt
= (ctx
->opcode
>> 10) & 0x3;
16101 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16104 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16107 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16110 goto pool32f_invalid
;
16114 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16115 cond
= (ctx
->opcode
>> 6) & 0xf;
16116 cc
= (ctx
->opcode
>> 13) & 0x7;
16117 fmt
= (ctx
->opcode
>> 10) & 0x3;
16120 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16123 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16126 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16129 goto pool32f_invalid
;
16133 check_insn(ctx
, ISA_MIPS32R6
);
16134 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16137 check_insn(ctx
, ISA_MIPS32R6
);
16138 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16141 gen_pool32fxf(ctx
, rt
, rs
);
16145 switch ((ctx
->opcode
>> 6) & 0x7) {
16147 mips32_op
= OPC_PLL_PS
;
16150 mips32_op
= OPC_PLU_PS
;
16153 mips32_op
= OPC_PUL_PS
;
16156 mips32_op
= OPC_PUU_PS
;
16159 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16160 mips32_op
= OPC_CVT_PS_S
;
16162 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16165 goto pool32f_invalid
;
16169 check_insn(ctx
, ISA_MIPS32R6
);
16170 switch ((ctx
->opcode
>> 9) & 0x3) {
16172 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16175 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16178 goto pool32f_invalid
;
16183 switch ((ctx
->opcode
>> 6) & 0x7) {
16185 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16186 mips32_op
= OPC_LWXC1
;
16189 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16190 mips32_op
= OPC_SWXC1
;
16193 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16194 mips32_op
= OPC_LDXC1
;
16197 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16198 mips32_op
= OPC_SDXC1
;
16201 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16202 mips32_op
= OPC_LUXC1
;
16205 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16206 mips32_op
= OPC_SUXC1
;
16208 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16211 goto pool32f_invalid
;
16215 check_insn(ctx
, ISA_MIPS32R6
);
16216 switch ((ctx
->opcode
>> 9) & 0x3) {
16218 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16221 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16224 goto pool32f_invalid
;
16229 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16230 fmt
= (ctx
->opcode
>> 9) & 0x3;
16231 switch ((ctx
->opcode
>> 6) & 0x7) {
16235 mips32_op
= OPC_RSQRT2_S
;
16238 mips32_op
= OPC_RSQRT2_D
;
16241 mips32_op
= OPC_RSQRT2_PS
;
16244 goto pool32f_invalid
;
16250 mips32_op
= OPC_RECIP2_S
;
16253 mips32_op
= OPC_RECIP2_D
;
16256 mips32_op
= OPC_RECIP2_PS
;
16259 goto pool32f_invalid
;
16263 mips32_op
= OPC_ADDR_PS
;
16266 mips32_op
= OPC_MULR_PS
;
16268 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16271 goto pool32f_invalid
;
16275 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16276 cc
= (ctx
->opcode
>> 13) & 0x7;
16277 fmt
= (ctx
->opcode
>> 9) & 0x3;
16278 switch ((ctx
->opcode
>> 6) & 0x7) {
16279 case MOVF_FMT
: /* RINT_FMT */
16280 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16284 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16287 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16290 goto pool32f_invalid
;
16296 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16299 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16303 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16306 goto pool32f_invalid
;
16310 case MOVT_FMT
: /* CLASS_FMT */
16311 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16315 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16318 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16321 goto pool32f_invalid
;
16327 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16330 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16334 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16337 goto pool32f_invalid
;
16342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16345 goto pool32f_invalid
;
16348 #define FINSN_3ARG_SDPS(prfx) \
16349 switch ((ctx->opcode >> 8) & 0x3) { \
16351 mips32_op = OPC_##prfx##_S; \
16354 mips32_op = OPC_##prfx##_D; \
16356 case FMT_SDPS_PS: \
16358 mips32_op = OPC_##prfx##_PS; \
16361 goto pool32f_invalid; \
16364 check_insn(ctx
, ISA_MIPS32R6
);
16365 switch ((ctx
->opcode
>> 9) & 0x3) {
16367 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16370 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16373 goto pool32f_invalid
;
16377 check_insn(ctx
, ISA_MIPS32R6
);
16378 switch ((ctx
->opcode
>> 9) & 0x3) {
16380 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16383 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16386 goto pool32f_invalid
;
16390 /* regular FP ops */
16391 switch ((ctx
->opcode
>> 6) & 0x3) {
16393 FINSN_3ARG_SDPS(ADD
);
16396 FINSN_3ARG_SDPS(SUB
);
16399 FINSN_3ARG_SDPS(MUL
);
16402 fmt
= (ctx
->opcode
>> 8) & 0x3;
16404 mips32_op
= OPC_DIV_D
;
16405 } else if (fmt
== 0) {
16406 mips32_op
= OPC_DIV_S
;
16408 goto pool32f_invalid
;
16412 goto pool32f_invalid
;
16417 switch ((ctx
->opcode
>> 6) & 0x7) {
16418 case MOVN_FMT
: /* SELEQZ_FMT */
16419 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16421 switch ((ctx
->opcode
>> 9) & 0x3) {
16423 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16426 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16429 goto pool32f_invalid
;
16433 FINSN_3ARG_SDPS(MOVN
);
16437 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16438 FINSN_3ARG_SDPS(MOVN
);
16440 case MOVZ_FMT
: /* SELNEZ_FMT */
16441 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16443 switch ((ctx
->opcode
>> 9) & 0x3) {
16445 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16448 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16451 goto pool32f_invalid
;
16455 FINSN_3ARG_SDPS(MOVZ
);
16459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16460 FINSN_3ARG_SDPS(MOVZ
);
16463 check_insn(ctx
, ISA_MIPS32R6
);
16464 switch ((ctx
->opcode
>> 9) & 0x3) {
16466 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16469 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16472 goto pool32f_invalid
;
16476 check_insn(ctx
, ISA_MIPS32R6
);
16477 switch ((ctx
->opcode
>> 9) & 0x3) {
16479 mips32_op
= OPC_MADDF_S
;
16482 mips32_op
= OPC_MADDF_D
;
16485 goto pool32f_invalid
;
16489 check_insn(ctx
, ISA_MIPS32R6
);
16490 switch ((ctx
->opcode
>> 9) & 0x3) {
16492 mips32_op
= OPC_MSUBF_S
;
16495 mips32_op
= OPC_MSUBF_D
;
16498 goto pool32f_invalid
;
16502 goto pool32f_invalid
;
16506 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16510 MIPS_INVAL("pool32f");
16511 generate_exception_end(ctx
, EXCP_RI
);
16515 generate_exception_err(ctx
, EXCP_CpU
, 1);
16519 minor
= (ctx
->opcode
>> 21) & 0x1f;
16522 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16523 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16527 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16528 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16531 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16532 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16533 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16536 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16537 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16540 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16541 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16542 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16545 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16546 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16547 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16550 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16551 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16554 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16555 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16559 case TLTI
: /* BC1EQZC */
16560 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16562 check_cp1_enabled(ctx
);
16563 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16566 mips32_op
= OPC_TLTI
;
16570 case TGEI
: /* BC1NEZC */
16571 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16573 check_cp1_enabled(ctx
);
16574 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16577 mips32_op
= OPC_TGEI
;
16582 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16583 mips32_op
= OPC_TLTIU
;
16586 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16587 mips32_op
= OPC_TGEIU
;
16589 case TNEI
: /* SYNCI */
16590 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16592 /* Break the TB to be able to sync copied instructions
16594 ctx
->base
.is_jmp
= DISAS_STOP
;
16597 mips32_op
= OPC_TNEI
;
16602 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16603 mips32_op
= OPC_TEQI
;
16605 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16610 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16611 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16612 4, rs
, 0, imm
<< 1, 0);
16613 /* Compact branches don't have a delay slot, so just let
16614 the normal delay slot handling take us to the branch
16618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16619 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16623 /* Break the TB to be able to sync copied instructions
16625 ctx
->base
.is_jmp
= DISAS_STOP
;
16629 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16630 /* COP2: Not implemented. */
16631 generate_exception_err(ctx
, EXCP_CpU
, 2);
16634 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16635 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16638 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16639 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16643 mips32_op
= OPC_BC1FANY4
;
16646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16647 mips32_op
= OPC_BC1TANY4
;
16650 check_insn(ctx
, ASE_MIPS3D
);
16653 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16654 check_cp1_enabled(ctx
);
16655 gen_compute_branch1(ctx
, mips32_op
,
16656 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16658 generate_exception_err(ctx
, EXCP_CpU
, 1);
16663 /* MIPS DSP: not implemented */
16666 MIPS_INVAL("pool32i");
16667 generate_exception_end(ctx
, EXCP_RI
);
16672 minor
= (ctx
->opcode
>> 12) & 0xf;
16673 offset
= sextract32(ctx
->opcode
, 0,
16674 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16678 mips32_op
= OPC_LWL
;
16681 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16682 mips32_op
= OPC_SWL
;
16685 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16686 mips32_op
= OPC_LWR
;
16689 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16690 mips32_op
= OPC_SWR
;
16692 #if defined(TARGET_MIPS64)
16694 check_insn(ctx
, ISA_MIPS3
);
16695 check_mips_64(ctx
);
16696 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16697 mips32_op
= OPC_LDL
;
16700 check_insn(ctx
, ISA_MIPS3
);
16701 check_mips_64(ctx
);
16702 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16703 mips32_op
= OPC_SDL
;
16706 check_insn(ctx
, ISA_MIPS3
);
16707 check_mips_64(ctx
);
16708 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16709 mips32_op
= OPC_LDR
;
16712 check_insn(ctx
, ISA_MIPS3
);
16713 check_mips_64(ctx
);
16714 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16715 mips32_op
= OPC_SDR
;
16718 check_insn(ctx
, ISA_MIPS3
);
16719 check_mips_64(ctx
);
16720 mips32_op
= OPC_LWU
;
16723 check_insn(ctx
, ISA_MIPS3
);
16724 check_mips_64(ctx
);
16725 mips32_op
= OPC_LLD
;
16729 mips32_op
= OPC_LL
;
16732 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16735 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16738 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16740 #if defined(TARGET_MIPS64)
16742 check_insn(ctx
, ISA_MIPS3
);
16743 check_mips_64(ctx
);
16744 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16749 MIPS_INVAL("pool32c ld-eva");
16750 generate_exception_end(ctx
, EXCP_RI
);
16753 check_cp0_enabled(ctx
);
16755 minor2
= (ctx
->opcode
>> 9) & 0x7;
16756 offset
= sextract32(ctx
->opcode
, 0, 9);
16759 mips32_op
= OPC_LBUE
;
16762 mips32_op
= OPC_LHUE
;
16765 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16766 mips32_op
= OPC_LWLE
;
16769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16770 mips32_op
= OPC_LWRE
;
16773 mips32_op
= OPC_LBE
;
16776 mips32_op
= OPC_LHE
;
16779 mips32_op
= OPC_LLE
;
16782 mips32_op
= OPC_LWE
;
16788 MIPS_INVAL("pool32c st-eva");
16789 generate_exception_end(ctx
, EXCP_RI
);
16792 check_cp0_enabled(ctx
);
16794 minor2
= (ctx
->opcode
>> 9) & 0x7;
16795 offset
= sextract32(ctx
->opcode
, 0, 9);
16798 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16799 mips32_op
= OPC_SWLE
;
16802 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16803 mips32_op
= OPC_SWRE
;
16806 /* Treat as no-op */
16807 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16808 /* hint codes 24-31 are reserved and signal RI */
16809 generate_exception(ctx
, EXCP_RI
);
16813 /* Treat as no-op */
16814 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16815 gen_cache_operation(ctx
, rt
, rs
, offset
);
16819 mips32_op
= OPC_SBE
;
16822 mips32_op
= OPC_SHE
;
16825 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16828 mips32_op
= OPC_SWE
;
16833 /* Treat as no-op */
16834 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16835 /* hint codes 24-31 are reserved and signal RI */
16836 generate_exception(ctx
, EXCP_RI
);
16840 MIPS_INVAL("pool32c");
16841 generate_exception_end(ctx
, EXCP_RI
);
16845 case ADDI32
: /* AUI, LUI */
16846 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16848 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16851 mips32_op
= OPC_ADDI
;
16856 mips32_op
= OPC_ADDIU
;
16858 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16861 /* Logical operations */
16863 mips32_op
= OPC_ORI
;
16866 mips32_op
= OPC_XORI
;
16869 mips32_op
= OPC_ANDI
;
16871 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16874 /* Set less than immediate */
16876 mips32_op
= OPC_SLTI
;
16879 mips32_op
= OPC_SLTIU
;
16881 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16884 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16885 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16886 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16887 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16889 case JALS32
: /* BOVC, BEQC, BEQZALC */
16890 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16893 mips32_op
= OPC_BOVC
;
16894 } else if (rs
< rt
&& rs
== 0) {
16896 mips32_op
= OPC_BEQZALC
;
16899 mips32_op
= OPC_BEQC
;
16901 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16904 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16905 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16906 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16909 case BEQ32
: /* BC */
16910 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16912 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16913 sextract32(ctx
->opcode
<< 1, 0, 27));
16916 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16919 case BNE32
: /* BALC */
16920 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16922 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16923 sextract32(ctx
->opcode
<< 1, 0, 27));
16926 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16929 case J32
: /* BGTZC, BLTZC, BLTC */
16930 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16931 if (rs
== 0 && rt
!= 0) {
16933 mips32_op
= OPC_BGTZC
;
16934 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16936 mips32_op
= OPC_BLTZC
;
16939 mips32_op
= OPC_BLTC
;
16941 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16944 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16945 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16948 case JAL32
: /* BLEZC, BGEZC, BGEC */
16949 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16950 if (rs
== 0 && rt
!= 0) {
16952 mips32_op
= OPC_BLEZC
;
16953 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16955 mips32_op
= OPC_BGEZC
;
16958 mips32_op
= OPC_BGEC
;
16960 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16963 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16964 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16965 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16968 /* Floating point (COP1) */
16970 mips32_op
= OPC_LWC1
;
16973 mips32_op
= OPC_LDC1
;
16976 mips32_op
= OPC_SWC1
;
16979 mips32_op
= OPC_SDC1
;
16981 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16983 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16984 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16985 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16986 switch ((ctx
->opcode
>> 16) & 0x1f) {
16995 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16998 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17001 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17011 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17014 generate_exception(ctx
, EXCP_RI
);
17019 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17020 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17022 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17025 case BNVC
: /* BNEC, BNEZALC */
17026 check_insn(ctx
, ISA_MIPS32R6
);
17029 mips32_op
= OPC_BNVC
;
17030 } else if (rs
< rt
&& rs
== 0) {
17032 mips32_op
= OPC_BNEZALC
;
17035 mips32_op
= OPC_BNEC
;
17037 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17039 case R6_BNEZC
: /* JIALC */
17040 check_insn(ctx
, ISA_MIPS32R6
);
17043 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17044 sextract32(ctx
->opcode
<< 1, 0, 22));
17047 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17050 case R6_BEQZC
: /* JIC */
17051 check_insn(ctx
, ISA_MIPS32R6
);
17054 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17055 sextract32(ctx
->opcode
<< 1, 0, 22));
17058 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17061 case BLEZALC
: /* BGEZALC, BGEUC */
17062 check_insn(ctx
, ISA_MIPS32R6
);
17063 if (rs
== 0 && rt
!= 0) {
17065 mips32_op
= OPC_BLEZALC
;
17066 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17068 mips32_op
= OPC_BGEZALC
;
17071 mips32_op
= OPC_BGEUC
;
17073 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17075 case BGTZALC
: /* BLTZALC, BLTUC */
17076 check_insn(ctx
, ISA_MIPS32R6
);
17077 if (rs
== 0 && rt
!= 0) {
17079 mips32_op
= OPC_BGTZALC
;
17080 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17082 mips32_op
= OPC_BLTZALC
;
17085 mips32_op
= OPC_BLTUC
;
17087 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17089 /* Loads and stores */
17091 mips32_op
= OPC_LB
;
17094 mips32_op
= OPC_LBU
;
17097 mips32_op
= OPC_LH
;
17100 mips32_op
= OPC_LHU
;
17103 mips32_op
= OPC_LW
;
17105 #ifdef TARGET_MIPS64
17107 check_insn(ctx
, ISA_MIPS3
);
17108 check_mips_64(ctx
);
17109 mips32_op
= OPC_LD
;
17112 check_insn(ctx
, ISA_MIPS3
);
17113 check_mips_64(ctx
);
17114 mips32_op
= OPC_SD
;
17118 mips32_op
= OPC_SB
;
17121 mips32_op
= OPC_SH
;
17124 mips32_op
= OPC_SW
;
17127 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17130 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17133 generate_exception_end(ctx
, EXCP_RI
);
17138 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
17142 /* make sure instructions are on a halfword boundary */
17143 if (ctx
->base
.pc_next
& 0x1) {
17144 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17145 generate_exception_end(ctx
, EXCP_AdEL
);
17149 op
= (ctx
->opcode
>> 10) & 0x3f;
17150 /* Enforce properly-sized instructions in a delay slot */
17151 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17152 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17154 /* POOL32A, POOL32B, POOL32I, POOL32C */
17156 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17158 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17160 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17162 /* LB32, LH32, LWC132, LDC132, LW32 */
17163 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17164 generate_exception_end(ctx
, EXCP_RI
);
17169 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17171 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17173 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17174 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17175 generate_exception_end(ctx
, EXCP_RI
);
17185 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17186 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17187 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17190 switch (ctx
->opcode
& 0x1) {
17198 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17199 /* In the Release 6 the register number location in
17200 * the instruction encoding has changed.
17202 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17204 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17210 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17211 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17212 int amount
= (ctx
->opcode
>> 1) & 0x7;
17214 amount
= amount
== 0 ? 8 : amount
;
17216 switch (ctx
->opcode
& 0x1) {
17225 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17229 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17230 gen_pool16c_r6_insn(ctx
);
17232 gen_pool16c_insn(ctx
);
17237 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17238 int rb
= 28; /* GP */
17239 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17241 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17246 if (ctx
->opcode
& 1) {
17247 generate_exception_end(ctx
, EXCP_RI
);
17250 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17251 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17252 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17253 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17258 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17259 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17260 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17261 offset
= (offset
== 0xf ? -1 : offset
);
17263 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17268 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17269 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17270 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17272 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17277 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17278 int rb
= 29; /* SP */
17279 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17281 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17286 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17287 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17288 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17290 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17295 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17296 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17297 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17299 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17304 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17305 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17306 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17308 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17313 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17314 int rb
= 29; /* SP */
17315 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17317 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17322 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17323 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17324 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17326 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17331 int rd
= uMIPS_RD5(ctx
->opcode
);
17332 int rs
= uMIPS_RS5(ctx
->opcode
);
17334 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17341 switch (ctx
->opcode
& 0x1) {
17351 switch (ctx
->opcode
& 0x1) {
17356 gen_addiur1sp(ctx
);
17360 case B16
: /* BC16 */
17361 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17362 sextract32(ctx
->opcode
, 0, 10) << 1,
17363 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17365 case BNEZ16
: /* BNEZC16 */
17366 case BEQZ16
: /* BEQZC16 */
17367 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17368 mmreg(uMIPS_RD(ctx
->opcode
)),
17369 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17370 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17375 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17376 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17378 imm
= (imm
== 0x7f ? -1 : imm
);
17379 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17385 generate_exception_end(ctx
, EXCP_RI
);
17388 decode_micromips32_opc(env
, ctx
);
17401 /* MAJOR, P16, and P32 pools opcodes */
17405 NM_MOVE_BALC
= 0x02,
17413 NM_P16_SHIFT
= 0x0c,
17431 NM_P_LS_U12
= 0x21,
17441 NM_P16_ADDU
= 0x2c,
17455 NM_MOVEPREV
= 0x3f,
17458 /* POOL32A instruction pool */
17460 NM_POOL32A0
= 0x00,
17461 NM_SPECIAL2
= 0x01,
17464 NM_POOL32A5
= 0x05,
17465 NM_POOL32A7
= 0x07,
17468 /* P.GP.W instruction pool */
17470 NM_ADDIUGP_W
= 0x00,
17475 /* P48I instruction pool */
17479 NM_ADDIUGP48
= 0x02,
17480 NM_ADDIUPC48
= 0x03,
17485 /* P.U12 instruction pool */
17494 NM_ADDIUNEG
= 0x08,
17501 /* POOL32F instruction pool */
17503 NM_POOL32F_0
= 0x00,
17504 NM_POOL32F_3
= 0x03,
17505 NM_POOL32F_5
= 0x05,
17508 /* POOL32S instruction pool */
17510 NM_POOL32S_0
= 0x00,
17511 NM_POOL32S_4
= 0x04,
17514 /* P.LUI instruction pool */
17520 /* P.GP.BH instruction pool */
17525 NM_ADDIUGP_B
= 0x03,
17528 NM_P_GP_CP1
= 0x06,
17531 /* P.LS.U12 instruction pool */
17536 NM_P_PREFU12
= 0x03,
17549 /* P.LS.S9 instruction pool */
17555 NM_P_LS_UAWM
= 0x05,
17558 /* P.BAL instruction pool */
17564 /* P.J instruction pool */
17567 NM_JALRC_HB
= 0x01,
17568 NM_P_BALRSC
= 0x08,
17571 /* P.BR1 instruction pool */
17579 /* P.BR2 instruction pool */
17586 /* P.BRI instruction pool */
17598 /* P16.SHIFT instruction pool */
17604 /* POOL16C instruction pool */
17606 NM_POOL16C_0
= 0x00,
17610 /* P16.A1 instruction pool */
17612 NM_ADDIUR1SP
= 0x01,
17615 /* P16.A2 instruction pool */
17618 NM_P_ADDIURS5
= 0x01,
17621 /* P16.ADDU instruction pool */
17627 /* P16.SR instruction pool */
17630 NM_RESTORE_JRC16
= 0x01,
17633 /* P16.4X4 instruction pool */
17639 /* P16.LB instruction pool */
17646 /* P16.LH instruction pool */
17653 /* P.RI instruction pool */
17656 NM_P_SYSCALL
= 0x01,
17661 /* POOL32A0 instruction pool */
17696 NM_D_E_MT_VPE
= 0x56,
17704 /* CRC32 instruction pool */
17714 /* POOL32A5 instruction pool */
17716 NM_CMP_EQ_PH
= 0x00,
17717 NM_CMP_LT_PH
= 0x08,
17718 NM_CMP_LE_PH
= 0x10,
17719 NM_CMPGU_EQ_QB
= 0x18,
17720 NM_CMPGU_LT_QB
= 0x20,
17721 NM_CMPGU_LE_QB
= 0x28,
17722 NM_CMPGDU_EQ_QB
= 0x30,
17723 NM_CMPGDU_LT_QB
= 0x38,
17724 NM_CMPGDU_LE_QB
= 0x40,
17725 NM_CMPU_EQ_QB
= 0x48,
17726 NM_CMPU_LT_QB
= 0x50,
17727 NM_CMPU_LE_QB
= 0x58,
17728 NM_ADDQ_S_W
= 0x60,
17729 NM_SUBQ_S_W
= 0x68,
17733 NM_ADDQ_S_PH
= 0x01,
17734 NM_ADDQH_R_PH
= 0x09,
17735 NM_ADDQH_R_W
= 0x11,
17736 NM_ADDU_S_QB
= 0x19,
17737 NM_ADDU_S_PH
= 0x21,
17738 NM_ADDUH_R_QB
= 0x29,
17739 NM_SHRAV_R_PH
= 0x31,
17740 NM_SHRAV_R_QB
= 0x39,
17741 NM_SUBQ_S_PH
= 0x41,
17742 NM_SUBQH_R_PH
= 0x49,
17743 NM_SUBQH_R_W
= 0x51,
17744 NM_SUBU_S_QB
= 0x59,
17745 NM_SUBU_S_PH
= 0x61,
17746 NM_SUBUH_R_QB
= 0x69,
17747 NM_SHLLV_S_PH
= 0x71,
17748 NM_PRECR_SRA_R_PH_W
= 0x79,
17750 NM_MULEU_S_PH_QBL
= 0x12,
17751 NM_MULEU_S_PH_QBR
= 0x1a,
17752 NM_MULQ_RS_PH
= 0x22,
17753 NM_MULQ_S_PH
= 0x2a,
17754 NM_MULQ_RS_W
= 0x32,
17755 NM_MULQ_S_W
= 0x3a,
17758 NM_SHRAV_R_W
= 0x5a,
17759 NM_SHRLV_PH
= 0x62,
17760 NM_SHRLV_QB
= 0x6a,
17761 NM_SHLLV_QB
= 0x72,
17762 NM_SHLLV_S_W
= 0x7a,
17766 NM_MULEQ_S_W_PHL
= 0x04,
17767 NM_MULEQ_S_W_PHR
= 0x0c,
17769 NM_MUL_S_PH
= 0x05,
17770 NM_PRECR_QB_PH
= 0x0d,
17771 NM_PRECRQ_QB_PH
= 0x15,
17772 NM_PRECRQ_PH_W
= 0x1d,
17773 NM_PRECRQ_RS_PH_W
= 0x25,
17774 NM_PRECRQU_S_QB_PH
= 0x2d,
17775 NM_PACKRL_PH
= 0x35,
17779 NM_SHRA_R_W
= 0x5e,
17780 NM_SHRA_R_PH
= 0x66,
17781 NM_SHLL_S_PH
= 0x76,
17782 NM_SHLL_S_W
= 0x7e,
17787 /* POOL32A7 instruction pool */
17792 NM_POOL32AXF
= 0x07,
17795 /* P.SR instruction pool */
17801 /* P.SHIFT instruction pool */
17809 /* P.ROTX instruction pool */
17814 /* P.INS instruction pool */
17819 /* P.EXT instruction pool */
17824 /* POOL32F_0 (fmt) instruction pool */
17829 NM_SELEQZ_S
= 0x07,
17830 NM_SELEQZ_D
= 0x47,
17834 NM_SELNEZ_S
= 0x0f,
17835 NM_SELNEZ_D
= 0x4f,
17850 /* POOL32F_3 instruction pool */
17854 NM_MINA_FMT
= 0x04,
17855 NM_MAXA_FMT
= 0x05,
17856 NM_POOL32FXF
= 0x07,
17859 /* POOL32F_5 instruction pool */
17861 NM_CMP_CONDN_S
= 0x00,
17862 NM_CMP_CONDN_D
= 0x02,
17865 /* P.GP.LH instruction pool */
17871 /* P.GP.SH instruction pool */
17876 /* P.GP.CP1 instruction pool */
17884 /* P.LS.S0 instruction pool */
17901 NM_P_PREFS9
= 0x03,
17907 /* P.LS.S1 instruction pool */
17909 NM_ASET_ACLR
= 0x02,
17917 /* P.LS.E0 instruction pool */
17933 /* P.PREFE instruction pool */
17939 /* P.LLE instruction pool */
17945 /* P.SCE instruction pool */
17951 /* P.LS.WM instruction pool */
17957 /* P.LS.UAWM instruction pool */
17963 /* P.BR3A instruction pool */
17969 NM_BPOSGE32C
= 0x04,
17972 /* P16.RI instruction pool */
17974 NM_P16_SYSCALL
= 0x01,
17979 /* POOL16C_0 instruction pool */
17981 NM_POOL16C_00
= 0x00,
17984 /* P16.JRC instruction pool */
17990 /* P.SYSCALL instruction pool */
17996 /* P.TRAP instruction pool */
18002 /* P.CMOVE instruction pool */
18008 /* POOL32Axf instruction pool */
18010 NM_POOL32AXF_1
= 0x01,
18011 NM_POOL32AXF_2
= 0x02,
18012 NM_POOL32AXF_4
= 0x04,
18013 NM_POOL32AXF_5
= 0x05,
18014 NM_POOL32AXF_7
= 0x07,
18017 /* POOL32Axf_1 instruction pool */
18019 NM_POOL32AXF_1_0
= 0x00,
18020 NM_POOL32AXF_1_1
= 0x01,
18021 NM_POOL32AXF_1_3
= 0x03,
18022 NM_POOL32AXF_1_4
= 0x04,
18023 NM_POOL32AXF_1_5
= 0x05,
18024 NM_POOL32AXF_1_7
= 0x07,
18027 /* POOL32Axf_2 instruction pool */
18029 NM_POOL32AXF_2_0_7
= 0x00,
18030 NM_POOL32AXF_2_8_15
= 0x01,
18031 NM_POOL32AXF_2_16_23
= 0x02,
18032 NM_POOL32AXF_2_24_31
= 0x03,
18035 /* POOL32Axf_7 instruction pool */
18037 NM_SHRA_R_QB
= 0x0,
18042 /* POOL32Axf_1_0 instruction pool */
18050 /* POOL32Axf_1_1 instruction pool */
18056 /* POOL32Axf_1_3 instruction pool */
18064 /* POOL32Axf_1_4 instruction pool */
18070 /* POOL32Axf_1_5 instruction pool */
18072 NM_MAQ_S_W_PHR
= 0x0,
18073 NM_MAQ_S_W_PHL
= 0x1,
18074 NM_MAQ_SA_W_PHR
= 0x2,
18075 NM_MAQ_SA_W_PHL
= 0x3,
18078 /* POOL32Axf_1_7 instruction pool */
18082 NM_EXTR_RS_W
= 0x2,
18086 /* POOL32Axf_2_0_7 instruction pool */
18089 NM_DPAQ_S_W_PH
= 0x1,
18091 NM_DPSQ_S_W_PH
= 0x3,
18098 /* POOL32Axf_2_8_15 instruction pool */
18100 NM_DPAX_W_PH
= 0x0,
18101 NM_DPAQ_SA_L_W
= 0x1,
18102 NM_DPSX_W_PH
= 0x2,
18103 NM_DPSQ_SA_L_W
= 0x3,
18106 NM_EXTRV_R_W
= 0x7,
18109 /* POOL32Axf_2_16_23 instruction pool */
18111 NM_DPAU_H_QBL
= 0x0,
18112 NM_DPAQX_S_W_PH
= 0x1,
18113 NM_DPSU_H_QBL
= 0x2,
18114 NM_DPSQX_S_W_PH
= 0x3,
18117 NM_MULSA_W_PH
= 0x6,
18118 NM_EXTRV_RS_W
= 0x7,
18121 /* POOL32Axf_2_24_31 instruction pool */
18123 NM_DPAU_H_QBR
= 0x0,
18124 NM_DPAQX_SA_W_PH
= 0x1,
18125 NM_DPSU_H_QBR
= 0x2,
18126 NM_DPSQX_SA_W_PH
= 0x3,
18129 NM_MULSAQ_S_W_PH
= 0x6,
18130 NM_EXTRV_S_H
= 0x7,
18133 /* POOL32Axf_{4, 5} instruction pool */
18152 /* nanoMIPS DSP instructions */
18153 NM_ABSQ_S_QB
= 0x00,
18154 NM_ABSQ_S_PH
= 0x08,
18155 NM_ABSQ_S_W
= 0x10,
18156 NM_PRECEQ_W_PHL
= 0x28,
18157 NM_PRECEQ_W_PHR
= 0x30,
18158 NM_PRECEQU_PH_QBL
= 0x38,
18159 NM_PRECEQU_PH_QBR
= 0x48,
18160 NM_PRECEU_PH_QBL
= 0x58,
18161 NM_PRECEU_PH_QBR
= 0x68,
18162 NM_PRECEQU_PH_QBLA
= 0x39,
18163 NM_PRECEQU_PH_QBRA
= 0x49,
18164 NM_PRECEU_PH_QBLA
= 0x59,
18165 NM_PRECEU_PH_QBRA
= 0x69,
18166 NM_REPLV_PH
= 0x01,
18167 NM_REPLV_QB
= 0x09,
18170 NM_RADDU_W_QB
= 0x78,
18176 /* PP.SR instruction pool */
18180 NM_RESTORE_JRC
= 0x03,
18183 /* P.SR.F instruction pool */
18186 NM_RESTOREF
= 0x01,
18189 /* P16.SYSCALL instruction pool */
18191 NM_SYSCALL16
= 0x00,
18192 NM_HYPCALL16
= 0x01,
18195 /* POOL16C_00 instruction pool */
18203 /* PP.LSX and PP.LSXS instruction pool */
18241 /* ERETx instruction pool */
18247 /* POOL32FxF_{0, 1} insturction pool */
18256 NM_CVT_S_PL
= 0x84,
18257 NM_CVT_S_PU
= 0xa4,
18259 NM_CVT_L_S
= 0x004,
18260 NM_CVT_L_D
= 0x104,
18261 NM_CVT_W_S
= 0x024,
18262 NM_CVT_W_D
= 0x124,
18264 NM_RSQRT_S
= 0x008,
18265 NM_RSQRT_D
= 0x108,
18270 NM_RECIP_S
= 0x048,
18271 NM_RECIP_D
= 0x148,
18273 NM_FLOOR_L_S
= 0x00c,
18274 NM_FLOOR_L_D
= 0x10c,
18276 NM_FLOOR_W_S
= 0x02c,
18277 NM_FLOOR_W_D
= 0x12c,
18279 NM_CEIL_L_S
= 0x04c,
18280 NM_CEIL_L_D
= 0x14c,
18281 NM_CEIL_W_S
= 0x06c,
18282 NM_CEIL_W_D
= 0x16c,
18283 NM_TRUNC_L_S
= 0x08c,
18284 NM_TRUNC_L_D
= 0x18c,
18285 NM_TRUNC_W_S
= 0x0ac,
18286 NM_TRUNC_W_D
= 0x1ac,
18287 NM_ROUND_L_S
= 0x0cc,
18288 NM_ROUND_L_D
= 0x1cc,
18289 NM_ROUND_W_S
= 0x0ec,
18290 NM_ROUND_W_D
= 0x1ec,
18298 NM_CVT_D_S
= 0x04d,
18299 NM_CVT_D_W
= 0x0cd,
18300 NM_CVT_D_L
= 0x14d,
18301 NM_CVT_S_D
= 0x06d,
18302 NM_CVT_S_W
= 0x0ed,
18303 NM_CVT_S_L
= 0x16d,
18306 /* P.LL instruction pool */
18312 /* P.SC instruction pool */
18318 /* P.DVP instruction pool */
18327 * nanoMIPS decoding engine
18332 /* extraction utilities */
18334 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18335 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18336 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18337 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18338 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18339 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 * A 3-bit register field selects $16..$19 (s0-s3) for encodings 0-3
 * and $4..$7 (a0-a3) for encodings 4-7.
 */
static inline int decode_gpr_gpr3(int r)
{
    int field = r & 0x7;

    /* Encodings 0-3 map to $16..$19; encodings 4-7 map to themselves. */
    return (field < 4) ? field + 16 : field;
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 * Identical to the 'gpr3' mapping except that encoding 0 selects $zero
 * (register 0) instead of $16, for store-source operands.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    int field = r & 0x7;

    if (field == 0) {
        return 0;   /* encoding 0 selects $zero */
    }
    return (field < 4) ? field + 16 : field;
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 * A 4-bit register field selects $8..$11 (t0-t3) for encodings 0-3,
 * $4..$7 (a0-a3) for encodings 4-7, and $16..$23 (s0-s7) for
 * encodings 8-15.
 */
static inline int decode_gpr_gpr4(int r)
{
    int field = r & 0xf;

    if (field < 4) {
        return field + 8;   /* $8..$11 */
    } else if (field < 8) {
        return field;       /* $4..$7 */
    }
    return field + 8;       /* $16..$23 */
}
18366 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    /*
     * nanoMIPS 'gpr4.zero' register decode: identical to 'gpr4' except
     * that encoding 3 selects $zero instead of t3 (GPR 11).
     */
    int idx = r & 0xf;

    if (idx == 3) {
        return 0;
    }
    return (idx >= 4 && idx < 8) ? idx : idx + 8;
}
18376 /* extraction utilities */
18378 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18379 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18380 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18381 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18382 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18383 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Emit code adding the signed immediate u to the stack pointer ($29). */
static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}
/*
 * Emit code for the nanoMIPS SAVE operation: store 'count' GPRs,
 * starting at register rt and wrapping within the same 16-register
 * bank, to successive words just below the stack pointer, then
 * decrement $sp by the (unsigned) frame size u.  If gp is nonzero,
 * the last register stored is $28 (gp) instead of the sequential one.
 */
static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        /* (rt & 0x10) keeps the bank; (rt + counter) & 0x1f wraps in it */
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        /* word slots at sp-4, sp-8, ... */
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
/*
 * Emit code for the nanoMIPS RESTORE operation: reload 'count' GPRs
 * (same register-selection rule as gen_save) from the top of the
 * frame of size u, sign-extending each 32-bit value, then increment
 * $sp by u.  If gp is nonzero, the last register restored is $28 (gp).
 */
static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        /* (rt & 0x10) keeps the bank; (rt + counter) & 0x1f wraps in it */
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        /* word slots at sp+u-4, sp+u-8, ... (mirrors gen_save layout) */
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
/*
 * Decode the 16-bit nanoMIPS P16C register-to-register logic pool:
 * NOT16 / AND16 / XOR16 / OR16, selected by opcode bits [3:2].
 * Both operands use the 3-bit 'gpr3' register encoding.
 */
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
    case NM_NOT16:
        /* rt = ~rs, implemented as NOR with $zero */
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case NM_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case NM_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case NM_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    }
}
18463 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18465 int rt
= extract32(ctx
->opcode
, 21, 5);
18466 int rs
= extract32(ctx
->opcode
, 16, 5);
18467 int rd
= extract32(ctx
->opcode
, 11, 5);
18469 switch (extract32(ctx
->opcode
, 3, 7)) {
18471 switch (extract32(ctx
->opcode
, 10, 1)) {
18474 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18478 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18484 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18488 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18491 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18494 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18497 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18500 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18503 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18506 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18509 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18513 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18516 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18519 switch (extract32(ctx
->opcode
, 10, 1)) {
18521 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18524 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18529 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18532 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18535 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18538 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18541 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18546 #ifndef CONFIG_USER_ONLY
18547 TCGv t0
= tcg_temp_new();
18548 switch (extract32(ctx
->opcode
, 10, 1)) {
18551 check_cp0_enabled(ctx
);
18552 gen_helper_dvp(t0
, cpu_env
);
18553 gen_store_gpr(t0
, rt
);
18558 check_cp0_enabled(ctx
);
18559 gen_helper_evp(t0
, cpu_env
);
18560 gen_store_gpr(t0
, rt
);
18567 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18572 TCGv t0
= tcg_temp_new();
18573 TCGv t1
= tcg_temp_new();
18574 TCGv t2
= tcg_temp_new();
18576 gen_load_gpr(t1
, rs
);
18577 gen_load_gpr(t2
, rt
);
18578 tcg_gen_add_tl(t0
, t1
, t2
);
18579 tcg_gen_ext32s_tl(t0
, t0
);
18580 tcg_gen_xor_tl(t1
, t1
, t2
);
18581 tcg_gen_xor_tl(t2
, t0
, t2
);
18582 tcg_gen_andc_tl(t1
, t2
, t1
);
18584 /* operands of same sign, result different sign */
18585 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18586 gen_store_gpr(t0
, rd
);
18594 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18597 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18600 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18603 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18606 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18609 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18612 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18615 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18617 #ifndef CONFIG_USER_ONLY
18619 check_cp0_enabled(ctx
);
18621 /* Treat as NOP. */
18624 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18627 check_cp0_enabled(ctx
);
18629 TCGv t0
= tcg_temp_new();
18631 gen_load_gpr(t0
, rt
);
18632 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18636 case NM_D_E_MT_VPE
:
18638 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18639 TCGv t0
= tcg_temp_new();
18646 gen_helper_dmt(t0
);
18647 gen_store_gpr(t0
, rt
);
18648 } else if (rs
== 0) {
18651 gen_helper_dvpe(t0
, cpu_env
);
18652 gen_store_gpr(t0
, rt
);
18654 generate_exception_end(ctx
, EXCP_RI
);
18661 gen_helper_emt(t0
);
18662 gen_store_gpr(t0
, rt
);
18663 } else if (rs
== 0) {
18666 gen_helper_evpe(t0
, cpu_env
);
18667 gen_store_gpr(t0
, rt
);
18669 generate_exception_end(ctx
, EXCP_RI
);
18680 TCGv t0
= tcg_temp_new();
18681 TCGv t1
= tcg_temp_new();
18683 gen_load_gpr(t0
, rt
);
18684 gen_load_gpr(t1
, rs
);
18685 gen_helper_fork(t0
, t1
);
18692 check_cp0_enabled(ctx
);
18694 /* Treat as NOP. */
18697 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18698 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18702 check_cp0_enabled(ctx
);
18703 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18704 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18709 TCGv t0
= tcg_temp_new();
18711 gen_load_gpr(t0
, rs
);
18712 gen_helper_yield(t0
, cpu_env
, t0
);
18713 gen_store_gpr(t0
, rt
);
18719 generate_exception_end(ctx
, EXCP_RI
);
18725 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18726 int ret
, int v1
, int v2
)
18732 t0
= tcg_temp_new_i32();
18734 v0_t
= tcg_temp_new();
18735 v1_t
= tcg_temp_new();
18737 tcg_gen_movi_i32(t0
, v2
>> 3);
18739 gen_load_gpr(v0_t
, ret
);
18740 gen_load_gpr(v1_t
, v1
);
18743 case NM_MAQ_S_W_PHR
:
18745 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18747 case NM_MAQ_S_W_PHL
:
18749 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18751 case NM_MAQ_SA_W_PHR
:
18753 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18755 case NM_MAQ_SA_W_PHL
:
18757 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18760 generate_exception_end(ctx
, EXCP_RI
);
18764 tcg_temp_free_i32(t0
);
18766 tcg_temp_free(v0_t
);
18767 tcg_temp_free(v1_t
);
18771 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18772 int ret
, int v1
, int v2
)
18775 TCGv t0
= tcg_temp_new();
18776 TCGv t1
= tcg_temp_new();
18777 TCGv v0_t
= tcg_temp_new();
18779 gen_load_gpr(v0_t
, v1
);
18782 case NM_POOL32AXF_1_0
:
18784 switch (extract32(ctx
->opcode
, 12, 2)) {
18786 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18789 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18792 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18795 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18799 case NM_POOL32AXF_1_1
:
18801 switch (extract32(ctx
->opcode
, 12, 2)) {
18803 tcg_gen_movi_tl(t0
, v2
);
18804 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18807 tcg_gen_movi_tl(t0
, v2
>> 3);
18808 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18811 generate_exception_end(ctx
, EXCP_RI
);
18815 case NM_POOL32AXF_1_3
:
18817 imm
= extract32(ctx
->opcode
, 14, 7);
18818 switch (extract32(ctx
->opcode
, 12, 2)) {
18820 tcg_gen_movi_tl(t0
, imm
);
18821 gen_helper_rddsp(t0
, t0
, cpu_env
);
18822 gen_store_gpr(t0
, ret
);
18825 gen_load_gpr(t0
, ret
);
18826 tcg_gen_movi_tl(t1
, imm
);
18827 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18830 tcg_gen_movi_tl(t0
, v2
>> 3);
18831 tcg_gen_movi_tl(t1
, v1
);
18832 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18833 gen_store_gpr(t0
, ret
);
18836 tcg_gen_movi_tl(t0
, v2
>> 3);
18837 tcg_gen_movi_tl(t1
, v1
);
18838 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18839 gen_store_gpr(t0
, ret
);
18843 case NM_POOL32AXF_1_4
:
18845 tcg_gen_movi_tl(t0
, v2
>> 2);
18846 switch (extract32(ctx
->opcode
, 12, 1)) {
18848 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18849 gen_store_gpr(t0
, ret
);
18852 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18853 gen_store_gpr(t0
, ret
);
18857 case NM_POOL32AXF_1_5
:
18858 opc
= extract32(ctx
->opcode
, 12, 2);
18859 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18861 case NM_POOL32AXF_1_7
:
18863 tcg_gen_movi_tl(t0
, v2
>> 3);
18864 tcg_gen_movi_tl(t1
, v1
);
18865 switch (extract32(ctx
->opcode
, 12, 2)) {
18867 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18868 gen_store_gpr(t0
, ret
);
18871 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18872 gen_store_gpr(t0
, ret
);
18875 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18876 gen_store_gpr(t0
, ret
);
18879 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18880 gen_store_gpr(t0
, ret
);
18885 generate_exception_end(ctx
, EXCP_RI
);
18891 tcg_temp_free(v0_t
);
18894 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18895 TCGv v0
, TCGv v1
, int rd
)
18899 t0
= tcg_temp_new_i32();
18901 tcg_gen_movi_i32(t0
, rd
>> 3);
18904 case NM_POOL32AXF_2_0_7
:
18905 switch (extract32(ctx
->opcode
, 9, 3)) {
18908 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18910 case NM_DPAQ_S_W_PH
:
18912 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18916 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18918 case NM_DPSQ_S_W_PH
:
18920 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18923 generate_exception_end(ctx
, EXCP_RI
);
18927 case NM_POOL32AXF_2_8_15
:
18928 switch (extract32(ctx
->opcode
, 9, 3)) {
18931 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18933 case NM_DPAQ_SA_L_W
:
18935 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18939 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18941 case NM_DPSQ_SA_L_W
:
18943 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18946 generate_exception_end(ctx
, EXCP_RI
);
18950 case NM_POOL32AXF_2_16_23
:
18951 switch (extract32(ctx
->opcode
, 9, 3)) {
18952 case NM_DPAU_H_QBL
:
18954 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18956 case NM_DPAQX_S_W_PH
:
18958 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18960 case NM_DPSU_H_QBL
:
18962 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18964 case NM_DPSQX_S_W_PH
:
18966 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18968 case NM_MULSA_W_PH
:
18970 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18973 generate_exception_end(ctx
, EXCP_RI
);
18977 case NM_POOL32AXF_2_24_31
:
18978 switch (extract32(ctx
->opcode
, 9, 3)) {
18979 case NM_DPAU_H_QBR
:
18981 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18983 case NM_DPAQX_SA_W_PH
:
18985 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18987 case NM_DPSU_H_QBR
:
18989 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18991 case NM_DPSQX_SA_W_PH
:
18993 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18995 case NM_MULSAQ_S_W_PH
:
18997 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19000 generate_exception_end(ctx
, EXCP_RI
);
19005 generate_exception_end(ctx
, EXCP_RI
);
19009 tcg_temp_free_i32(t0
);
19012 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19013 int rt
, int rs
, int rd
)
19016 TCGv t0
= tcg_temp_new();
19017 TCGv t1
= tcg_temp_new();
19018 TCGv v0_t
= tcg_temp_new();
19019 TCGv v1_t
= tcg_temp_new();
19021 gen_load_gpr(v0_t
, rt
);
19022 gen_load_gpr(v1_t
, rs
);
19025 case NM_POOL32AXF_2_0_7
:
19026 switch (extract32(ctx
->opcode
, 9, 3)) {
19028 case NM_DPAQ_S_W_PH
:
19030 case NM_DPSQ_S_W_PH
:
19031 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19036 gen_load_gpr(t0
, rs
);
19038 if (rd
!= 0 && rd
!= 2) {
19039 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19040 tcg_gen_ext32u_tl(t0
, t0
);
19041 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19042 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19044 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19050 int acc
= extract32(ctx
->opcode
, 14, 2);
19051 TCGv_i64 t2
= tcg_temp_new_i64();
19052 TCGv_i64 t3
= tcg_temp_new_i64();
19054 gen_load_gpr(t0
, rt
);
19055 gen_load_gpr(t1
, rs
);
19056 tcg_gen_ext_tl_i64(t2
, t0
);
19057 tcg_gen_ext_tl_i64(t3
, t1
);
19058 tcg_gen_mul_i64(t2
, t2
, t3
);
19059 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19060 tcg_gen_add_i64(t2
, t2
, t3
);
19061 tcg_temp_free_i64(t3
);
19062 gen_move_low32(cpu_LO
[acc
], t2
);
19063 gen_move_high32(cpu_HI
[acc
], t2
);
19064 tcg_temp_free_i64(t2
);
19070 int acc
= extract32(ctx
->opcode
, 14, 2);
19071 TCGv_i32 t2
= tcg_temp_new_i32();
19072 TCGv_i32 t3
= tcg_temp_new_i32();
19074 gen_load_gpr(t0
, rs
);
19075 gen_load_gpr(t1
, rt
);
19076 tcg_gen_trunc_tl_i32(t2
, t0
);
19077 tcg_gen_trunc_tl_i32(t3
, t1
);
19078 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19079 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19080 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19081 tcg_temp_free_i32(t2
);
19082 tcg_temp_free_i32(t3
);
19087 gen_load_gpr(v1_t
, rs
);
19088 tcg_gen_movi_tl(t0
, rd
>> 3);
19089 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19090 gen_store_gpr(t0
, ret
);
19094 case NM_POOL32AXF_2_8_15
:
19095 switch (extract32(ctx
->opcode
, 9, 3)) {
19097 case NM_DPAQ_SA_L_W
:
19099 case NM_DPSQ_SA_L_W
:
19100 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19105 int acc
= extract32(ctx
->opcode
, 14, 2);
19106 TCGv_i64 t2
= tcg_temp_new_i64();
19107 TCGv_i64 t3
= tcg_temp_new_i64();
19109 gen_load_gpr(t0
, rs
);
19110 gen_load_gpr(t1
, rt
);
19111 tcg_gen_ext32u_tl(t0
, t0
);
19112 tcg_gen_ext32u_tl(t1
, t1
);
19113 tcg_gen_extu_tl_i64(t2
, t0
);
19114 tcg_gen_extu_tl_i64(t3
, t1
);
19115 tcg_gen_mul_i64(t2
, t2
, t3
);
19116 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19117 tcg_gen_add_i64(t2
, t2
, t3
);
19118 tcg_temp_free_i64(t3
);
19119 gen_move_low32(cpu_LO
[acc
], t2
);
19120 gen_move_high32(cpu_HI
[acc
], t2
);
19121 tcg_temp_free_i64(t2
);
19127 int acc
= extract32(ctx
->opcode
, 14, 2);
19128 TCGv_i32 t2
= tcg_temp_new_i32();
19129 TCGv_i32 t3
= tcg_temp_new_i32();
19131 gen_load_gpr(t0
, rs
);
19132 gen_load_gpr(t1
, rt
);
19133 tcg_gen_trunc_tl_i32(t2
, t0
);
19134 tcg_gen_trunc_tl_i32(t3
, t1
);
19135 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19136 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19137 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19138 tcg_temp_free_i32(t2
);
19139 tcg_temp_free_i32(t3
);
19144 tcg_gen_movi_tl(t0
, rd
>> 3);
19145 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19146 gen_store_gpr(t0
, ret
);
19149 generate_exception_end(ctx
, EXCP_RI
);
19153 case NM_POOL32AXF_2_16_23
:
19154 switch (extract32(ctx
->opcode
, 9, 3)) {
19155 case NM_DPAU_H_QBL
:
19156 case NM_DPAQX_S_W_PH
:
19157 case NM_DPSU_H_QBL
:
19158 case NM_DPSQX_S_W_PH
:
19159 case NM_MULSA_W_PH
:
19160 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19164 tcg_gen_movi_tl(t0
, rd
>> 3);
19165 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19166 gen_store_gpr(t0
, ret
);
19171 int acc
= extract32(ctx
->opcode
, 14, 2);
19172 TCGv_i64 t2
= tcg_temp_new_i64();
19173 TCGv_i64 t3
= tcg_temp_new_i64();
19175 gen_load_gpr(t0
, rs
);
19176 gen_load_gpr(t1
, rt
);
19177 tcg_gen_ext_tl_i64(t2
, t0
);
19178 tcg_gen_ext_tl_i64(t3
, t1
);
19179 tcg_gen_mul_i64(t2
, t2
, t3
);
19180 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19181 tcg_gen_sub_i64(t2
, t3
, t2
);
19182 tcg_temp_free_i64(t3
);
19183 gen_move_low32(cpu_LO
[acc
], t2
);
19184 gen_move_high32(cpu_HI
[acc
], t2
);
19185 tcg_temp_free_i64(t2
);
19188 case NM_EXTRV_RS_W
:
19190 tcg_gen_movi_tl(t0
, rd
>> 3);
19191 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19192 gen_store_gpr(t0
, ret
);
19196 case NM_POOL32AXF_2_24_31
:
19197 switch (extract32(ctx
->opcode
, 9, 3)) {
19198 case NM_DPAU_H_QBR
:
19199 case NM_DPAQX_SA_W_PH
:
19200 case NM_DPSU_H_QBR
:
19201 case NM_DPSQX_SA_W_PH
:
19202 case NM_MULSAQ_S_W_PH
:
19203 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19207 tcg_gen_movi_tl(t0
, rd
>> 3);
19208 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19209 gen_store_gpr(t0
, ret
);
19214 int acc
= extract32(ctx
->opcode
, 14, 2);
19215 TCGv_i64 t2
= tcg_temp_new_i64();
19216 TCGv_i64 t3
= tcg_temp_new_i64();
19218 gen_load_gpr(t0
, rs
);
19219 gen_load_gpr(t1
, rt
);
19220 tcg_gen_ext32u_tl(t0
, t0
);
19221 tcg_gen_ext32u_tl(t1
, t1
);
19222 tcg_gen_extu_tl_i64(t2
, t0
);
19223 tcg_gen_extu_tl_i64(t3
, t1
);
19224 tcg_gen_mul_i64(t2
, t2
, t3
);
19225 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19226 tcg_gen_sub_i64(t2
, t3
, t2
);
19227 tcg_temp_free_i64(t3
);
19228 gen_move_low32(cpu_LO
[acc
], t2
);
19229 gen_move_high32(cpu_HI
[acc
], t2
);
19230 tcg_temp_free_i64(t2
);
19235 tcg_gen_movi_tl(t0
, rd
>> 3);
19236 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19237 gen_store_gpr(t0
, ret
);
19242 generate_exception_end(ctx
, EXCP_RI
);
19249 tcg_temp_free(v0_t
);
19250 tcg_temp_free(v1_t
);
19253 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19257 TCGv t0
= tcg_temp_new();
19258 TCGv v0_t
= tcg_temp_new();
19260 gen_load_gpr(v0_t
, rs
);
19265 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19266 gen_store_gpr(v0_t
, ret
);
19270 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19271 gen_store_gpr(v0_t
, ret
);
19275 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19276 gen_store_gpr(v0_t
, ret
);
19278 case NM_PRECEQ_W_PHL
:
19280 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19281 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19282 gen_store_gpr(v0_t
, ret
);
19284 case NM_PRECEQ_W_PHR
:
19286 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19287 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19288 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19289 gen_store_gpr(v0_t
, ret
);
19291 case NM_PRECEQU_PH_QBL
:
19293 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19294 gen_store_gpr(v0_t
, ret
);
19296 case NM_PRECEQU_PH_QBR
:
19298 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19299 gen_store_gpr(v0_t
, ret
);
19301 case NM_PRECEQU_PH_QBLA
:
19303 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19304 gen_store_gpr(v0_t
, ret
);
19306 case NM_PRECEQU_PH_QBRA
:
19308 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19309 gen_store_gpr(v0_t
, ret
);
19311 case NM_PRECEU_PH_QBL
:
19313 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19314 gen_store_gpr(v0_t
, ret
);
19316 case NM_PRECEU_PH_QBR
:
19318 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19319 gen_store_gpr(v0_t
, ret
);
19321 case NM_PRECEU_PH_QBLA
:
19323 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19324 gen_store_gpr(v0_t
, ret
);
19326 case NM_PRECEU_PH_QBRA
:
19328 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19329 gen_store_gpr(v0_t
, ret
);
19333 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19334 tcg_gen_shli_tl(t0
, v0_t
, 16);
19335 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19336 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19337 gen_store_gpr(v0_t
, ret
);
19341 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19342 tcg_gen_shli_tl(t0
, v0_t
, 8);
19343 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19344 tcg_gen_shli_tl(t0
, v0_t
, 16);
19345 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19346 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19347 gen_store_gpr(v0_t
, ret
);
19351 gen_helper_bitrev(v0_t
, v0_t
);
19352 gen_store_gpr(v0_t
, ret
);
19357 TCGv tv0
= tcg_temp_new();
19359 gen_load_gpr(tv0
, rt
);
19360 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19361 gen_store_gpr(v0_t
, ret
);
19362 tcg_temp_free(tv0
);
19365 case NM_RADDU_W_QB
:
19367 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19368 gen_store_gpr(v0_t
, ret
);
19371 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19375 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19379 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19382 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19385 generate_exception_end(ctx
, EXCP_RI
);
19389 tcg_temp_free(v0_t
);
19393 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19394 int rt
, int rs
, int rd
)
19396 TCGv t0
= tcg_temp_new();
19397 TCGv rs_t
= tcg_temp_new();
19399 gen_load_gpr(rs_t
, rs
);
19404 tcg_gen_movi_tl(t0
, rd
>> 2);
19405 switch (extract32(ctx
->opcode
, 12, 1)) {
19408 gen_helper_shra_qb(t0
, t0
, rs_t
);
19409 gen_store_gpr(t0
, rt
);
19413 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19414 gen_store_gpr(t0
, rt
);
19420 tcg_gen_movi_tl(t0
, rd
>> 1);
19421 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19422 gen_store_gpr(t0
, rt
);
19428 target_long result
;
19429 imm
= extract32(ctx
->opcode
, 13, 8);
19430 result
= (uint32_t)imm
<< 24 |
19431 (uint32_t)imm
<< 16 |
19432 (uint32_t)imm
<< 8 |
19434 result
= (int32_t)result
;
19435 tcg_gen_movi_tl(t0
, result
);
19436 gen_store_gpr(t0
, rt
);
19440 generate_exception_end(ctx
, EXCP_RI
);
19444 tcg_temp_free(rs_t
);
19448 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19450 int rt
= extract32(ctx
->opcode
, 21, 5);
19451 int rs
= extract32(ctx
->opcode
, 16, 5);
19452 int rd
= extract32(ctx
->opcode
, 11, 5);
19454 switch (extract32(ctx
->opcode
, 6, 3)) {
19455 case NM_POOL32AXF_1
:
19457 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19458 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19461 case NM_POOL32AXF_2
:
19463 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19464 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19467 case NM_POOL32AXF_4
:
19469 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19470 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19473 case NM_POOL32AXF_5
:
19474 switch (extract32(ctx
->opcode
, 9, 7)) {
19475 #ifndef CONFIG_USER_ONLY
19477 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19480 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19483 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19486 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19489 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19492 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19495 check_cp0_enabled(ctx
);
19497 TCGv t0
= tcg_temp_new();
19499 save_cpu_state(ctx
, 1);
19500 gen_helper_di(t0
, cpu_env
);
19501 gen_store_gpr(t0
, rt
);
19502 /* Stop translation as we may have switched the execution mode */
19503 ctx
->base
.is_jmp
= DISAS_STOP
;
19508 check_cp0_enabled(ctx
);
19510 TCGv t0
= tcg_temp_new();
19512 save_cpu_state(ctx
, 1);
19513 gen_helper_ei(t0
, cpu_env
);
19514 gen_store_gpr(t0
, rt
);
19515 /* Stop translation as we may have switched the execution mode */
19516 ctx
->base
.is_jmp
= DISAS_STOP
;
19521 gen_load_srsgpr(rs
, rt
);
19524 gen_store_srsgpr(rs
, rt
);
19527 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19530 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19533 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19537 generate_exception_end(ctx
, EXCP_RI
);
19541 case NM_POOL32AXF_7
:
19543 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19544 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19548 generate_exception_end(ctx
, EXCP_RI
);
19553 /* Immediate Value Compact Branches */
19554 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19555 int rt
, int32_t imm
, int32_t offset
)
19558 int bcond_compute
= 0;
19559 TCGv t0
= tcg_temp_new();
19560 TCGv t1
= tcg_temp_new();
19562 gen_load_gpr(t0
, rt
);
19563 tcg_gen_movi_tl(t1
, imm
);
19564 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19566 /* Load needed operands and calculate btarget */
19569 if (rt
== 0 && imm
== 0) {
19570 /* Unconditional branch */
19571 } else if (rt
== 0 && imm
!= 0) {
19576 cond
= TCG_COND_EQ
;
19582 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19583 generate_exception_end(ctx
, EXCP_RI
);
19585 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19586 /* Unconditional branch */
19587 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19591 tcg_gen_shri_tl(t0
, t0
, imm
);
19592 tcg_gen_andi_tl(t0
, t0
, 1);
19593 tcg_gen_movi_tl(t1
, 0);
19595 if (opc
== NM_BBEQZC
) {
19596 cond
= TCG_COND_EQ
;
19598 cond
= TCG_COND_NE
;
19603 if (rt
== 0 && imm
== 0) {
19606 } else if (rt
== 0 && imm
!= 0) {
19607 /* Unconditional branch */
19610 cond
= TCG_COND_NE
;
19614 if (rt
== 0 && imm
== 0) {
19615 /* Unconditional branch */
19618 cond
= TCG_COND_GE
;
19623 cond
= TCG_COND_LT
;
19626 if (rt
== 0 && imm
== 0) {
19627 /* Unconditional branch */
19630 cond
= TCG_COND_GEU
;
19635 cond
= TCG_COND_LTU
;
19638 MIPS_INVAL("Immediate Value Compact branch");
19639 generate_exception_end(ctx
, EXCP_RI
);
19643 if (bcond_compute
== 0) {
19644 /* Uncoditional compact branch */
19645 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19647 /* Conditional compact branch */
19648 TCGLabel
*fs
= gen_new_label();
19650 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19652 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19655 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19663 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19664 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19667 TCGv t0
= tcg_temp_new();
19668 TCGv t1
= tcg_temp_new();
19671 gen_load_gpr(t0
, rs
);
19675 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19678 /* calculate btarget */
19679 tcg_gen_shli_tl(t0
, t0
, 1);
19680 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19681 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19683 /* unconditional branch to register */
19684 tcg_gen_mov_tl(cpu_PC
, btarget
);
19685 tcg_gen_lookup_and_goto_ptr();
19691 /* nanoMIPS Branches */
19692 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19693 int rs
, int rt
, int32_t offset
)
19695 int bcond_compute
= 0;
19696 TCGv t0
= tcg_temp_new();
19697 TCGv t1
= tcg_temp_new();
19699 /* Load needed operands and calculate btarget */
19701 /* compact branch */
19704 gen_load_gpr(t0
, rs
);
19705 gen_load_gpr(t1
, rt
);
19707 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19711 if (rs
== 0 || rs
== rt
) {
19712 /* OPC_BLEZALC, OPC_BGEZALC */
19713 /* OPC_BGTZALC, OPC_BLTZALC */
19714 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19716 gen_load_gpr(t0
, rs
);
19717 gen_load_gpr(t1
, rt
);
19719 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19722 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19726 /* OPC_BEQZC, OPC_BNEZC */
19727 gen_load_gpr(t0
, rs
);
19729 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19731 /* OPC_JIC, OPC_JIALC */
19732 TCGv tbase
= tcg_temp_new();
19733 TCGv toffset
= tcg_temp_new();
19735 gen_load_gpr(tbase
, rt
);
19736 tcg_gen_movi_tl(toffset
, offset
);
19737 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19738 tcg_temp_free(tbase
);
19739 tcg_temp_free(toffset
);
19743 MIPS_INVAL("Compact branch/jump");
19744 generate_exception_end(ctx
, EXCP_RI
);
19748 if (bcond_compute
== 0) {
19749 /* Uncoditional compact branch */
19752 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19755 MIPS_INVAL("Compact branch/jump");
19756 generate_exception_end(ctx
, EXCP_RI
);
19760 /* Conditional compact branch */
19761 TCGLabel
*fs
= gen_new_label();
19765 if (rs
== 0 && rt
!= 0) {
19767 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19768 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19770 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19773 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19777 if (rs
== 0 && rt
!= 0) {
19779 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19780 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19782 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19785 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19789 if (rs
== 0 && rt
!= 0) {
19791 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19792 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19794 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19797 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19801 if (rs
== 0 && rt
!= 0) {
19803 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19804 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19806 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19809 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19813 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19816 MIPS_INVAL("Compact conditional branch/jump");
19817 generate_exception_end(ctx
, EXCP_RI
);
19821 /* Generating branch here as compact branches don't have delay slot */
19822 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19825 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19834 /* nanoMIPS CP1 Branches */
19835 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19836 int32_t ft
, int32_t offset
)
19838 target_ulong btarget
;
19839 TCGv_i64 t0
= tcg_temp_new_i64();
19841 gen_load_fpr64(ctx
, t0
, ft
);
19842 tcg_gen_andi_i64(t0
, t0
, 1);
19844 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19848 tcg_gen_xori_i64(t0
, t0
, 1);
19849 ctx
->hflags
|= MIPS_HFLAG_BC
;
19852 /* t0 already set */
19853 ctx
->hflags
|= MIPS_HFLAG_BC
;
19856 MIPS_INVAL("cp1 cond branch");
19857 generate_exception_end(ctx
, EXCP_RI
);
19861 tcg_gen_trunc_i64_tl(bcond
, t0
);
19863 ctx
->btarget
= btarget
;
19866 tcg_temp_free_i64(t0
);
19870 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19873 t0
= tcg_temp_new();
19874 t1
= tcg_temp_new();
19876 gen_load_gpr(t0
, rs
);
19877 gen_load_gpr(t1
, rt
);
19879 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19880 /* PP.LSXS instructions require shifting */
19881 switch (extract32(ctx
->opcode
, 7, 4)) {
19886 tcg_gen_shli_tl(t0
, t0
, 1);
19893 tcg_gen_shli_tl(t0
, t0
, 2);
19897 tcg_gen_shli_tl(t0
, t0
, 3);
19901 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19903 switch (extract32(ctx
->opcode
, 7, 4)) {
19905 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19907 gen_store_gpr(t0
, rd
);
19911 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19913 gen_store_gpr(t0
, rd
);
19917 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19919 gen_store_gpr(t0
, rd
);
19922 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19924 gen_store_gpr(t0
, rd
);
19928 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19930 gen_store_gpr(t0
, rd
);
19934 gen_load_gpr(t1
, rd
);
19935 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19941 gen_load_gpr(t1
, rd
);
19942 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19948 gen_load_gpr(t1
, rd
);
19949 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19953 /*case NM_LWC1XS:*/
19955 /*case NM_LDC1XS:*/
19957 /*case NM_SWC1XS:*/
19959 /*case NM_SDC1XS:*/
19960 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19961 check_cp1_enabled(ctx
);
19962 switch (extract32(ctx
->opcode
, 7, 4)) {
19964 /*case NM_LWC1XS:*/
19965 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19968 /*case NM_LDC1XS:*/
19969 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19972 /*case NM_SWC1XS:*/
19973 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19976 /*case NM_SDC1XS:*/
19977 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19981 generate_exception_err(ctx
, EXCP_CpU
, 1);
19985 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_pool32f_nanomips_insn(): decode one nanoMIPS pool32f (FPU) instruction.
 * Rejects the whole pool with EXCP_RI when CP0_Config1.FP is clear, calls
 * check_cp1_enabled(), then dispatches on bit-fields of ctx->opcode
 * (extract32 at offsets 0/3/6/9) to gen_farith(), gen_sel_s()/gen_sel_d(),
 * gen_cp1() move/control transfers, and gen_r6_cmp_s()/gen_r6_cmp_d();
 * unmatched encodings raise EXCP_RI (reserved instruction).
 *
 * NOTE(review): this region of the file has been mangled by an automated
 * extraction -- statements are split across lines, the original file's line
 * numbers are fused into the code text, and most case labels, break
 * statements and braces are missing.  The tokens below are preserved exactly
 * as found; restore this function from the upstream QEMU source
 * (target/mips/translate.c) before attempting any code change.
 */
19993 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19997 rt
= extract32(ctx
->opcode
, 21, 5);
19998 rs
= extract32(ctx
->opcode
, 16, 5);
19999 rd
= extract32(ctx
->opcode
, 11, 5);
/* No FPU present: the entire pool is a reserved instruction. */
20001 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20002 generate_exception_end(ctx
, EXCP_RI
);
20005 check_cp1_enabled(ctx
);
20006 switch (extract32(ctx
->opcode
, 0, 3)) {
20008 switch (extract32(ctx
->opcode
, 3, 7)) {
20010 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20013 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20016 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20019 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20022 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20025 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20028 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20031 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20034 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20037 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20040 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20043 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20046 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20049 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20052 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20055 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20058 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20061 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20064 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20067 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20070 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20073 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20076 generate_exception_end(ctx
, EXCP_RI
);
/* min/max family: bit 9 selects single vs double precision. */
20081 switch (extract32(ctx
->opcode
, 3, 3)) {
20083 switch (extract32(ctx
->opcode
, 9, 1)) {
20085 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20088 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20093 switch (extract32(ctx
->opcode
, 9, 1)) {
20095 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20098 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20103 switch (extract32(ctx
->opcode
, 9, 1)) {
20105 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20108 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20113 switch (extract32(ctx
->opcode
, 9, 1)) {
20115 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20118 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
/* FPU <-> GPR moves and control-register transfers. */
20123 switch (extract32(ctx
->opcode
, 6, 8)) {
20125 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20128 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20131 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20134 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20137 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20140 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20143 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20146 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
/* Unary conversion/rounding ops selected by a 9-bit field at bit 6. */
20149 switch (extract32(ctx
->opcode
, 6, 9)) {
20151 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20154 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20157 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20160 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20163 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20166 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20169 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20172 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20175 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20178 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20181 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20184 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20187 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20190 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20193 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20196 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20199 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20202 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20205 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20208 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20211 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20214 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20217 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20220 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20223 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20226 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20229 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20232 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20235 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20238 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20241 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20244 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20247 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20250 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20253 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20256 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20259 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20262 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20265 generate_exception_end(ctx
, EXCP_RI
);
/* R6-style FP compare: 5-bit condition code taken from bits 6..10. */
20274 switch (extract32(ctx
->opcode
, 3, 3)) {
20275 case NM_CMP_CONDN_S
:
20276 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20278 case NM_CMP_CONDN_D
:
20279 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20282 generate_exception_end(ctx
, EXCP_RI
);
20287 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_pool32a5_nanomips_insn(): decode one nanoMIPS pool32a5 (DSP ASE)
 * instruction.  Loads GPR[rs] into v1_t and GPR[rt] into v2_t, then
 * dispatches (presumably on @opc -- the enclosing switch header is missing
 * from this extraction; TODO confirm against upstream) to the DSP TCG
 * helpers: compare (cmp*/cmpu*/cmpgu*/cmpgdu*), saturating add/sub
 * (addq*/subq*/addu*/subu*), shifts (shra*/shrl*/shll*), multiplies
 * (mul*/muleq*/muleu*/mulq*), and pack/pick/precr* operations.  Most
 * results are written back with gen_store_gpr(v1_t, ret); `ret` is
 * declared in text missing from this extraction -- TODO confirm.
 * Unmatched encodings raise EXCP_RI.
 *
 * NOTE(review): this region of the file has been mangled by an automated
 * extraction -- statements are split across lines, the original file's line
 * numbers are fused into the code text, and many case labels, break
 * statements and braces are missing.  The tokens below are preserved exactly
 * as found; restore this function from the upstream QEMU source
 * (target/mips/translate.c) before attempting any code change.
 */
20292 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20293 int rd
, int rs
, int rt
)
20296 TCGv t0
= tcg_temp_new();
20297 TCGv v1_t
= tcg_temp_new();
20298 TCGv v2_t
= tcg_temp_new();
/* Operand fetch: v1_t <- GPR[rs], v2_t <- GPR[rt]. */
20300 gen_load_gpr(v1_t
, rs
);
20301 gen_load_gpr(v2_t
, rt
);
20306 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20310 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20314 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20316 case NM_CMPU_EQ_QB
:
20318 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20320 case NM_CMPU_LT_QB
:
20322 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20324 case NM_CMPU_LE_QB
:
20326 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20328 case NM_CMPGU_EQ_QB
:
20330 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20331 gen_store_gpr(v1_t
, ret
);
20333 case NM_CMPGU_LT_QB
:
20335 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20336 gen_store_gpr(v1_t
, ret
);
20338 case NM_CMPGU_LE_QB
:
20340 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20341 gen_store_gpr(v1_t
, ret
);
/* CMPGDU*: also mirror the 4 result bits into DSPControl ccond (24..27). */
20343 case NM_CMPGDU_EQ_QB
:
20345 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20346 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20347 gen_store_gpr(v1_t
, ret
);
20349 case NM_CMPGDU_LT_QB
:
20351 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20352 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20353 gen_store_gpr(v1_t
, ret
);
20355 case NM_CMPGDU_LE_QB
:
20357 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20358 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20359 gen_store_gpr(v1_t
, ret
);
20363 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20364 gen_store_gpr(v1_t
, ret
);
20368 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20369 gen_store_gpr(v1_t
, ret
);
20373 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20374 gen_store_gpr(v1_t
, ret
);
20378 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20379 gen_store_gpr(v1_t
, ret
);
20383 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20384 gen_store_gpr(v1_t
, ret
);
20388 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20389 gen_store_gpr(v1_t
, ret
);
20393 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20394 gen_store_gpr(v1_t
, ret
);
/* For the paired variants below, opcode bit 10 selects the plain vs
 * saturating/rounding form of the same operation. */
20398 switch (extract32(ctx
->opcode
, 10, 1)) {
20401 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20402 gen_store_gpr(v1_t
, ret
);
20406 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20407 gen_store_gpr(v1_t
, ret
);
20411 case NM_ADDQH_R_PH
:
20413 switch (extract32(ctx
->opcode
, 10, 1)) {
20416 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20417 gen_store_gpr(v1_t
, ret
);
20421 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20422 gen_store_gpr(v1_t
, ret
);
20428 switch (extract32(ctx
->opcode
, 10, 1)) {
20431 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20432 gen_store_gpr(v1_t
, ret
);
20436 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20437 gen_store_gpr(v1_t
, ret
);
20443 switch (extract32(ctx
->opcode
, 10, 1)) {
20446 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20447 gen_store_gpr(v1_t
, ret
);
20451 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20452 gen_store_gpr(v1_t
, ret
);
20458 switch (extract32(ctx
->opcode
, 10, 1)) {
20461 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20462 gen_store_gpr(v1_t
, ret
);
20466 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20467 gen_store_gpr(v1_t
, ret
);
20471 case NM_ADDUH_R_QB
:
20473 switch (extract32(ctx
->opcode
, 10, 1)) {
20476 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20477 gen_store_gpr(v1_t
, ret
);
20481 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20482 gen_store_gpr(v1_t
, ret
);
20486 case NM_SHRAV_R_PH
:
20488 switch (extract32(ctx
->opcode
, 10, 1)) {
20491 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20492 gen_store_gpr(v1_t
, ret
);
20496 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20497 gen_store_gpr(v1_t
, ret
);
20501 case NM_SHRAV_R_QB
:
20503 switch (extract32(ctx
->opcode
, 10, 1)) {
20506 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20507 gen_store_gpr(v1_t
, ret
);
20511 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20512 gen_store_gpr(v1_t
, ret
);
20518 switch (extract32(ctx
->opcode
, 10, 1)) {
20521 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20522 gen_store_gpr(v1_t
, ret
);
20526 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20527 gen_store_gpr(v1_t
, ret
);
20531 case NM_SUBQH_R_PH
:
20533 switch (extract32(ctx
->opcode
, 10, 1)) {
20536 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20537 gen_store_gpr(v1_t
, ret
);
20541 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20542 gen_store_gpr(v1_t
, ret
);
20548 switch (extract32(ctx
->opcode
, 10, 1)) {
20551 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20552 gen_store_gpr(v1_t
, ret
);
20556 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20557 gen_store_gpr(v1_t
, ret
);
20563 switch (extract32(ctx
->opcode
, 10, 1)) {
20566 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20567 gen_store_gpr(v1_t
, ret
);
20571 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20572 gen_store_gpr(v1_t
, ret
);
20578 switch (extract32(ctx
->opcode
, 10, 1)) {
20581 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20582 gen_store_gpr(v1_t
, ret
);
20586 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20587 gen_store_gpr(v1_t
, ret
);
20591 case NM_SUBUH_R_QB
:
20593 switch (extract32(ctx
->opcode
, 10, 1)) {
20596 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20597 gen_store_gpr(v1_t
, ret
);
20601 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20602 gen_store_gpr(v1_t
, ret
);
20606 case NM_SHLLV_S_PH
:
20608 switch (extract32(ctx
->opcode
, 10, 1)) {
20611 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20612 gen_store_gpr(v1_t
, ret
);
20616 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20617 gen_store_gpr(v1_t
, ret
);
20621 case NM_PRECR_SRA_R_PH_W
:
20623 switch (extract32(ctx
->opcode
, 10, 1)) {
20625 /* PRECR_SRA_PH_W */
20627 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20628 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20630 gen_store_gpr(v1_t
, rt
);
20631 tcg_temp_free_i32(sa_t
);
20635 /* PRECR_SRA_R_PH_W */
20637 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20638 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20640 gen_store_gpr(v1_t
, rt
);
20641 tcg_temp_free_i32(sa_t
);
20646 case NM_MULEU_S_PH_QBL
:
20648 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20649 gen_store_gpr(v1_t
, ret
);
20651 case NM_MULEU_S_PH_QBR
:
20653 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20654 gen_store_gpr(v1_t
, ret
);
20656 case NM_MULQ_RS_PH
:
20658 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20659 gen_store_gpr(v1_t
, ret
);
20663 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20664 gen_store_gpr(v1_t
, ret
);
20668 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20669 gen_store_gpr(v1_t
, ret
);
20673 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20674 gen_store_gpr(v1_t
, ret
);
/* Bit-insert into GPR[rt]: rd gives position, width is 32 - rd. */
20678 gen_load_gpr(t0
, rs
);
20680 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20682 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20686 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20687 gen_store_gpr(v1_t
, ret
);
20691 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20692 gen_store_gpr(v1_t
, ret
);
20696 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20697 gen_store_gpr(v1_t
, ret
);
20701 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20702 gen_store_gpr(v1_t
, ret
);
20706 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20707 gen_store_gpr(v1_t
, ret
);
20711 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20712 gen_store_gpr(v1_t
, ret
);
/* SHILO: accumulator index from rd >> 3, signed shift from imm. */
20717 TCGv tv0
= tcg_temp_new();
20718 TCGv tv1
= tcg_temp_new();
20719 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20721 tcg_gen_movi_tl(tv0
, rd
>> 3);
20722 tcg_gen_movi_tl(tv1
, imm
);
20723 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20726 case NM_MULEQ_S_W_PHL
:
20728 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20729 gen_store_gpr(v1_t
, ret
);
20731 case NM_MULEQ_S_W_PHR
:
20733 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20734 gen_store_gpr(v1_t
, ret
);
20738 switch (extract32(ctx
->opcode
, 10, 1)) {
20741 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20742 gen_store_gpr(v1_t
, ret
);
20746 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20747 gen_store_gpr(v1_t
, ret
);
20751 case NM_PRECR_QB_PH
:
20753 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20754 gen_store_gpr(v1_t
, ret
);
20756 case NM_PRECRQ_QB_PH
:
20758 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20759 gen_store_gpr(v1_t
, ret
);
20761 case NM_PRECRQ_PH_W
:
20763 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20764 gen_store_gpr(v1_t
, ret
);
20766 case NM_PRECRQ_RS_PH_W
:
20768 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20769 gen_store_gpr(v1_t
, ret
);
20771 case NM_PRECRQU_S_QB_PH
:
20773 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20774 gen_store_gpr(v1_t
, ret
);
/* Immediate-shift forms: shift amount comes from the rd field. */
20778 tcg_gen_movi_tl(t0
, rd
);
20779 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20780 gen_store_gpr(v1_t
, rt
);
20784 tcg_gen_movi_tl(t0
, rd
>> 1);
20785 switch (extract32(ctx
->opcode
, 10, 1)) {
20788 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20789 gen_store_gpr(v1_t
, rt
);
20793 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20794 gen_store_gpr(v1_t
, rt
);
20800 tcg_gen_movi_tl(t0
, rd
>> 1);
20801 switch (extract32(ctx
->opcode
, 10, 2)) {
20804 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20805 gen_store_gpr(v1_t
, rt
);
20809 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20810 gen_store_gpr(v1_t
, rt
);
20813 generate_exception_end(ctx
, EXCP_RI
);
20819 tcg_gen_movi_tl(t0
, rd
);
20820 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20821 gen_store_gpr(v1_t
, rt
);
/* REPL.PH-style: sign-extend a 10-bit immediate and replicate it into
 * both halfwords of GPR[rt] via dup_const(MO_16, imm). */
20827 imm
= sextract32(ctx
->opcode
, 11, 11);
20828 imm
= (int16_t)(imm
<< 6) >> 6;
20830 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20835 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_nanomips_32_48_opc(): decode one 32- or 48-bit nanoMIPS
 * instruction.  Fetches the second halfword with cpu_lduw_code() and merges
 * it into ctx->opcode, extracts rt/rs/rd register fields and the 6-bit
 * major opcode (bits 26..31), then dispatches to pool decoders
 * (gen_pool32a0/a5/axf/32f), load/store emitters (gen_ld/gen_st/
 * gen_cop1_ldst/gen_st_cond/gen_llwp/gen_scwp), ALU-immediate emitters,
 * and branch generators (gen_compute_branch_nm and friends).  48-bit forms
 * fetch a third halfword (pc_next + 4).  Unmatched encodings raise EXCP_RI.
 * Return value semantics (instruction length) are in text missing from this
 * extraction -- TODO confirm against upstream.
 *
 * NOTE(review): this region of the file has been mangled by an automated
 * extraction -- statements are split across lines, the original file's line
 * numbers are fused into the code text, and many case labels, break
 * statements and braces are missing.  The tokens below are preserved exactly
 * as found; restore this function from the upstream QEMU source
 * (target/mips/translate.c) before attempting any code change.
 */
20840 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
/* Append the second halfword to the 16 bits already in ctx->opcode. */
20848 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20849 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20851 rt
= extract32(ctx
->opcode
, 21, 5);
20852 rs
= extract32(ctx
->opcode
, 16, 5);
20853 rd
= extract32(ctx
->opcode
, 11, 5);
20855 op
= extract32(ctx
->opcode
, 26, 6);
20860 switch (extract32(ctx
->opcode
, 19, 2)) {
20863 generate_exception_end(ctx
, EXCP_RI
);
20866 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20867 generate_exception_end(ctx
, EXCP_SYSCALL
);
20869 generate_exception_end(ctx
, EXCP_RI
);
20873 generate_exception_end(ctx
, EXCP_BREAK
);
/* SDBBP: semihosting trap if UHI code matches, else SBRI/RI or DBp. */
20876 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20877 gen_helper_do_semihosting(cpu_env
);
20879 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20880 generate_exception_end(ctx
, EXCP_RI
);
20882 generate_exception_end(ctx
, EXCP_DBp
);
20889 imm
= extract32(ctx
->opcode
, 0, 16);
20891 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20893 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20895 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
/* PC-relative address formation (ADDIUPC-style). */
20900 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20901 extract32(ctx
->opcode
, 1, 20) << 1;
20902 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20903 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20907 switch (ctx
->opcode
& 0x07) {
20909 gen_pool32a0_nanomips_insn(env
, ctx
);
20913 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20914 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20918 switch (extract32(ctx
->opcode
, 3, 3)) {
20920 gen_p_lsx(ctx
, rd
, rs
, rt
);
20923 /* In nanoMIPS, the shift field directly encodes the shift
20924 * amount, meaning that the supported shift values are in
20925 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20926 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20927 extract32(ctx
->opcode
, 9, 2) - 1);
20930 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20933 gen_pool32axf_nanomips_insn(env
, ctx
);
20936 generate_exception_end(ctx
, EXCP_RI
);
20941 generate_exception_end(ctx
, EXCP_RI
);
/* GP-relative (register 28) address/load/store forms. */
20946 switch (ctx
->opcode
& 0x03) {
20949 offset
= extract32(ctx
->opcode
, 0, 21);
20950 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20954 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20957 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20960 generate_exception_end(ctx
, EXCP_RI
);
/* 48-bit forms: third halfword fetched at pc_next + 4. */
20966 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20967 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20968 switch (extract32(ctx
->opcode
, 16, 5)) {
20972 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20978 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20979 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20985 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20991 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20994 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21001 t0
= tcg_temp_new();
21003 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21006 tcg_gen_movi_tl(t0
, addr
);
21007 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21015 t0
= tcg_temp_new();
21016 t1
= tcg_temp_new();
21018 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21021 tcg_gen_movi_tl(t0
, addr
);
21022 gen_load_gpr(t1
, rt
);
21024 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21031 generate_exception_end(ctx
, EXCP_RI
);
21037 switch (extract32(ctx
->opcode
, 12, 4)) {
21039 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21042 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21045 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21048 switch (extract32(ctx
->opcode
, 20, 1)) {
21050 switch (ctx
->opcode
& 3) {
21052 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21053 extract32(ctx
->opcode
, 2, 1),
21054 extract32(ctx
->opcode
, 3, 9) << 3);
21057 case NM_RESTORE_JRC
:
21058 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21059 extract32(ctx
->opcode
, 2, 1),
21060 extract32(ctx
->opcode
, 3, 9) << 3);
21061 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21062 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21066 generate_exception_end(ctx
, EXCP_RI
);
21071 generate_exception_end(ctx
, EXCP_RI
);
21076 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21079 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
/* SEQI: GPR[rt] = (GPR[rs] == imm). */
21083 TCGv t0
= tcg_temp_new();
21085 imm
= extract32(ctx
->opcode
, 0, 12);
21086 gen_load_gpr(t0
, rs
);
21087 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21088 gen_store_gpr(t0
, rt
);
21094 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21095 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21099 int shift
= extract32(ctx
->opcode
, 0, 5);
21100 switch (extract32(ctx
->opcode
, 5, 4)) {
21102 if (rt
== 0 && shift
== 0) {
21104 } else if (rt
== 0 && shift
== 3) {
21105 /* EHB - treat as NOP */
21106 } else if (rt
== 0 && shift
== 5) {
21107 /* PAUSE - treat as NOP */
21108 } else if (rt
== 0 && shift
== 6) {
21110 gen_sync(extract32(ctx
->opcode
, 16, 5));
21113 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21114 extract32(ctx
->opcode
, 0, 5));
21118 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21119 extract32(ctx
->opcode
, 0, 5));
21122 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21123 extract32(ctx
->opcode
, 0, 5));
21126 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21127 extract32(ctx
->opcode
, 0, 5));
/* ROTX: shift/shiftx/stripe operands packed as i32 constants. */
21135 TCGv t0
= tcg_temp_new();
21136 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21137 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21139 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21141 gen_load_gpr(t0
, rs
);
21142 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21145 tcg_temp_free_i32(shift
);
21146 tcg_temp_free_i32(shiftx
);
21147 tcg_temp_free_i32(stripe
);
21151 switch (((ctx
->opcode
>> 10) & 2) |
21152 (extract32(ctx
->opcode
, 5, 1))) {
21155 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21156 extract32(ctx
->opcode
, 6, 5));
21159 generate_exception_end(ctx
, EXCP_RI
);
21164 switch (((ctx
->opcode
>> 10) & 2) |
21165 (extract32(ctx
->opcode
, 5, 1))) {
21168 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21169 extract32(ctx
->opcode
, 6, 5));
21172 generate_exception_end(ctx
, EXCP_RI
);
21177 generate_exception_end(ctx
, EXCP_RI
);
21182 gen_pool32f_nanomips_insn(ctx
);
21187 switch (extract32(ctx
->opcode
, 1, 1)) {
21190 tcg_gen_movi_tl(cpu_gpr
[rt
],
21191 sextract32(ctx
->opcode
, 0, 1) << 31 |
21192 extract32(ctx
->opcode
, 2, 10) << 21 |
21193 extract32(ctx
->opcode
, 12, 9) << 12);
21198 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21199 extract32(ctx
->opcode
, 2, 10) << 21 |
21200 extract32(ctx
->opcode
, 12, 9) << 12;
21202 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21203 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
/* GP-relative byte/halfword loads and stores (18-bit unsigned offset). */
21210 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21212 switch (extract32(ctx
->opcode
, 18, 3)) {
21214 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21217 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21220 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21224 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21229 switch (ctx
->opcode
& 1) {
21231 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21234 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21240 switch (ctx
->opcode
& 1) {
21242 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21245 generate_exception_end(ctx
, EXCP_RI
);
21251 switch (ctx
->opcode
& 0x3) {
21253 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21256 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21259 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21262 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21267 generate_exception_end(ctx
, EXCP_RI
);
/* Register+12-bit-unsigned-offset load/store pool. */
21274 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21276 switch (extract32(ctx
->opcode
, 12, 4)) {
21280 /* Break the TB to be able to sync copied instructions
21282 ctx
->base
.is_jmp
= DISAS_STOP
;
21285 /* Treat as NOP. */
21289 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21292 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21295 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21298 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21301 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21304 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21307 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21310 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21313 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21316 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21319 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21322 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21325 generate_exception_end(ctx
, EXCP_RI
);
/* Register+9-bit-signed-offset pool (s = sign bit 15 : bits 0..7). */
21332 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21333 extract32(ctx
->opcode
, 0, 8);
21335 switch (extract32(ctx
->opcode
, 8, 3)) {
21337 switch (extract32(ctx
->opcode
, 11, 4)) {
21339 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21342 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21345 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21348 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21351 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21354 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21357 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21360 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21363 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21366 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21369 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21372 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21377 /* Break the TB to be able to sync copied instructions
21379 ctx
->base
.is_jmp
= DISAS_STOP
;
21382 /* Treat as NOP. */
21386 generate_exception_end(ctx
, EXCP_RI
);
21391 switch (extract32(ctx
->opcode
, 11, 4)) {
21396 TCGv t0
= tcg_temp_new();
21397 TCGv t1
= tcg_temp_new();
21399 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21401 switch (extract32(ctx
->opcode
, 11, 4)) {
21403 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21405 gen_store_gpr(t0
, rt
);
21408 gen_load_gpr(t1
, rt
);
21409 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21418 switch (ctx
->opcode
& 0x03) {
21420 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21424 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21429 switch (ctx
->opcode
& 0x03) {
21431 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21435 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21440 check_cp0_enabled(ctx
);
21441 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21442 gen_cache_operation(ctx
, rt
, rs
, s
);
/* EVA (kernel-accesses-user) variants: all require CP0 access. */
21448 switch (extract32(ctx
->opcode
, 11, 4)) {
21451 check_cp0_enabled(ctx
);
21452 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21456 check_cp0_enabled(ctx
);
21457 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21461 check_cp0_enabled(ctx
);
21462 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21466 /* case NM_SYNCIE */
21468 check_cp0_enabled(ctx
);
21469 /* Break the TB to be able to sync copied instructions
21471 ctx
->base
.is_jmp
= DISAS_STOP
;
21473 /* case NM_PREFE */
21475 check_cp0_enabled(ctx
);
21476 /* Treat as NOP. */
21481 check_cp0_enabled(ctx
);
21482 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21486 check_cp0_enabled(ctx
);
21487 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21491 check_cp0_enabled(ctx
);
21492 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21495 check_nms_dl_il_sl_tl_l2c(ctx
);
21496 gen_cache_operation(ctx
, rt
, rs
, s
);
21500 check_cp0_enabled(ctx
);
21501 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21505 check_cp0_enabled(ctx
);
21506 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21509 switch (extract32(ctx
->opcode
, 2, 2)) {
21513 check_cp0_enabled(ctx
);
21514 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21519 check_cp0_enabled(ctx
);
21520 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21523 generate_exception_end(ctx
, EXCP_RI
);
21528 switch (extract32(ctx
->opcode
, 2, 2)) {
21532 check_cp0_enabled(ctx
);
21533 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21538 check_cp0_enabled(ctx
);
21539 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21542 generate_exception_end(ctx
, EXCP_RI
);
/* LWM/SWM multi-register transfer: count of 0 encodes 8 registers. */
21552 int count
= extract32(ctx
->opcode
, 12, 3);
21555 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21556 extract32(ctx
->opcode
, 0, 8);
21557 TCGv va
= tcg_temp_new();
21558 TCGv t1
= tcg_temp_new();
21559 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21560 NM_P_LS_UAWM
? MO_UNALN
: 0;
21562 count
= (count
== 0) ? 8 : count
;
21563 while (counter
!= count
) {
21564 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21565 int this_offset
= offset
+ (counter
<< 2);
21567 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21569 switch (extract32(ctx
->opcode
, 11, 1)) {
21571 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21573 gen_store_gpr(t1
, this_rt
);
21574 if ((this_rt
== rs
) &&
21575 (counter
!= (count
- 1))) {
21576 /* UNPREDICTABLE */
21580 this_rt
= (rt
== 0) ? 0 : this_rt
;
21581 gen_load_gpr(t1
, this_rt
);
21582 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21593 generate_exception_end(ctx
, EXCP_RI
);
/* MOVE.BALC: copy a 4-bit-encoded source GPR into r4/r5, then
 * branch-and-link (BGEZAL with rs=0 is unconditional). */
21601 TCGv t0
= tcg_temp_new();
21602 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21603 extract32(ctx
->opcode
, 1, 20) << 1;
21604 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21605 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21606 extract32(ctx
->opcode
, 21, 3));
21607 gen_load_gpr(t0
, rt
);
21608 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21609 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21615 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21616 extract32(ctx
->opcode
, 1, 24) << 1;
21618 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21620 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21623 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21628 switch (extract32(ctx
->opcode
, 12, 4)) {
21631 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21634 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21637 generate_exception_end(ctx
, EXCP_RI
);
/* Conditional branch pools: 14-bit signed displacement. */
21643 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21644 extract32(ctx
->opcode
, 1, 13) << 1;
21645 switch (extract32(ctx
->opcode
, 14, 2)) {
21648 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21651 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21652 extract32(ctx
->opcode
, 1, 13) << 1;
21653 check_cp1_enabled(ctx
);
21654 switch (extract32(ctx
->opcode
, 16, 5)) {
21656 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21659 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21664 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21665 extract32(ctx
->opcode
, 0, 1) << 13;
21667 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21672 generate_exception_end(ctx
, EXCP_RI
);
21678 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21680 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
/* BGEUC degenerate encodings collapse to BC / BEQZC. */
21684 if (rs
== rt
|| rt
== 0) {
21685 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21686 } else if (rs
== 0) {
21687 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21689 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21697 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21698 extract32(ctx
->opcode
, 1, 13) << 1;
21699 switch (extract32(ctx
->opcode
, 14, 2)) {
21702 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
/* BLTC/BLTUC never-taken encodings: only clear the branch-slot flag. */
21705 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21707 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21709 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21713 if (rs
== 0 || rs
== rt
) {
21715 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21717 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21721 generate_exception_end(ctx
, EXCP_RI
);
/* Branch-on-immediate-compare: 11-bit displacement, 7-bit immediate u. */
21728 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21729 extract32(ctx
->opcode
, 1, 10) << 1;
21730 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21732 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21737 generate_exception_end(ctx
, EXCP_RI
);
21743 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21746 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21747 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21748 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21752 /* make sure instructions are on a halfword boundary */
21753 if (ctx
->base
.pc_next
& 0x1) {
21754 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21755 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21756 tcg_temp_free(tmp
);
21757 generate_exception_end(ctx
, EXCP_AdEL
);
21761 op
= extract32(ctx
->opcode
, 10, 6);
21764 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21767 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21768 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21771 switch (extract32(ctx
->opcode
, 3, 2)) {
21772 case NM_P16_SYSCALL
:
21773 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21774 generate_exception_end(ctx
, EXCP_SYSCALL
);
21776 generate_exception_end(ctx
, EXCP_RI
);
21780 generate_exception_end(ctx
, EXCP_BREAK
);
21783 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21784 gen_helper_do_semihosting(cpu_env
);
21786 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21787 generate_exception_end(ctx
, EXCP_RI
);
21789 generate_exception_end(ctx
, EXCP_DBp
);
21794 generate_exception_end(ctx
, EXCP_RI
);
21801 int shift
= extract32(ctx
->opcode
, 0, 3);
21803 shift
= (shift
== 0) ? 8 : shift
;
21805 switch (extract32(ctx
->opcode
, 3, 1)) {
21813 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21817 switch (ctx
->opcode
& 1) {
21819 gen_pool16c_nanomips_insn(ctx
);
21822 gen_ldxs(ctx
, rt
, rs
, rd
);
21827 switch (extract32(ctx
->opcode
, 6, 1)) {
21829 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21830 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21833 generate_exception_end(ctx
, EXCP_RI
);
21838 switch (extract32(ctx
->opcode
, 3, 1)) {
21840 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21841 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21843 case NM_P_ADDIURS5
:
21844 rt
= extract32(ctx
->opcode
, 5, 5);
21846 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21847 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21848 (extract32(ctx
->opcode
, 0, 3));
21849 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21855 switch (ctx
->opcode
& 0x1) {
21857 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21860 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21865 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21866 extract32(ctx
->opcode
, 5, 3);
21867 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21868 extract32(ctx
->opcode
, 0, 3);
21869 rt
= decode_gpr_gpr4(rt
);
21870 rs
= decode_gpr_gpr4(rs
);
21871 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21872 (extract32(ctx
->opcode
, 3, 1))) {
21875 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21879 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21882 generate_exception_end(ctx
, EXCP_RI
);
21888 int imm
= extract32(ctx
->opcode
, 0, 7);
21889 imm
= (imm
== 0x7f ? -1 : imm
);
21891 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21897 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21898 u
= (u
== 12) ? 0xff :
21899 (u
== 13) ? 0xffff : u
;
21900 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21904 offset
= extract32(ctx
->opcode
, 0, 2);
21905 switch (extract32(ctx
->opcode
, 2, 2)) {
21907 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21910 rt
= decode_gpr_gpr3_src_store(
21911 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21912 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21915 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21918 generate_exception_end(ctx
, EXCP_RI
);
21923 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21924 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21926 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21929 rt
= decode_gpr_gpr3_src_store(
21930 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21931 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21934 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21937 generate_exception_end(ctx
, EXCP_RI
);
21942 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21943 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21946 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21947 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21948 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21952 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21953 extract32(ctx
->opcode
, 5, 3);
21954 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21955 extract32(ctx
->opcode
, 0, 3);
21956 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21957 (extract32(ctx
->opcode
, 8, 1) << 2);
21958 rt
= decode_gpr_gpr4(rt
);
21959 rs
= decode_gpr_gpr4(rs
);
21960 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21964 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21965 extract32(ctx
->opcode
, 5, 3);
21966 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21967 extract32(ctx
->opcode
, 0, 3);
21968 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21969 (extract32(ctx
->opcode
, 8, 1) << 2);
21970 rt
= decode_gpr_gpr4_zero(rt
);
21971 rs
= decode_gpr_gpr4(rs
);
21972 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21975 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21976 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21979 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21980 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21981 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21984 rt
= decode_gpr_gpr3_src_store(
21985 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21986 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21987 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21988 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21991 rt
= decode_gpr_gpr3_src_store(
21992 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21993 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21994 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21997 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21998 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21999 (extract32(ctx
->opcode
, 1, 9) << 1));
22002 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22003 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22004 (extract32(ctx
->opcode
, 1, 9) << 1));
22007 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22008 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22009 (extract32(ctx
->opcode
, 1, 6) << 1));
22012 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22013 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22014 (extract32(ctx
->opcode
, 1, 6) << 1));
22017 switch (ctx
->opcode
& 0xf) {
22020 switch (extract32(ctx
->opcode
, 4, 1)) {
22022 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22023 extract32(ctx
->opcode
, 5, 5), 0, 0);
22026 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22027 extract32(ctx
->opcode
, 5, 5), 31, 0);
22034 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22035 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22036 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22037 extract32(ctx
->opcode
, 0, 4) << 1);
22044 int count
= extract32(ctx
->opcode
, 0, 4);
22045 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22047 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22048 switch (extract32(ctx
->opcode
, 8, 1)) {
22050 gen_save(ctx
, rt
, count
, 0, u
);
22052 case NM_RESTORE_JRC16
:
22053 gen_restore(ctx
, rt
, count
, 0, u
);
22054 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22063 static const int gpr2reg1
[] = {4, 5, 6, 7};
22064 static const int gpr2reg2
[] = {5, 6, 7, 8};
22066 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22067 extract32(ctx
->opcode
, 8, 1);
22068 int r1
= gpr2reg1
[rd2
];
22069 int r2
= gpr2reg2
[rd2
];
22070 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22071 extract32(ctx
->opcode
, 0, 3);
22072 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22073 extract32(ctx
->opcode
, 5, 3);
22074 TCGv t0
= tcg_temp_new();
22075 TCGv t1
= tcg_temp_new();
22076 if (op
== NM_MOVEP
) {
22079 rs
= decode_gpr_gpr4_zero(r3
);
22080 rt
= decode_gpr_gpr4_zero(r4
);
22082 rd
= decode_gpr_gpr4(r3
);
22083 re
= decode_gpr_gpr4(r4
);
22087 gen_load_gpr(t0
, rs
);
22088 gen_load_gpr(t1
, rt
);
22089 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22090 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22096 return decode_nanomips_32_48_opc(env
, ctx
);
22103 /* SmartMIPS extension to MIPS32 */
22105 #if defined(TARGET_MIPS64)
22107 /* MDMX extension to MIPS64 */
22111 /* MIPSDSP functions. */
22112 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22113 int rd
, int base
, int offset
)
22118 t0
= tcg_temp_new();
22121 gen_load_gpr(t0
, offset
);
22122 } else if (offset
== 0) {
22123 gen_load_gpr(t0
, base
);
22125 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22130 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22131 gen_store_gpr(t0
, rd
);
22134 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22135 gen_store_gpr(t0
, rd
);
22138 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22139 gen_store_gpr(t0
, rd
);
22141 #if defined(TARGET_MIPS64)
22143 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22144 gen_store_gpr(t0
, rd
);
22151 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22152 int ret
, int v1
, int v2
)
22158 /* Treat as NOP. */
22162 v1_t
= tcg_temp_new();
22163 v2_t
= tcg_temp_new();
22165 gen_load_gpr(v1_t
, v1
);
22166 gen_load_gpr(v2_t
, v2
);
22169 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22170 case OPC_MULT_G_2E
:
22174 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22176 case OPC_ADDUH_R_QB
:
22177 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22180 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22182 case OPC_ADDQH_R_PH
:
22183 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22186 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22188 case OPC_ADDQH_R_W
:
22189 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22192 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22194 case OPC_SUBUH_R_QB
:
22195 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22198 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22200 case OPC_SUBQH_R_PH
:
22201 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22204 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22206 case OPC_SUBQH_R_W
:
22207 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22211 case OPC_ABSQ_S_PH_DSP
:
22213 case OPC_ABSQ_S_QB
:
22215 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22217 case OPC_ABSQ_S_PH
:
22219 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22223 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22225 case OPC_PRECEQ_W_PHL
:
22227 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22228 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22230 case OPC_PRECEQ_W_PHR
:
22232 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22233 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22234 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22236 case OPC_PRECEQU_PH_QBL
:
22238 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22240 case OPC_PRECEQU_PH_QBR
:
22242 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22244 case OPC_PRECEQU_PH_QBLA
:
22246 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22248 case OPC_PRECEQU_PH_QBRA
:
22250 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22252 case OPC_PRECEU_PH_QBL
:
22254 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22256 case OPC_PRECEU_PH_QBR
:
22258 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22260 case OPC_PRECEU_PH_QBLA
:
22262 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22264 case OPC_PRECEU_PH_QBRA
:
22266 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22270 case OPC_ADDU_QB_DSP
:
22274 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22276 case OPC_ADDQ_S_PH
:
22278 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22282 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22286 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22288 case OPC_ADDU_S_QB
:
22290 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22294 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22296 case OPC_ADDU_S_PH
:
22298 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22302 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22304 case OPC_SUBQ_S_PH
:
22306 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22310 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22314 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22316 case OPC_SUBU_S_QB
:
22318 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22322 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22324 case OPC_SUBU_S_PH
:
22326 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22330 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22334 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22338 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22340 case OPC_RADDU_W_QB
:
22342 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22346 case OPC_CMPU_EQ_QB_DSP
:
22348 case OPC_PRECR_QB_PH
:
22350 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22352 case OPC_PRECRQ_QB_PH
:
22354 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22356 case OPC_PRECR_SRA_PH_W
:
22359 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22360 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22362 tcg_temp_free_i32(sa_t
);
22365 case OPC_PRECR_SRA_R_PH_W
:
22368 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22369 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22371 tcg_temp_free_i32(sa_t
);
22374 case OPC_PRECRQ_PH_W
:
22376 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22378 case OPC_PRECRQ_RS_PH_W
:
22380 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22382 case OPC_PRECRQU_S_QB_PH
:
22384 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22388 #ifdef TARGET_MIPS64
22389 case OPC_ABSQ_S_QH_DSP
:
22391 case OPC_PRECEQ_L_PWL
:
22393 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22395 case OPC_PRECEQ_L_PWR
:
22397 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22399 case OPC_PRECEQ_PW_QHL
:
22401 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22403 case OPC_PRECEQ_PW_QHR
:
22405 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22407 case OPC_PRECEQ_PW_QHLA
:
22409 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22411 case OPC_PRECEQ_PW_QHRA
:
22413 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22415 case OPC_PRECEQU_QH_OBL
:
22417 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22419 case OPC_PRECEQU_QH_OBR
:
22421 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22423 case OPC_PRECEQU_QH_OBLA
:
22425 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22427 case OPC_PRECEQU_QH_OBRA
:
22429 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22431 case OPC_PRECEU_QH_OBL
:
22433 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22435 case OPC_PRECEU_QH_OBR
:
22437 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22439 case OPC_PRECEU_QH_OBLA
:
22441 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22443 case OPC_PRECEU_QH_OBRA
:
22445 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22447 case OPC_ABSQ_S_OB
:
22449 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22451 case OPC_ABSQ_S_PW
:
22453 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22455 case OPC_ABSQ_S_QH
:
22457 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22461 case OPC_ADDU_OB_DSP
:
22463 case OPC_RADDU_L_OB
:
22465 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22469 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22471 case OPC_SUBQ_S_PW
:
22473 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22477 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22479 case OPC_SUBQ_S_QH
:
22481 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22485 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22487 case OPC_SUBU_S_OB
:
22489 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22493 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22495 case OPC_SUBU_S_QH
:
22497 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22501 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22503 case OPC_SUBUH_R_OB
:
22505 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22509 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22511 case OPC_ADDQ_S_PW
:
22513 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22517 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22519 case OPC_ADDQ_S_QH
:
22521 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22525 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22527 case OPC_ADDU_S_OB
:
22529 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22533 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22535 case OPC_ADDU_S_QH
:
22537 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22541 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22543 case OPC_ADDUH_R_OB
:
22545 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22549 case OPC_CMPU_EQ_OB_DSP
:
22551 case OPC_PRECR_OB_QH
:
22553 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22555 case OPC_PRECR_SRA_QH_PW
:
22558 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22559 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22560 tcg_temp_free_i32(ret_t
);
22563 case OPC_PRECR_SRA_R_QH_PW
:
22566 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22567 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22568 tcg_temp_free_i32(sa_v
);
22571 case OPC_PRECRQ_OB_QH
:
22573 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22575 case OPC_PRECRQ_PW_L
:
22577 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22579 case OPC_PRECRQ_QH_PW
:
22581 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22583 case OPC_PRECRQ_RS_QH_PW
:
22585 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22587 case OPC_PRECRQU_S_OB_QH
:
22589 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22596 tcg_temp_free(v1_t
);
22597 tcg_temp_free(v2_t
);
22600 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22601 int ret
, int v1
, int v2
)
22609 /* Treat as NOP. */
22613 t0
= tcg_temp_new();
22614 v1_t
= tcg_temp_new();
22615 v2_t
= tcg_temp_new();
22617 tcg_gen_movi_tl(t0
, v1
);
22618 gen_load_gpr(v1_t
, v1
);
22619 gen_load_gpr(v2_t
, v2
);
22622 case OPC_SHLL_QB_DSP
:
22624 op2
= MASK_SHLL_QB(ctx
->opcode
);
22628 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22632 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22636 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22640 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22642 case OPC_SHLL_S_PH
:
22644 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22646 case OPC_SHLLV_S_PH
:
22648 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22652 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22654 case OPC_SHLLV_S_W
:
22656 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22660 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22664 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22668 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22672 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22676 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22678 case OPC_SHRA_R_QB
:
22680 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22684 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22686 case OPC_SHRAV_R_QB
:
22688 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22692 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22694 case OPC_SHRA_R_PH
:
22696 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22700 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22702 case OPC_SHRAV_R_PH
:
22704 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22708 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22710 case OPC_SHRAV_R_W
:
22712 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22714 default: /* Invalid */
22715 MIPS_INVAL("MASK SHLL.QB");
22716 generate_exception_end(ctx
, EXCP_RI
);
22721 #ifdef TARGET_MIPS64
22722 case OPC_SHLL_OB_DSP
:
22723 op2
= MASK_SHLL_OB(ctx
->opcode
);
22727 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22731 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22733 case OPC_SHLL_S_PW
:
22735 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22737 case OPC_SHLLV_S_PW
:
22739 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22743 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22747 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22751 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22755 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22757 case OPC_SHLL_S_QH
:
22759 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22761 case OPC_SHLLV_S_QH
:
22763 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22767 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22771 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22773 case OPC_SHRA_R_OB
:
22775 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22777 case OPC_SHRAV_R_OB
:
22779 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22783 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22787 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22789 case OPC_SHRA_R_PW
:
22791 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22793 case OPC_SHRAV_R_PW
:
22795 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22799 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22803 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22805 case OPC_SHRA_R_QH
:
22807 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22809 case OPC_SHRAV_R_QH
:
22811 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22815 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22819 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22823 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22827 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22829 default: /* Invalid */
22830 MIPS_INVAL("MASK SHLL.OB");
22831 generate_exception_end(ctx
, EXCP_RI
);
22839 tcg_temp_free(v1_t
);
22840 tcg_temp_free(v2_t
);
22843 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22844 int ret
, int v1
, int v2
, int check_ret
)
22850 if ((ret
== 0) && (check_ret
== 1)) {
22851 /* Treat as NOP. */
22855 t0
= tcg_temp_new_i32();
22856 v1_t
= tcg_temp_new();
22857 v2_t
= tcg_temp_new();
22859 tcg_gen_movi_i32(t0
, ret
);
22860 gen_load_gpr(v1_t
, v1
);
22861 gen_load_gpr(v2_t
, v2
);
22864 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22865 * the same mask and op1. */
22866 case OPC_MULT_G_2E
:
22870 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22873 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22876 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22878 case OPC_MULQ_RS_W
:
22879 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22883 case OPC_DPA_W_PH_DSP
:
22885 case OPC_DPAU_H_QBL
:
22887 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22889 case OPC_DPAU_H_QBR
:
22891 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22893 case OPC_DPSU_H_QBL
:
22895 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22897 case OPC_DPSU_H_QBR
:
22899 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22903 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22905 case OPC_DPAX_W_PH
:
22907 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22909 case OPC_DPAQ_S_W_PH
:
22911 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22913 case OPC_DPAQX_S_W_PH
:
22915 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22917 case OPC_DPAQX_SA_W_PH
:
22919 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22923 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22925 case OPC_DPSX_W_PH
:
22927 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22929 case OPC_DPSQ_S_W_PH
:
22931 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22933 case OPC_DPSQX_S_W_PH
:
22935 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22937 case OPC_DPSQX_SA_W_PH
:
22939 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22941 case OPC_MULSAQ_S_W_PH
:
22943 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22945 case OPC_DPAQ_SA_L_W
:
22947 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22949 case OPC_DPSQ_SA_L_W
:
22951 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22953 case OPC_MAQ_S_W_PHL
:
22955 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22957 case OPC_MAQ_S_W_PHR
:
22959 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22961 case OPC_MAQ_SA_W_PHL
:
22963 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22965 case OPC_MAQ_SA_W_PHR
:
22967 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22969 case OPC_MULSA_W_PH
:
22971 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22975 #ifdef TARGET_MIPS64
22976 case OPC_DPAQ_W_QH_DSP
:
22978 int ac
= ret
& 0x03;
22979 tcg_gen_movi_i32(t0
, ac
);
22984 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22988 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22992 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22996 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23000 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23002 case OPC_DPAQ_S_W_QH
:
23004 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23006 case OPC_DPAQ_SA_L_PW
:
23008 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23010 case OPC_DPAU_H_OBL
:
23012 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23014 case OPC_DPAU_H_OBR
:
23016 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23020 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23022 case OPC_DPSQ_S_W_QH
:
23024 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23026 case OPC_DPSQ_SA_L_PW
:
23028 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23030 case OPC_DPSU_H_OBL
:
23032 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23034 case OPC_DPSU_H_OBR
:
23036 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23038 case OPC_MAQ_S_L_PWL
:
23040 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23042 case OPC_MAQ_S_L_PWR
:
23044 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23046 case OPC_MAQ_S_W_QHLL
:
23048 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23050 case OPC_MAQ_SA_W_QHLL
:
23052 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23054 case OPC_MAQ_S_W_QHLR
:
23056 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23058 case OPC_MAQ_SA_W_QHLR
:
23060 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23062 case OPC_MAQ_S_W_QHRL
:
23064 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23066 case OPC_MAQ_SA_W_QHRL
:
23068 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23070 case OPC_MAQ_S_W_QHRR
:
23072 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23074 case OPC_MAQ_SA_W_QHRR
:
23076 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23078 case OPC_MULSAQ_S_L_PW
:
23080 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23082 case OPC_MULSAQ_S_W_QH
:
23084 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23090 case OPC_ADDU_QB_DSP
:
23092 case OPC_MULEU_S_PH_QBL
:
23094 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23096 case OPC_MULEU_S_PH_QBR
:
23098 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23100 case OPC_MULQ_RS_PH
:
23102 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23104 case OPC_MULEQ_S_W_PHL
:
23106 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23108 case OPC_MULEQ_S_W_PHR
:
23110 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23112 case OPC_MULQ_S_PH
:
23114 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23118 #ifdef TARGET_MIPS64
23119 case OPC_ADDU_OB_DSP
:
23121 case OPC_MULEQ_S_PW_QHL
:
23123 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23125 case OPC_MULEQ_S_PW_QHR
:
23127 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23129 case OPC_MULEU_S_QH_OBL
:
23131 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23133 case OPC_MULEU_S_QH_OBR
:
23135 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23137 case OPC_MULQ_RS_QH
:
23139 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23146 tcg_temp_free_i32(t0
);
23147 tcg_temp_free(v1_t
);
23148 tcg_temp_free(v2_t
);
23151 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23159 /* Treat as NOP. */
23163 t0
= tcg_temp_new();
23164 val_t
= tcg_temp_new();
23165 gen_load_gpr(val_t
, val
);
23168 case OPC_ABSQ_S_PH_DSP
:
23172 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23177 target_long result
;
23178 imm
= (ctx
->opcode
>> 16) & 0xFF;
23179 result
= (uint32_t)imm
<< 24 |
23180 (uint32_t)imm
<< 16 |
23181 (uint32_t)imm
<< 8 |
23183 result
= (int32_t)result
;
23184 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23189 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23190 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23191 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23192 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23193 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23194 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23199 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23200 imm
= (int16_t)(imm
<< 6) >> 6;
23201 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23202 (target_long
)((int32_t)imm
<< 16 | \
23208 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23209 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23210 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23211 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23215 #ifdef TARGET_MIPS64
23216 case OPC_ABSQ_S_QH_DSP
:
23223 imm
= (ctx
->opcode
>> 16) & 0xFF;
23224 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23225 temp
= (temp
<< 16) | temp
;
23226 temp
= (temp
<< 32) | temp
;
23227 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23235 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23236 imm
= (int16_t)(imm
<< 6) >> 6;
23237 temp
= ((target_long
)imm
<< 32) \
23238 | ((target_long
)imm
& 0xFFFFFFFF);
23239 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23247 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23248 imm
= (int16_t)(imm
<< 6) >> 6;
23250 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23251 ((uint64_t)(uint16_t)imm
<< 32) |
23252 ((uint64_t)(uint16_t)imm
<< 16) |
23253 (uint64_t)(uint16_t)imm
;
23254 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23259 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23260 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23261 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23262 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23263 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23264 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23265 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23269 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23270 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23271 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23275 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23276 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23277 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23278 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23279 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23286 tcg_temp_free(val_t
);
23289 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23290 uint32_t op1
, uint32_t op2
,
23291 int ret
, int v1
, int v2
, int check_ret
)
23297 if ((ret
== 0) && (check_ret
== 1)) {
23298 /* Treat as NOP. */
23302 t1
= tcg_temp_new();
23303 v1_t
= tcg_temp_new();
23304 v2_t
= tcg_temp_new();
23306 gen_load_gpr(v1_t
, v1
);
23307 gen_load_gpr(v2_t
, v2
);
23310 case OPC_CMPU_EQ_QB_DSP
:
23312 case OPC_CMPU_EQ_QB
:
23314 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23316 case OPC_CMPU_LT_QB
:
23318 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23320 case OPC_CMPU_LE_QB
:
23322 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23324 case OPC_CMPGU_EQ_QB
:
23326 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23328 case OPC_CMPGU_LT_QB
:
23330 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23332 case OPC_CMPGU_LE_QB
:
23334 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23336 case OPC_CMPGDU_EQ_QB
:
23338 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23339 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23340 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23341 tcg_gen_shli_tl(t1
, t1
, 24);
23342 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23344 case OPC_CMPGDU_LT_QB
:
23346 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23347 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23348 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23349 tcg_gen_shli_tl(t1
, t1
, 24);
23350 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23352 case OPC_CMPGDU_LE_QB
:
23354 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23355 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23356 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23357 tcg_gen_shli_tl(t1
, t1
, 24);
23358 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23360 case OPC_CMP_EQ_PH
:
23362 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23364 case OPC_CMP_LT_PH
:
23366 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23368 case OPC_CMP_LE_PH
:
23370 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23374 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23378 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23380 case OPC_PACKRL_PH
:
23382 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23386 #ifdef TARGET_MIPS64
23387 case OPC_CMPU_EQ_OB_DSP
:
23389 case OPC_CMP_EQ_PW
:
23391 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23393 case OPC_CMP_LT_PW
:
23395 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23397 case OPC_CMP_LE_PW
:
23399 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23401 case OPC_CMP_EQ_QH
:
23403 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23405 case OPC_CMP_LT_QH
:
23407 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23409 case OPC_CMP_LE_QH
:
23411 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23413 case OPC_CMPGDU_EQ_OB
:
23415 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23417 case OPC_CMPGDU_LT_OB
:
23419 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23421 case OPC_CMPGDU_LE_OB
:
23423 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23425 case OPC_CMPGU_EQ_OB
:
23427 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23429 case OPC_CMPGU_LT_OB
:
23431 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23433 case OPC_CMPGU_LE_OB
:
23435 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23437 case OPC_CMPU_EQ_OB
:
23439 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23441 case OPC_CMPU_LT_OB
:
23443 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23445 case OPC_CMPU_LE_OB
:
23447 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23449 case OPC_PACKRL_PW
:
23451 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23455 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23459 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23463 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23471 tcg_temp_free(v1_t
);
23472 tcg_temp_free(v2_t
);
23475 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23476 uint32_t op1
, int rt
, int rs
, int sa
)
23483 /* Treat as NOP. */
23487 t0
= tcg_temp_new();
23488 gen_load_gpr(t0
, rs
);
23491 case OPC_APPEND_DSP
:
23492 switch (MASK_APPEND(ctx
->opcode
)) {
23495 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23497 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23501 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23502 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23503 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23504 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23506 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23510 if (sa
!= 0 && sa
!= 2) {
23511 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23512 tcg_gen_ext32u_tl(t0
, t0
);
23513 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23514 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23516 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23518 default: /* Invalid */
23519 MIPS_INVAL("MASK APPEND");
23520 generate_exception_end(ctx
, EXCP_RI
);
23524 #ifdef TARGET_MIPS64
23525 case OPC_DAPPEND_DSP
:
23526 switch (MASK_DAPPEND(ctx
->opcode
)) {
23529 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23533 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23534 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23535 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23539 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23540 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23541 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23546 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23547 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23548 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23549 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23552 default: /* Invalid */
23553 MIPS_INVAL("MASK DAPPEND");
23554 generate_exception_end(ctx
, EXCP_RI
);
23563 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23564 int ret
, int v1
, int v2
, int check_ret
)
23573 if ((ret
== 0) && (check_ret
== 1)) {
23574 /* Treat as NOP. */
23578 t0
= tcg_temp_new();
23579 t1
= tcg_temp_new();
23580 v1_t
= tcg_temp_new();
23581 v2_t
= tcg_temp_new();
23583 gen_load_gpr(v1_t
, v1
);
23584 gen_load_gpr(v2_t
, v2
);
23587 case OPC_EXTR_W_DSP
:
23591 tcg_gen_movi_tl(t0
, v2
);
23592 tcg_gen_movi_tl(t1
, v1
);
23593 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23596 tcg_gen_movi_tl(t0
, v2
);
23597 tcg_gen_movi_tl(t1
, v1
);
23598 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23600 case OPC_EXTR_RS_W
:
23601 tcg_gen_movi_tl(t0
, v2
);
23602 tcg_gen_movi_tl(t1
, v1
);
23603 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23606 tcg_gen_movi_tl(t0
, v2
);
23607 tcg_gen_movi_tl(t1
, v1
);
23608 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23610 case OPC_EXTRV_S_H
:
23611 tcg_gen_movi_tl(t0
, v2
);
23612 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23615 tcg_gen_movi_tl(t0
, v2
);
23616 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23618 case OPC_EXTRV_R_W
:
23619 tcg_gen_movi_tl(t0
, v2
);
23620 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23622 case OPC_EXTRV_RS_W
:
23623 tcg_gen_movi_tl(t0
, v2
);
23624 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23627 tcg_gen_movi_tl(t0
, v2
);
23628 tcg_gen_movi_tl(t1
, v1
);
23629 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23632 tcg_gen_movi_tl(t0
, v2
);
23633 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23636 tcg_gen_movi_tl(t0
, v2
);
23637 tcg_gen_movi_tl(t1
, v1
);
23638 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23641 tcg_gen_movi_tl(t0
, v2
);
23642 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23645 imm
= (ctx
->opcode
>> 20) & 0x3F;
23646 tcg_gen_movi_tl(t0
, ret
);
23647 tcg_gen_movi_tl(t1
, imm
);
23648 gen_helper_shilo(t0
, t1
, cpu_env
);
23651 tcg_gen_movi_tl(t0
, ret
);
23652 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23655 tcg_gen_movi_tl(t0
, ret
);
23656 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23659 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23660 tcg_gen_movi_tl(t0
, imm
);
23661 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23664 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23665 tcg_gen_movi_tl(t0
, imm
);
23666 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23670 #ifdef TARGET_MIPS64
23671 case OPC_DEXTR_W_DSP
:
23675 tcg_gen_movi_tl(t0
, ret
);
23676 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23680 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23681 int ac
= (ctx
->opcode
>> 11) & 0x03;
23682 tcg_gen_movi_tl(t0
, shift
);
23683 tcg_gen_movi_tl(t1
, ac
);
23684 gen_helper_dshilo(t0
, t1
, cpu_env
);
23689 int ac
= (ctx
->opcode
>> 11) & 0x03;
23690 tcg_gen_movi_tl(t0
, ac
);
23691 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23695 tcg_gen_movi_tl(t0
, v2
);
23696 tcg_gen_movi_tl(t1
, v1
);
23698 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23701 tcg_gen_movi_tl(t0
, v2
);
23702 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23705 tcg_gen_movi_tl(t0
, v2
);
23706 tcg_gen_movi_tl(t1
, v1
);
23707 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23710 tcg_gen_movi_tl(t0
, v2
);
23711 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23714 tcg_gen_movi_tl(t0
, v2
);
23715 tcg_gen_movi_tl(t1
, v1
);
23716 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23718 case OPC_DEXTR_R_L
:
23719 tcg_gen_movi_tl(t0
, v2
);
23720 tcg_gen_movi_tl(t1
, v1
);
23721 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23723 case OPC_DEXTR_RS_L
:
23724 tcg_gen_movi_tl(t0
, v2
);
23725 tcg_gen_movi_tl(t1
, v1
);
23726 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23729 tcg_gen_movi_tl(t0
, v2
);
23730 tcg_gen_movi_tl(t1
, v1
);
23731 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23733 case OPC_DEXTR_R_W
:
23734 tcg_gen_movi_tl(t0
, v2
);
23735 tcg_gen_movi_tl(t1
, v1
);
23736 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23738 case OPC_DEXTR_RS_W
:
23739 tcg_gen_movi_tl(t0
, v2
);
23740 tcg_gen_movi_tl(t1
, v1
);
23741 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23743 case OPC_DEXTR_S_H
:
23744 tcg_gen_movi_tl(t0
, v2
);
23745 tcg_gen_movi_tl(t1
, v1
);
23746 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23748 case OPC_DEXTRV_S_H
:
23749 tcg_gen_movi_tl(t0
, v2
);
23750 tcg_gen_movi_tl(t1
, v1
);
23751 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23754 tcg_gen_movi_tl(t0
, v2
);
23755 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23757 case OPC_DEXTRV_R_L
:
23758 tcg_gen_movi_tl(t0
, v2
);
23759 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23761 case OPC_DEXTRV_RS_L
:
23762 tcg_gen_movi_tl(t0
, v2
);
23763 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23766 tcg_gen_movi_tl(t0
, v2
);
23767 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23769 case OPC_DEXTRV_R_W
:
23770 tcg_gen_movi_tl(t0
, v2
);
23771 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23773 case OPC_DEXTRV_RS_W
:
23774 tcg_gen_movi_tl(t0
, v2
);
23775 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23784 tcg_temp_free(v1_t
);
23785 tcg_temp_free(v2_t
);
23788 /* End MIPSDSP functions. */
23790 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23792 int rs
, rt
, rd
, sa
;
23795 rs
= (ctx
->opcode
>> 21) & 0x1f;
23796 rt
= (ctx
->opcode
>> 16) & 0x1f;
23797 rd
= (ctx
->opcode
>> 11) & 0x1f;
23798 sa
= (ctx
->opcode
>> 6) & 0x1f;
23800 op1
= MASK_SPECIAL(ctx
->opcode
);
23803 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23809 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23819 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23822 MIPS_INVAL("special_r6 muldiv");
23823 generate_exception_end(ctx
, EXCP_RI
);
23829 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23833 if (rt
== 0 && sa
== 1) {
23834 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23835 We need additionally to check other fields */
23836 gen_cl(ctx
, op1
, rd
, rs
);
23838 generate_exception_end(ctx
, EXCP_RI
);
23842 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23843 gen_helper_do_semihosting(cpu_env
);
23845 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23846 generate_exception_end(ctx
, EXCP_RI
);
23848 generate_exception_end(ctx
, EXCP_DBp
);
23852 #if defined(TARGET_MIPS64)
23854 check_mips_64(ctx
);
23855 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23859 if (rt
== 0 && sa
== 1) {
23860 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23861 We need additionally to check other fields */
23862 check_mips_64(ctx
);
23863 gen_cl(ctx
, op1
, rd
, rs
);
23865 generate_exception_end(ctx
, EXCP_RI
);
23873 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23883 check_mips_64(ctx
);
23884 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23887 MIPS_INVAL("special_r6 muldiv");
23888 generate_exception_end(ctx
, EXCP_RI
);
23893 default: /* Invalid */
23894 MIPS_INVAL("special_r6");
23895 generate_exception_end(ctx
, EXCP_RI
);
23900 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
23902 int rs
= extract32(ctx
->opcode
, 21, 5);
23903 int rt
= extract32(ctx
->opcode
, 16, 5);
23904 int rd
= extract32(ctx
->opcode
, 11, 5);
23905 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
23908 case OPC_MOVN
: /* Conditional move */
23910 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23912 case OPC_MFHI
: /* Move from HI/LO */
23914 gen_HILO(ctx
, op1
, 0, rd
);
23917 case OPC_MTLO
: /* Move to HI/LO */
23918 gen_HILO(ctx
, op1
, 0, rs
);
23922 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23926 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23928 #if defined(TARGET_MIPS64)
23933 check_insn_opc_user_only(ctx
, INSN_R5900
);
23934 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23938 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
23940 default: /* Invalid */
23941 MIPS_INVAL("special_tx79");
23942 generate_exception_end(ctx
, EXCP_RI
);
23947 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23949 int rs
, rt
, rd
, sa
;
23952 rs
= (ctx
->opcode
>> 21) & 0x1f;
23953 rt
= (ctx
->opcode
>> 16) & 0x1f;
23954 rd
= (ctx
->opcode
>> 11) & 0x1f;
23955 sa
= (ctx
->opcode
>> 6) & 0x1f;
23957 op1
= MASK_SPECIAL(ctx
->opcode
);
23959 case OPC_MOVN
: /* Conditional move */
23961 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23962 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
23963 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23965 case OPC_MFHI
: /* Move from HI/LO */
23967 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23970 case OPC_MTLO
: /* Move to HI/LO */
23971 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23974 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23975 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23976 check_cp1_enabled(ctx
);
23977 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23978 (ctx
->opcode
>> 16) & 1);
23980 generate_exception_err(ctx
, EXCP_CpU
, 1);
23986 check_insn(ctx
, INSN_VR54XX
);
23987 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23988 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23990 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23995 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23997 #if defined(TARGET_MIPS64)
24002 check_insn(ctx
, ISA_MIPS3
);
24003 check_mips_64(ctx
);
24004 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24008 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24011 #ifdef MIPS_STRICT_STANDARD
24012 MIPS_INVAL("SPIM");
24013 generate_exception_end(ctx
, EXCP_RI
);
24015 /* Implemented as RI exception for now. */
24016 MIPS_INVAL("spim (unofficial)");
24017 generate_exception_end(ctx
, EXCP_RI
);
24020 default: /* Invalid */
24021 MIPS_INVAL("special_legacy");
24022 generate_exception_end(ctx
, EXCP_RI
);
24027 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24029 int rs
, rt
, rd
, sa
;
24032 rs
= (ctx
->opcode
>> 21) & 0x1f;
24033 rt
= (ctx
->opcode
>> 16) & 0x1f;
24034 rd
= (ctx
->opcode
>> 11) & 0x1f;
24035 sa
= (ctx
->opcode
>> 6) & 0x1f;
24037 op1
= MASK_SPECIAL(ctx
->opcode
);
24039 case OPC_SLL
: /* Shift with immediate */
24040 if (sa
== 5 && rd
== 0 &&
24041 rs
== 0 && rt
== 0) { /* PAUSE */
24042 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24043 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24044 generate_exception_end(ctx
, EXCP_RI
);
24050 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24053 switch ((ctx
->opcode
>> 21) & 0x1f) {
24055 /* rotr is decoded as srl on non-R2 CPUs */
24056 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24061 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24064 generate_exception_end(ctx
, EXCP_RI
);
24072 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24074 case OPC_SLLV
: /* Shifts */
24076 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24079 switch ((ctx
->opcode
>> 6) & 0x1f) {
24081 /* rotrv is decoded as srlv on non-R2 CPUs */
24082 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24087 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24090 generate_exception_end(ctx
, EXCP_RI
);
24094 case OPC_SLT
: /* Set on less than */
24096 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24098 case OPC_AND
: /* Logic*/
24102 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24105 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24107 case OPC_TGE
: /* Traps */
24113 check_insn(ctx
, ISA_MIPS2
);
24114 gen_trap(ctx
, op1
, rs
, rt
, -1);
24116 case OPC_LSA
: /* OPC_PMON */
24117 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24118 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24119 decode_opc_special_r6(env
, ctx
);
24121 /* Pmon entry point, also R4010 selsl */
24122 #ifdef MIPS_STRICT_STANDARD
24123 MIPS_INVAL("PMON / selsl");
24124 generate_exception_end(ctx
, EXCP_RI
);
24126 gen_helper_0e0i(pmon
, sa
);
24131 generate_exception_end(ctx
, EXCP_SYSCALL
);
24134 generate_exception_end(ctx
, EXCP_BREAK
);
24137 check_insn(ctx
, ISA_MIPS2
);
24138 gen_sync(extract32(ctx
->opcode
, 6, 5));
24141 #if defined(TARGET_MIPS64)
24142 /* MIPS64 specific opcodes */
24147 check_insn(ctx
, ISA_MIPS3
);
24148 check_mips_64(ctx
);
24149 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24152 switch ((ctx
->opcode
>> 21) & 0x1f) {
24154 /* drotr is decoded as dsrl on non-R2 CPUs */
24155 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24160 check_insn(ctx
, ISA_MIPS3
);
24161 check_mips_64(ctx
);
24162 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24165 generate_exception_end(ctx
, EXCP_RI
);
24170 switch ((ctx
->opcode
>> 21) & 0x1f) {
24172 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24173 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24178 check_insn(ctx
, ISA_MIPS3
);
24179 check_mips_64(ctx
);
24180 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24183 generate_exception_end(ctx
, EXCP_RI
);
24191 check_insn(ctx
, ISA_MIPS3
);
24192 check_mips_64(ctx
);
24193 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24197 check_insn(ctx
, ISA_MIPS3
);
24198 check_mips_64(ctx
);
24199 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24202 switch ((ctx
->opcode
>> 6) & 0x1f) {
24204 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24205 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24210 check_insn(ctx
, ISA_MIPS3
);
24211 check_mips_64(ctx
);
24212 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24215 generate_exception_end(ctx
, EXCP_RI
);
24220 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24221 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24222 decode_opc_special_r6(env
, ctx
);
24227 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24228 decode_opc_special_r6(env
, ctx
);
24229 } else if (ctx
->insn_flags
& INSN_R5900
) {
24230 decode_opc_special_tx79(env
, ctx
);
24232 decode_opc_special_legacy(env
, ctx
);
24238 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24239 #define MXU_APTN1_A 0
24240 #define MXU_APTN1_S 1
24242 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24243 #define MXU_APTN2_AA 0
24244 #define MXU_APTN2_AS 1
24245 #define MXU_APTN2_SA 2
24246 #define MXU_APTN2_SS 3
24248 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24249 #define MXU_EPTN2_AA 0
24250 #define MXU_EPTN2_AS 1
24251 #define MXU_EPTN2_SA 2
24252 #define MXU_EPTN2_SS 3
24254 /* MXU operand getting pattern 'optn2' */
24255 #define MXU_OPTN2_PTN0 0
24256 #define MXU_OPTN2_PTN1 1
24257 #define MXU_OPTN2_PTN2 2
24258 #define MXU_OPTN2_PTN3 3
24259 /* alternative naming scheme for 'optn2' */
24260 #define MXU_OPTN2_WW 0
24261 #define MXU_OPTN2_LW 1
24262 #define MXU_OPTN2_HW 2
24263 #define MXU_OPTN2_XW 3
24265 /* MXU operand getting pattern 'optn3' */
24266 #define MXU_OPTN3_PTN0 0
24267 #define MXU_OPTN3_PTN1 1
24268 #define MXU_OPTN3_PTN2 2
24269 #define MXU_OPTN3_PTN3 3
24270 #define MXU_OPTN3_PTN4 4
24271 #define MXU_OPTN3_PTN5 5
24272 #define MXU_OPTN3_PTN6 6
24273 #define MXU_OPTN3_PTN7 7
24277 * S32I2M XRa, rb - Register move from GRF to XRF
24279 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24284 t0
= tcg_temp_new();
24286 XRa
= extract32(ctx
->opcode
, 6, 5);
24287 Rb
= extract32(ctx
->opcode
, 16, 5);
24289 gen_load_gpr(t0
, Rb
);
24291 gen_store_mxu_gpr(t0
, XRa
);
24292 } else if (XRa
== 16) {
24293 gen_store_mxu_cr(t0
);
24300 * S32M2I XRa, rb - Register move from XRF to GRF
24302 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24307 t0
= tcg_temp_new();
24309 XRa
= extract32(ctx
->opcode
, 6, 5);
24310 Rb
= extract32(ctx
->opcode
, 16, 5);
24313 gen_load_mxu_gpr(t0
, XRa
);
24314 } else if (XRa
== 16) {
24315 gen_load_mxu_cr(t0
);
24318 gen_store_gpr(t0
, Rb
);
24324 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24326 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24329 uint32_t XRa
, Rb
, s8
, optn3
;
24331 t0
= tcg_temp_new();
24332 t1
= tcg_temp_new();
24334 XRa
= extract32(ctx
->opcode
, 6, 4);
24335 s8
= extract32(ctx
->opcode
, 10, 8);
24336 optn3
= extract32(ctx
->opcode
, 18, 3);
24337 Rb
= extract32(ctx
->opcode
, 21, 5);
24339 gen_load_gpr(t0
, Rb
);
24340 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24343 /* XRa[7:0] = tmp8 */
24344 case MXU_OPTN3_PTN0
:
24345 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24346 gen_load_mxu_gpr(t0
, XRa
);
24347 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24349 /* XRa[15:8] = tmp8 */
24350 case MXU_OPTN3_PTN1
:
24351 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24352 gen_load_mxu_gpr(t0
, XRa
);
24353 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24355 /* XRa[23:16] = tmp8 */
24356 case MXU_OPTN3_PTN2
:
24357 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24358 gen_load_mxu_gpr(t0
, XRa
);
24359 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24361 /* XRa[31:24] = tmp8 */
24362 case MXU_OPTN3_PTN3
:
24363 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24364 gen_load_mxu_gpr(t0
, XRa
);
24365 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24367 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24368 case MXU_OPTN3_PTN4
:
24369 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24370 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24372 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24373 case MXU_OPTN3_PTN5
:
24374 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24375 tcg_gen_shli_tl(t1
, t1
, 8);
24376 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24378 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24379 case MXU_OPTN3_PTN6
:
24380 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24381 tcg_gen_mov_tl(t0
, t1
);
24382 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24383 tcg_gen_shli_tl(t1
, t1
, 16);
24384 tcg_gen_or_tl(t0
, t0
, t1
);
24386 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24387 case MXU_OPTN3_PTN7
:
24388 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24389 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24390 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24394 gen_store_mxu_gpr(t0
, XRa
);
24401 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24403 static void gen_mxu_d16mul(DisasContext
*ctx
)
24405 TCGv t0
, t1
, t2
, t3
;
24406 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24408 t0
= tcg_temp_new();
24409 t1
= tcg_temp_new();
24410 t2
= tcg_temp_new();
24411 t3
= tcg_temp_new();
24413 XRa
= extract32(ctx
->opcode
, 6, 4);
24414 XRb
= extract32(ctx
->opcode
, 10, 4);
24415 XRc
= extract32(ctx
->opcode
, 14, 4);
24416 XRd
= extract32(ctx
->opcode
, 18, 4);
24417 optn2
= extract32(ctx
->opcode
, 22, 2);
24419 gen_load_mxu_gpr(t1
, XRb
);
24420 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24421 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24422 gen_load_mxu_gpr(t3
, XRc
);
24423 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24424 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24427 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24428 tcg_gen_mul_tl(t3
, t1
, t3
);
24429 tcg_gen_mul_tl(t2
, t0
, t2
);
24431 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24432 tcg_gen_mul_tl(t3
, t0
, t3
);
24433 tcg_gen_mul_tl(t2
, t0
, t2
);
24435 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24436 tcg_gen_mul_tl(t3
, t1
, t3
);
24437 tcg_gen_mul_tl(t2
, t1
, t2
);
24439 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24440 tcg_gen_mul_tl(t3
, t0
, t3
);
24441 tcg_gen_mul_tl(t2
, t1
, t2
);
24444 gen_store_mxu_gpr(t3
, XRa
);
24445 gen_store_mxu_gpr(t2
, XRd
);
24454 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24457 static void gen_mxu_d16mac(DisasContext
*ctx
)
24459 TCGv t0
, t1
, t2
, t3
;
24460 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24462 t0
= tcg_temp_new();
24463 t1
= tcg_temp_new();
24464 t2
= tcg_temp_new();
24465 t3
= tcg_temp_new();
24467 XRa
= extract32(ctx
->opcode
, 6, 4);
24468 XRb
= extract32(ctx
->opcode
, 10, 4);
24469 XRc
= extract32(ctx
->opcode
, 14, 4);
24470 XRd
= extract32(ctx
->opcode
, 18, 4);
24471 optn2
= extract32(ctx
->opcode
, 22, 2);
24472 aptn2
= extract32(ctx
->opcode
, 24, 2);
24474 gen_load_mxu_gpr(t1
, XRb
);
24475 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24476 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24478 gen_load_mxu_gpr(t3
, XRc
);
24479 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24480 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24483 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24484 tcg_gen_mul_tl(t3
, t1
, t3
);
24485 tcg_gen_mul_tl(t2
, t0
, t2
);
24487 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24488 tcg_gen_mul_tl(t3
, t0
, t3
);
24489 tcg_gen_mul_tl(t2
, t0
, t2
);
24491 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24492 tcg_gen_mul_tl(t3
, t1
, t3
);
24493 tcg_gen_mul_tl(t2
, t1
, t2
);
24495 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24496 tcg_gen_mul_tl(t3
, t0
, t3
);
24497 tcg_gen_mul_tl(t2
, t1
, t2
);
24500 gen_load_mxu_gpr(t0
, XRa
);
24501 gen_load_mxu_gpr(t1
, XRd
);
24505 tcg_gen_add_tl(t3
, t0
, t3
);
24506 tcg_gen_add_tl(t2
, t1
, t2
);
24509 tcg_gen_add_tl(t3
, t0
, t3
);
24510 tcg_gen_sub_tl(t2
, t1
, t2
);
24513 tcg_gen_sub_tl(t3
, t0
, t3
);
24514 tcg_gen_add_tl(t2
, t1
, t2
);
24517 tcg_gen_sub_tl(t3
, t0
, t3
);
24518 tcg_gen_sub_tl(t2
, t1
, t2
);
24521 gen_store_mxu_gpr(t3
, XRa
);
24522 gen_store_mxu_gpr(t2
, XRd
);
24531 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24532 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24534 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24536 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24537 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24539 t0
= tcg_temp_new();
24540 t1
= tcg_temp_new();
24541 t2
= tcg_temp_new();
24542 t3
= tcg_temp_new();
24543 t4
= tcg_temp_new();
24544 t5
= tcg_temp_new();
24545 t6
= tcg_temp_new();
24546 t7
= tcg_temp_new();
24548 XRa
= extract32(ctx
->opcode
, 6, 4);
24549 XRb
= extract32(ctx
->opcode
, 10, 4);
24550 XRc
= extract32(ctx
->opcode
, 14, 4);
24551 XRd
= extract32(ctx
->opcode
, 18, 4);
24552 sel
= extract32(ctx
->opcode
, 22, 2);
24554 gen_load_mxu_gpr(t3
, XRb
);
24555 gen_load_mxu_gpr(t7
, XRc
);
24559 tcg_gen_ext8s_tl(t0
, t3
);
24560 tcg_gen_shri_tl(t3
, t3
, 8);
24561 tcg_gen_ext8s_tl(t1
, t3
);
24562 tcg_gen_shri_tl(t3
, t3
, 8);
24563 tcg_gen_ext8s_tl(t2
, t3
);
24564 tcg_gen_shri_tl(t3
, t3
, 8);
24565 tcg_gen_ext8s_tl(t3
, t3
);
24568 tcg_gen_ext8u_tl(t0
, t3
);
24569 tcg_gen_shri_tl(t3
, t3
, 8);
24570 tcg_gen_ext8u_tl(t1
, t3
);
24571 tcg_gen_shri_tl(t3
, t3
, 8);
24572 tcg_gen_ext8u_tl(t2
, t3
);
24573 tcg_gen_shri_tl(t3
, t3
, 8);
24574 tcg_gen_ext8u_tl(t3
, t3
);
24577 tcg_gen_ext8u_tl(t4
, t7
);
24578 tcg_gen_shri_tl(t7
, t7
, 8);
24579 tcg_gen_ext8u_tl(t5
, t7
);
24580 tcg_gen_shri_tl(t7
, t7
, 8);
24581 tcg_gen_ext8u_tl(t6
, t7
);
24582 tcg_gen_shri_tl(t7
, t7
, 8);
24583 tcg_gen_ext8u_tl(t7
, t7
);
24585 tcg_gen_mul_tl(t0
, t0
, t4
);
24586 tcg_gen_mul_tl(t1
, t1
, t5
);
24587 tcg_gen_mul_tl(t2
, t2
, t6
);
24588 tcg_gen_mul_tl(t3
, t3
, t7
);
24590 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
24591 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
24592 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
24593 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
24595 tcg_gen_shli_tl(t1
, t1
, 16);
24596 tcg_gen_shli_tl(t3
, t3
, 16);
24598 tcg_gen_or_tl(t0
, t0
, t1
);
24599 tcg_gen_or_tl(t1
, t2
, t3
);
24601 gen_store_mxu_gpr(t0
, XRd
);
24602 gen_store_mxu_gpr(t1
, XRa
);
24615 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
24616 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
24618 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
24621 uint32_t XRa
, Rb
, s12
, sel
;
24623 t0
= tcg_temp_new();
24624 t1
= tcg_temp_new();
24626 XRa
= extract32(ctx
->opcode
, 6, 4);
24627 s12
= extract32(ctx
->opcode
, 10, 10);
24628 sel
= extract32(ctx
->opcode
, 20, 1);
24629 Rb
= extract32(ctx
->opcode
, 21, 5);
24631 gen_load_gpr(t0
, Rb
);
24633 tcg_gen_movi_tl(t1
, s12
);
24634 tcg_gen_shli_tl(t1
, t1
, 2);
24636 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
24638 tcg_gen_add_tl(t1
, t0
, t1
);
24639 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
24643 tcg_gen_bswap32_tl(t1
, t1
);
24645 gen_store_mxu_gpr(t1
, XRa
);
24653 * Decoding engine for MXU
24654 * =======================
24659 * Decode MXU pool00
24661 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24662 * +-----------+---------+-----+-------+-------+-------+-----------+
24663 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
24664 * +-----------+---------+-----+-------+-------+-------+-----------+
24667 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
24669 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24672 case OPC_MXU_S32MAX
:
24673 /* TODO: Implement emulation of S32MAX instruction. */
24674 MIPS_INVAL("OPC_MXU_S32MAX");
24675 generate_exception_end(ctx
, EXCP_RI
);
24677 case OPC_MXU_S32MIN
:
24678 /* TODO: Implement emulation of S32MIN instruction. */
24679 MIPS_INVAL("OPC_MXU_S32MIN");
24680 generate_exception_end(ctx
, EXCP_RI
);
24682 case OPC_MXU_D16MAX
:
24683 /* TODO: Implement emulation of D16MAX instruction. */
24684 MIPS_INVAL("OPC_MXU_D16MAX");
24685 generate_exception_end(ctx
, EXCP_RI
);
24687 case OPC_MXU_D16MIN
:
24688 /* TODO: Implement emulation of D16MIN instruction. */
24689 MIPS_INVAL("OPC_MXU_D16MIN");
24690 generate_exception_end(ctx
, EXCP_RI
);
24692 case OPC_MXU_Q8MAX
:
24693 /* TODO: Implement emulation of Q8MAX instruction. */
24694 MIPS_INVAL("OPC_MXU_Q8MAX");
24695 generate_exception_end(ctx
, EXCP_RI
);
24697 case OPC_MXU_Q8MIN
:
24698 /* TODO: Implement emulation of Q8MIN instruction. */
24699 MIPS_INVAL("OPC_MXU_Q8MIN");
24700 generate_exception_end(ctx
, EXCP_RI
);
24702 case OPC_MXU_Q8SLT
:
24703 /* TODO: Implement emulation of Q8SLT instruction. */
24704 MIPS_INVAL("OPC_MXU_Q8SLT");
24705 generate_exception_end(ctx
, EXCP_RI
);
24707 case OPC_MXU_Q8SLTU
:
24708 /* TODO: Implement emulation of Q8SLTU instruction. */
24709 MIPS_INVAL("OPC_MXU_Q8SLTU");
24710 generate_exception_end(ctx
, EXCP_RI
);
24713 MIPS_INVAL("decode_opc_mxu");
24714 generate_exception_end(ctx
, EXCP_RI
);
24721 * Decode MXU pool01
24723 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
24724 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24725 * +-----------+---------+-----+-------+-------+-------+-----------+
24726 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
24727 * +-----------+---------+-----+-------+-------+-------+-----------+
24730 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24731 * +-----------+---+-----+-----+-------+-------+-------+-----------+
24732 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
24733 * +-----------+---+-----+-----+-------+-------+-------+-----------+
24736 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
24738 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24741 case OPC_MXU_S32SLT
:
24742 /* TODO: Implement emulation of S32SLT instruction. */
24743 MIPS_INVAL("OPC_MXU_S32SLT");
24744 generate_exception_end(ctx
, EXCP_RI
);
24746 case OPC_MXU_D16SLT
:
24747 /* TODO: Implement emulation of D16SLT instruction. */
24748 MIPS_INVAL("OPC_MXU_D16SLT");
24749 generate_exception_end(ctx
, EXCP_RI
);
24751 case OPC_MXU_D16AVG
:
24752 /* TODO: Implement emulation of D16AVG instruction. */
24753 MIPS_INVAL("OPC_MXU_D16AVG");
24754 generate_exception_end(ctx
, EXCP_RI
);
24756 case OPC_MXU_D16AVGR
:
24757 /* TODO: Implement emulation of D16AVGR instruction. */
24758 MIPS_INVAL("OPC_MXU_D16AVGR");
24759 generate_exception_end(ctx
, EXCP_RI
);
24761 case OPC_MXU_Q8AVG
:
24762 /* TODO: Implement emulation of Q8AVG instruction. */
24763 MIPS_INVAL("OPC_MXU_Q8AVG");
24764 generate_exception_end(ctx
, EXCP_RI
);
24766 case OPC_MXU_Q8AVGR
:
24767 /* TODO: Implement emulation of Q8AVGR instruction. */
24768 MIPS_INVAL("OPC_MXU_Q8AVGR");
24769 generate_exception_end(ctx
, EXCP_RI
);
24771 case OPC_MXU_Q8ADD
:
24772 /* TODO: Implement emulation of Q8ADD instruction. */
24773 MIPS_INVAL("OPC_MXU_Q8ADD");
24774 generate_exception_end(ctx
, EXCP_RI
);
24777 MIPS_INVAL("decode_opc_mxu");
24778 generate_exception_end(ctx
, EXCP_RI
);
24785 * Decode MXU pool02
24787 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24788 * +-----------+---------+-----+-------+-------+-------+-----------+
24789 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
24790 * +-----------+---------+-----+-------+-------+-------+-----------+
24793 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
24795 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
24798 case OPC_MXU_S32CPS
:
24799 /* TODO: Implement emulation of S32CPS instruction. */
24800 MIPS_INVAL("OPC_MXU_S32CPS");
24801 generate_exception_end(ctx
, EXCP_RI
);
24803 case OPC_MXU_D16CPS
:
24804 /* TODO: Implement emulation of D16CPS instruction. */
24805 MIPS_INVAL("OPC_MXU_D16CPS");
24806 generate_exception_end(ctx
, EXCP_RI
);
24808 case OPC_MXU_Q8ABD
:
24809 /* TODO: Implement emulation of Q8ABD instruction. */
24810 MIPS_INVAL("OPC_MXU_Q8ABD");
24811 generate_exception_end(ctx
, EXCP_RI
);
24813 case OPC_MXU_Q16SAT
:
24814 /* TODO: Implement emulation of Q16SAT instruction. */
24815 MIPS_INVAL("OPC_MXU_Q16SAT");
24816 generate_exception_end(ctx
, EXCP_RI
);
24819 MIPS_INVAL("decode_opc_mxu");
24820 generate_exception_end(ctx
, EXCP_RI
);
24827 * Decode MXU pool03
24830 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24831 * +-----------+---+---+-------+-------+-------+-------+-----------+
24832 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
24833 * +-----------+---+---+-------+-------+-------+-------+-----------+
24836 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24837 * +-----------+---+---+-------+-------+-------+-------+-----------+
24838 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
24839 * +-----------+---+---+-------+-------+-------+-------+-----------+
24842 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
24844 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
24847 case OPC_MXU_D16MULF
:
24848 /* TODO: Implement emulation of D16MULF instruction. */
24849 MIPS_INVAL("OPC_MXU_D16MULF");
24850 generate_exception_end(ctx
, EXCP_RI
);
24852 case OPC_MXU_D16MULE
:
24853 /* TODO: Implement emulation of D16MULE instruction. */
24854 MIPS_INVAL("OPC_MXU_D16MULE");
24855 generate_exception_end(ctx
, EXCP_RI
);
24858 MIPS_INVAL("decode_opc_mxu");
24859 generate_exception_end(ctx
, EXCP_RI
);
24866 * Decode MXU pool04
24868 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24869 * +-----------+---------+-+-------------------+-------+-----------+
24870 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
24871 * +-----------+---------+-+-------------------+-------+-----------+
24874 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
24876 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
24879 case OPC_MXU_S32LDD
:
24880 case OPC_MXU_S32LDDR
:
24881 gen_mxu_s32ldd_s32lddr(ctx
);
24884 MIPS_INVAL("decode_opc_mxu");
24885 generate_exception_end(ctx
, EXCP_RI
);
24892 * Decode MXU pool05
24894 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24895 * +-----------+---------+-+-------------------+-------+-----------+
24896 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
24897 * +-----------+---------+-+-------------------+-------+-----------+
24900 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
24902 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
24905 case OPC_MXU_S32STD
:
24906 /* TODO: Implement emulation of S32STD instruction. */
24907 MIPS_INVAL("OPC_MXU_S32STD");
24908 generate_exception_end(ctx
, EXCP_RI
);
24910 case OPC_MXU_S32STDR
:
24911 /* TODO: Implement emulation of S32STDR instruction. */
24912 MIPS_INVAL("OPC_MXU_S32STDR");
24913 generate_exception_end(ctx
, EXCP_RI
);
24916 MIPS_INVAL("decode_opc_mxu");
24917 generate_exception_end(ctx
, EXCP_RI
);
24924 * Decode MXU pool06
24926 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24927 * +-----------+---------+---------+---+-------+-------+-----------+
24928 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
24929 * +-----------+---------+---------+---+-------+-------+-----------+
24932 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
24934 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
24937 case OPC_MXU_S32LDDV
:
24938 /* TODO: Implement emulation of S32LDDV instruction. */
24939 MIPS_INVAL("OPC_MXU_S32LDDV");
24940 generate_exception_end(ctx
, EXCP_RI
);
24942 case OPC_MXU_S32LDDVR
:
24943 /* TODO: Implement emulation of S32LDDVR instruction. */
24944 MIPS_INVAL("OPC_MXU_S32LDDVR");
24945 generate_exception_end(ctx
, EXCP_RI
);
24948 MIPS_INVAL("decode_opc_mxu");
24949 generate_exception_end(ctx
, EXCP_RI
);
24956 * Decode MXU pool07
24958 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24959 * +-----------+---------+---------+---+-------+-------+-----------+
24960 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
24961 * +-----------+---------+---------+---+-------+-------+-----------+
24964 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
24966 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
24969 case OPC_MXU_S32STDV
:
24970 /* TODO: Implement emulation of S32TDV instruction. */
24971 MIPS_INVAL("OPC_MXU_S32TDV");
24972 generate_exception_end(ctx
, EXCP_RI
);
24974 case OPC_MXU_S32STDVR
:
24975 /* TODO: Implement emulation of S32TDVR instruction. */
24976 MIPS_INVAL("OPC_MXU_S32TDVR");
24977 generate_exception_end(ctx
, EXCP_RI
);
24980 MIPS_INVAL("decode_opc_mxu");
24981 generate_exception_end(ctx
, EXCP_RI
);
24988 * Decode MXU pool08
24990 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24991 * +-----------+---------+-+-------------------+-------+-----------+
24992 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
24993 * +-----------+---------+-+-------------------+-------+-----------+
24996 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
24998 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25001 case OPC_MXU_S32LDI
:
25002 /* TODO: Implement emulation of S32LDI instruction. */
25003 MIPS_INVAL("OPC_MXU_S32LDI");
25004 generate_exception_end(ctx
, EXCP_RI
);
25006 case OPC_MXU_S32LDIR
:
25007 /* TODO: Implement emulation of S32LDIR instruction. */
25008 MIPS_INVAL("OPC_MXU_S32LDIR");
25009 generate_exception_end(ctx
, EXCP_RI
);
25012 MIPS_INVAL("decode_opc_mxu");
25013 generate_exception_end(ctx
, EXCP_RI
);
25020 * Decode MXU pool09
25022 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25023 * +-----------+---------+-+-------------------+-------+-----------+
25024 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
25025 * +-----------+---------+-+-------------------+-------+-----------+
25028 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
25030 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25033 case OPC_MXU_S32SDI
:
25034 /* TODO: Implement emulation of S32SDI instruction. */
25035 MIPS_INVAL("OPC_MXU_S32SDI");
25036 generate_exception_end(ctx
, EXCP_RI
);
25038 case OPC_MXU_S32SDIR
:
25039 /* TODO: Implement emulation of S32SDIR instruction. */
25040 MIPS_INVAL("OPC_MXU_S32SDIR");
25041 generate_exception_end(ctx
, EXCP_RI
);
25044 MIPS_INVAL("decode_opc_mxu");
25045 generate_exception_end(ctx
, EXCP_RI
);
25052 * Decode MXU pool10
25054 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25055 * +-----------+---------+---------+---+-------+-------+-----------+
25056 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
25057 * +-----------+---------+---------+---+-------+-------+-----------+
25060 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
25062 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25065 case OPC_MXU_S32LDIV
:
25066 /* TODO: Implement emulation of S32LDIV instruction. */
25067 MIPS_INVAL("OPC_MXU_S32LDIV");
25068 generate_exception_end(ctx
, EXCP_RI
);
25070 case OPC_MXU_S32LDIVR
:
25071 /* TODO: Implement emulation of S32LDIVR instruction. */
25072 MIPS_INVAL("OPC_MXU_S32LDIVR");
25073 generate_exception_end(ctx
, EXCP_RI
);
25076 MIPS_INVAL("decode_opc_mxu");
25077 generate_exception_end(ctx
, EXCP_RI
);
25084 * Decode MXU pool11
25086 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25087 * +-----------+---------+---------+---+-------+-------+-----------+
25088 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
25089 * +-----------+---------+---------+---+-------+-------+-----------+
25092 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
25094 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25097 case OPC_MXU_S32SDIV
:
25098 /* TODO: Implement emulation of S32SDIV instruction. */
25099 MIPS_INVAL("OPC_MXU_S32SDIV");
25100 generate_exception_end(ctx
, EXCP_RI
);
25102 case OPC_MXU_S32SDIVR
:
25103 /* TODO: Implement emulation of S32SDIVR instruction. */
25104 MIPS_INVAL("OPC_MXU_S32SDIVR");
25105 generate_exception_end(ctx
, EXCP_RI
);
25108 MIPS_INVAL("decode_opc_mxu");
25109 generate_exception_end(ctx
, EXCP_RI
);
25116 * Decode MXU pool12
25118 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25119 * +-----------+---+---+-------+-------+-------+-------+-----------+
25120 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
25121 * +-----------+---+---+-------+-------+-------+-------+-----------+
25124 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
25126 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25129 case OPC_MXU_D32ACC
:
25130 /* TODO: Implement emulation of D32ACC instruction. */
25131 MIPS_INVAL("OPC_MXU_D32ACC");
25132 generate_exception_end(ctx
, EXCP_RI
);
25134 case OPC_MXU_D32ACCM
:
25135 /* TODO: Implement emulation of D32ACCM instruction. */
25136 MIPS_INVAL("OPC_MXU_D32ACCM");
25137 generate_exception_end(ctx
, EXCP_RI
);
25139 case OPC_MXU_D32ASUM
:
25140 /* TODO: Implement emulation of D32ASUM instruction. */
25141 MIPS_INVAL("OPC_MXU_D32ASUM");
25142 generate_exception_end(ctx
, EXCP_RI
);
25145 MIPS_INVAL("decode_opc_mxu");
25146 generate_exception_end(ctx
, EXCP_RI
);
25153 * Decode MXU pool13
25155 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25156 * +-----------+---+---+-------+-------+-------+-------+-----------+
25157 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
25158 * +-----------+---+---+-------+-------+-------+-------+-----------+
25161 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
25163 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25166 case OPC_MXU_Q16ACC
:
25167 /* TODO: Implement emulation of Q16ACC instruction. */
25168 MIPS_INVAL("OPC_MXU_Q16ACC");
25169 generate_exception_end(ctx
, EXCP_RI
);
25171 case OPC_MXU_Q16ACCM
:
25172 /* TODO: Implement emulation of Q16ACCM instruction. */
25173 MIPS_INVAL("OPC_MXU_Q16ACCM");
25174 generate_exception_end(ctx
, EXCP_RI
);
25176 case OPC_MXU_Q16ASUM
:
25177 /* TODO: Implement emulation of Q16ASUM instruction. */
25178 MIPS_INVAL("OPC_MXU_Q16ASUM");
25179 generate_exception_end(ctx
, EXCP_RI
);
25182 MIPS_INVAL("decode_opc_mxu");
25183 generate_exception_end(ctx
, EXCP_RI
);
25190 * Decode MXU pool14
25193 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25194 * +-----------+---+---+-------+-------+-------+-------+-----------+
25195 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
25196 * +-----------+---+---+-------+-------+-------+-------+-----------+
25199 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25200 * +-----------+---+---+-------+-------+-------+-------+-----------+
25201 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
25202 * +-----------+---+---+-------+-------+-------+-------+-----------+
25205 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
25207 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25210 case OPC_MXU_Q8ADDE
:
25211 /* TODO: Implement emulation of Q8ADDE instruction. */
25212 MIPS_INVAL("OPC_MXU_Q8ADDE");
25213 generate_exception_end(ctx
, EXCP_RI
);
25215 case OPC_MXU_D8SUM
:
25216 /* TODO: Implement emulation of D8SUM instruction. */
25217 MIPS_INVAL("OPC_MXU_D8SUM");
25218 generate_exception_end(ctx
, EXCP_RI
);
25220 case OPC_MXU_D8SUMC
:
25221 /* TODO: Implement emulation of D8SUMC instruction. */
25222 MIPS_INVAL("OPC_MXU_D8SUMC");
25223 generate_exception_end(ctx
, EXCP_RI
);
25226 MIPS_INVAL("decode_opc_mxu");
25227 generate_exception_end(ctx
, EXCP_RI
);
25234 * Decode MXU pool15
25236 * S32MUL, S32MULU, S32EXTRV:
25237 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25238 * +-----------+---------+---------+---+-------+-------+-----------+
25239 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
25240 * +-----------+---------+---------+---+-------+-------+-----------+
25243 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25244 * +-----------+---------+---------+---+-------+-------+-----------+
25245 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
25246 * +-----------+---------+---------+---+-------+-------+-----------+
25249 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
25251 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
25254 case OPC_MXU_S32MUL
:
25255 /* TODO: Implement emulation of S32MUL instruction. */
25256 MIPS_INVAL("OPC_MXU_S32MUL");
25257 generate_exception_end(ctx
, EXCP_RI
);
25259 case OPC_MXU_S32MULU
:
25260 /* TODO: Implement emulation of S32MULU instruction. */
25261 MIPS_INVAL("OPC_MXU_S32MULU");
25262 generate_exception_end(ctx
, EXCP_RI
);
25264 case OPC_MXU_S32EXTR
:
25265 /* TODO: Implement emulation of S32EXTR instruction. */
25266 MIPS_INVAL("OPC_MXU_S32EXTR");
25267 generate_exception_end(ctx
, EXCP_RI
);
25269 case OPC_MXU_S32EXTRV
:
25270 /* TODO: Implement emulation of S32EXTRV instruction. */
25271 MIPS_INVAL("OPC_MXU_S32EXTRV");
25272 generate_exception_end(ctx
, EXCP_RI
);
25275 MIPS_INVAL("decode_opc_mxu");
25276 generate_exception_end(ctx
, EXCP_RI
);
25283 * Decode MXU pool16
25286 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25287 * +-----------+---------+-----+-------+-------+-------+-----------+
25288 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
25289 * +-----------+---------+-----+-------+-------+-------+-----------+
25292 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25293 * +-----------+---------+-----+-------+-------+-------+-----------+
25294 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
25295 * +-----------+---------+-----+-------+-------+-------+-----------+
25298 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25299 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25300 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25301 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25303 * S32NOR, S32AND, S32OR, S32XOR:
25304 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25305 * +-----------+---------+-----+-------+-------+-------+-----------+
25306 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25307 * +-----------+---------+-----+-------+-------+-------+-----------+
25310 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25311 * +-----------+-----+---+-----+-------+---------------+-----------+
25312 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
25313 * +-----------+-----+---+-----+-------+---------------+-----------+
25316 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
25318 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25321 case OPC_MXU_D32SARW
:
25322 /* TODO: Implement emulation of D32SARW instruction. */
25323 MIPS_INVAL("OPC_MXU_D32SARW");
25324 generate_exception_end(ctx
, EXCP_RI
);
25326 case OPC_MXU_S32ALN
:
25327 /* TODO: Implement emulation of S32ALN instruction. */
25328 MIPS_INVAL("OPC_MXU_S32ALN");
25329 generate_exception_end(ctx
, EXCP_RI
);
25331 case OPC_MXU_S32ALNI
:
25332 /* TODO: Implement emulation of S32ALNI instruction. */
25333 MIPS_INVAL("OPC_MXU_S32ALNI");
25334 generate_exception_end(ctx
, EXCP_RI
);
25336 case OPC_MXU_S32NOR
:
25337 /* TODO: Implement emulation of S32NOR instruction. */
25338 MIPS_INVAL("OPC_MXU_S32NOR");
25339 generate_exception_end(ctx
, EXCP_RI
);
25341 case OPC_MXU_S32AND
:
25342 /* TODO: Implement emulation of S32AND instruction. */
25343 MIPS_INVAL("OPC_MXU_S32AND");
25344 generate_exception_end(ctx
, EXCP_RI
);
25346 case OPC_MXU_S32OR
:
25347 /* TODO: Implement emulation of S32OR instruction. */
25348 MIPS_INVAL("OPC_MXU_S32OR");
25349 generate_exception_end(ctx
, EXCP_RI
);
25351 case OPC_MXU_S32XOR
:
25352 /* TODO: Implement emulation of S32XOR instruction. */
25353 MIPS_INVAL("OPC_MXU_S32XOR");
25354 generate_exception_end(ctx
, EXCP_RI
);
25356 case OPC_MXU_S32LUI
:
25357 /* TODO: Implement emulation of S32LUI instruction. */
25358 MIPS_INVAL("OPC_MXU_S32LUI");
25359 generate_exception_end(ctx
, EXCP_RI
);
25362 MIPS_INVAL("decode_opc_mxu");
25363 generate_exception_end(ctx
, EXCP_RI
);
25370 * Decode MXU pool17
25372 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25373 * +-----------+---------+---------+---+---------+-----+-----------+
25374 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL15|
25375 * +-----------+---------+---------+---+---------+-----+-----------+
25378 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
25380 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
25384 /* TODO: Implement emulation of LXW instruction. */
25385 MIPS_INVAL("OPC_MXU_LXW");
25386 generate_exception_end(ctx
, EXCP_RI
);
25389 /* TODO: Implement emulation of LXH instruction. */
25390 MIPS_INVAL("OPC_MXU_LXH");
25391 generate_exception_end(ctx
, EXCP_RI
);
25394 /* TODO: Implement emulation of LXHU instruction. */
25395 MIPS_INVAL("OPC_MXU_LXHU");
25396 generate_exception_end(ctx
, EXCP_RI
);
25399 /* TODO: Implement emulation of LXB instruction. */
25400 MIPS_INVAL("OPC_MXU_LXB");
25401 generate_exception_end(ctx
, EXCP_RI
);
25404 /* TODO: Implement emulation of LXBU instruction. */
25405 MIPS_INVAL("OPC_MXU_LXBU");
25406 generate_exception_end(ctx
, EXCP_RI
);
25409 MIPS_INVAL("decode_opc_mxu");
25410 generate_exception_end(ctx
, EXCP_RI
);
25416 * Decode MXU pool18
25418 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25419 * +-----------+---------+-----+-------+-------+-------+-----------+
25420 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
25421 * +-----------+---------+-----+-------+-------+-------+-----------+
25424 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
25426 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25429 case OPC_MXU_D32SLLV
:
25430 /* TODO: Implement emulation of D32SLLV instruction. */
25431 MIPS_INVAL("OPC_MXU_D32SLLV");
25432 generate_exception_end(ctx
, EXCP_RI
);
25434 case OPC_MXU_D32SLRV
:
25435 /* TODO: Implement emulation of D32SLRV instruction. */
25436 MIPS_INVAL("OPC_MXU_D32SLRV");
25437 generate_exception_end(ctx
, EXCP_RI
);
25439 case OPC_MXU_D32SARV
:
25440 /* TODO: Implement emulation of D32SARV instruction. */
25441 MIPS_INVAL("OPC_MXU_D32SARV");
25442 generate_exception_end(ctx
, EXCP_RI
);
25444 case OPC_MXU_Q16SLLV
:
25445 /* TODO: Implement emulation of Q16SLLV instruction. */
25446 MIPS_INVAL("OPC_MXU_Q16SLLV");
25447 generate_exception_end(ctx
, EXCP_RI
);
25449 case OPC_MXU_Q16SLRV
:
25450 /* TODO: Implement emulation of Q16SLRV instruction. */
25451 MIPS_INVAL("OPC_MXU_Q16SLRV");
25452 generate_exception_end(ctx
, EXCP_RI
);
25454 case OPC_MXU_Q16SARV
:
25455 /* TODO: Implement emulation of Q16SARV instruction. */
25456 MIPS_INVAL("OPC_MXU_Q16SARV");
25457 generate_exception_end(ctx
, EXCP_RI
);
25460 MIPS_INVAL("decode_opc_mxu");
25461 generate_exception_end(ctx
, EXCP_RI
);
25468 * Decode MXU pool19
25470 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25471 * +-----------+---+---+-------+-------+-------+-------+-----------+
25472 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
25473 * +-----------+---+---+-------+-------+-------+-------+-----------+
25476 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
25478 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25481 case OPC_MXU_Q8MUL
:
25482 case OPC_MXU_Q8MULSU
:
25483 gen_mxu_q8mul_q8mulsu(ctx
);
25486 MIPS_INVAL("decode_opc_mxu");
25487 generate_exception_end(ctx
, EXCP_RI
);
25494 * Decode MXU pool20
25496 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25497 * +-----------+---------+-----+-------+-------+-------+-----------+
25498 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
25499 * +-----------+---------+-----+-------+-------+-------+-----------+
25502 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
25504 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25507 case OPC_MXU_Q8MOVZ
:
25508 /* TODO: Implement emulation of Q8MOVZ instruction. */
25509 MIPS_INVAL("OPC_MXU_Q8MOVZ");
25510 generate_exception_end(ctx
, EXCP_RI
);
25512 case OPC_MXU_Q8MOVN
:
25513 /* TODO: Implement emulation of Q8MOVN instruction. */
25514 MIPS_INVAL("OPC_MXU_Q8MOVN");
25515 generate_exception_end(ctx
, EXCP_RI
);
25517 case OPC_MXU_D16MOVZ
:
25518 /* TODO: Implement emulation of D16MOVZ instruction. */
25519 MIPS_INVAL("OPC_MXU_D16MOVZ");
25520 generate_exception_end(ctx
, EXCP_RI
);
25522 case OPC_MXU_D16MOVN
:
25523 /* TODO: Implement emulation of D16MOVN instruction. */
25524 MIPS_INVAL("OPC_MXU_D16MOVN");
25525 generate_exception_end(ctx
, EXCP_RI
);
25527 case OPC_MXU_S32MOVZ
:
25528 /* TODO: Implement emulation of S32MOVZ instruction. */
25529 MIPS_INVAL("OPC_MXU_S32MOVZ");
25530 generate_exception_end(ctx
, EXCP_RI
);
25532 case OPC_MXU_S32MOVN
:
25533 /* TODO: Implement emulation of S32MOVN instruction. */
25534 MIPS_INVAL("OPC_MXU_S32MOVN");
25535 generate_exception_end(ctx
, EXCP_RI
);
25538 MIPS_INVAL("decode_opc_mxu");
25539 generate_exception_end(ctx
, EXCP_RI
);
25546 * Decode MXU pool21
25548 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25549 * +-----------+---+---+-------+-------+-------+-------+-----------+
25550 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
25551 * +-----------+---+---+-------+-------+-------+-------+-----------+
25554 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
25556 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25559 case OPC_MXU_Q8MAC
:
25560 /* TODO: Implement emulation of Q8MAC instruction. */
25561 MIPS_INVAL("OPC_MXU_Q8MAC");
25562 generate_exception_end(ctx
, EXCP_RI
);
25564 case OPC_MXU_Q8MACSU
:
25565 /* TODO: Implement emulation of Q8MACSU instruction. */
25566 MIPS_INVAL("OPC_MXU_Q8MACSU");
25567 generate_exception_end(ctx
, EXCP_RI
);
25570 MIPS_INVAL("decode_opc_mxu");
25571 generate_exception_end(ctx
, EXCP_RI
);
25578 * Main MXU decoding function
25580 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25581 * +-----------+---------------------------------------+-----------+
25582 * | SPECIAL2 | |x x x x x x|
25583 * +-----------+---------------------------------------+-----------+
25586 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
25589 * TODO: Investigate necessity of including handling of
25590 * CLZ, CLO, SDBB in this function, as they belong to
25591 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
25593 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
25595 if (opcode
== OPC__MXU_MUL
) {
25596 uint32_t rs
, rt
, rd
, op1
;
25598 rs
= extract32(ctx
->opcode
, 21, 5);
25599 rt
= extract32(ctx
->opcode
, 16, 5);
25600 rd
= extract32(ctx
->opcode
, 11, 5);
25601 op1
= MASK_SPECIAL2(ctx
->opcode
);
25603 gen_arith(ctx
, op1
, rd
, rs
, rt
);
25608 if (opcode
== OPC_MXU_S32M2I
) {
25609 gen_mxu_s32m2i(ctx
);
25613 if (opcode
== OPC_MXU_S32I2M
) {
25614 gen_mxu_s32i2m(ctx
);
25619 TCGv t_mxu_cr
= tcg_temp_new();
25620 TCGLabel
*l_exit
= gen_new_label();
25622 gen_load_mxu_cr(t_mxu_cr
);
25623 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
25624 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
25627 case OPC_MXU_S32MADD
:
25628 /* TODO: Implement emulation of S32MADD instruction. */
25629 MIPS_INVAL("OPC_MXU_S32MADD");
25630 generate_exception_end(ctx
, EXCP_RI
);
25632 case OPC_MXU_S32MADDU
:
25633 /* TODO: Implement emulation of S32MADDU instruction. */
25634 MIPS_INVAL("OPC_MXU_S32MADDU");
25635 generate_exception_end(ctx
, EXCP_RI
);
25637 case OPC_MXU__POOL00
:
25638 decode_opc_mxu__pool00(env
, ctx
);
25640 case OPC_MXU_S32MSUB
:
25641 /* TODO: Implement emulation of S32MSUB instruction. */
25642 MIPS_INVAL("OPC_MXU_S32MSUB");
25643 generate_exception_end(ctx
, EXCP_RI
);
25645 case OPC_MXU_S32MSUBU
:
25646 /* TODO: Implement emulation of S32MSUBU instruction. */
25647 MIPS_INVAL("OPC_MXU_S32MSUBU");
25648 generate_exception_end(ctx
, EXCP_RI
);
25650 case OPC_MXU__POOL01
:
25651 decode_opc_mxu__pool01(env
, ctx
);
25653 case OPC_MXU__POOL02
:
25654 decode_opc_mxu__pool02(env
, ctx
);
25656 case OPC_MXU_D16MUL
:
25657 gen_mxu_d16mul(ctx
);
25659 case OPC_MXU__POOL03
:
25660 decode_opc_mxu__pool03(env
, ctx
);
25662 case OPC_MXU_D16MAC
:
25663 gen_mxu_d16mac(ctx
);
25665 case OPC_MXU_D16MACF
:
25666 /* TODO: Implement emulation of D16MACF instruction. */
25667 MIPS_INVAL("OPC_MXU_D16MACF");
25668 generate_exception_end(ctx
, EXCP_RI
);
25670 case OPC_MXU_D16MADL
:
25671 /* TODO: Implement emulation of D16MADL instruction. */
25672 MIPS_INVAL("OPC_MXU_D16MADL");
25673 generate_exception_end(ctx
, EXCP_RI
);
25675 case OPC_MXU_S16MAD
:
25676 /* TODO: Implement emulation of S16MAD instruction. */
25677 MIPS_INVAL("OPC_MXU_S16MAD");
25678 generate_exception_end(ctx
, EXCP_RI
);
25680 case OPC_MXU_Q16ADD
:
25681 /* TODO: Implement emulation of Q16ADD instruction. */
25682 MIPS_INVAL("OPC_MXU_Q16ADD");
25683 generate_exception_end(ctx
, EXCP_RI
);
25685 case OPC_MXU_D16MACE
:
25686 /* TODO: Implement emulation of D16MACE instruction. */
25687 MIPS_INVAL("OPC_MXU_D16MACE");
25688 generate_exception_end(ctx
, EXCP_RI
);
25690 case OPC_MXU__POOL04
:
25691 decode_opc_mxu__pool04(env
, ctx
);
25693 case OPC_MXU__POOL05
:
25694 decode_opc_mxu__pool05(env
, ctx
);
25696 case OPC_MXU__POOL06
:
25697 decode_opc_mxu__pool06(env
, ctx
);
25699 case OPC_MXU__POOL07
:
25700 decode_opc_mxu__pool07(env
, ctx
);
25702 case OPC_MXU__POOL08
:
25703 decode_opc_mxu__pool08(env
, ctx
);
25705 case OPC_MXU__POOL09
:
25706 decode_opc_mxu__pool09(env
, ctx
);
25708 case OPC_MXU__POOL10
:
25709 decode_opc_mxu__pool10(env
, ctx
);
25711 case OPC_MXU__POOL11
:
25712 decode_opc_mxu__pool11(env
, ctx
);
25714 case OPC_MXU_D32ADD
:
25715 /* TODO: Implement emulation of D32ADD instruction. */
25716 MIPS_INVAL("OPC_MXU_D32ADD");
25717 generate_exception_end(ctx
, EXCP_RI
);
25719 case OPC_MXU__POOL12
:
25720 decode_opc_mxu__pool12(env
, ctx
);
25722 case OPC_MXU__POOL13
:
25723 decode_opc_mxu__pool13(env
, ctx
);
25725 case OPC_MXU__POOL14
:
25726 decode_opc_mxu__pool14(env
, ctx
);
25728 case OPC_MXU_Q8ACCE
:
25729 /* TODO: Implement emulation of Q8ACCE instruction. */
25730 MIPS_INVAL("OPC_MXU_Q8ACCE");
25731 generate_exception_end(ctx
, EXCP_RI
);
25733 case OPC_MXU_S8LDD
:
25734 gen_mxu_s8ldd(ctx
);
25736 case OPC_MXU_S8STD
:
25737 /* TODO: Implement emulation of S8STD instruction. */
25738 MIPS_INVAL("OPC_MXU_S8STD");
25739 generate_exception_end(ctx
, EXCP_RI
);
25741 case OPC_MXU_S8LDI
:
25742 /* TODO: Implement emulation of S8LDI instruction. */
25743 MIPS_INVAL("OPC_MXU_S8LDI");
25744 generate_exception_end(ctx
, EXCP_RI
);
25746 case OPC_MXU_S8SDI
:
25747 /* TODO: Implement emulation of S8SDI instruction. */
25748 MIPS_INVAL("OPC_MXU_S8SDI");
25749 generate_exception_end(ctx
, EXCP_RI
);
25751 case OPC_MXU__POOL15
:
25752 decode_opc_mxu__pool15(env
, ctx
);
25754 case OPC_MXU__POOL16
:
25755 decode_opc_mxu__pool16(env
, ctx
);
25757 case OPC_MXU__POOL17
:
25758 decode_opc_mxu__pool17(env
, ctx
);
25760 case OPC_MXU_S16LDD
:
25761 /* TODO: Implement emulation of S16LDD instruction. */
25762 MIPS_INVAL("OPC_MXU_S16LDD");
25763 generate_exception_end(ctx
, EXCP_RI
);
25765 case OPC_MXU_S16STD
:
25766 /* TODO: Implement emulation of S16STD instruction. */
25767 MIPS_INVAL("OPC_MXU_S16STD");
25768 generate_exception_end(ctx
, EXCP_RI
);
25770 case OPC_MXU_S16LDI
:
25771 /* TODO: Implement emulation of S16LDI instruction. */
25772 MIPS_INVAL("OPC_MXU_S16LDI");
25773 generate_exception_end(ctx
, EXCP_RI
);
25775 case OPC_MXU_S16SDI
:
25776 /* TODO: Implement emulation of S16SDI instruction. */
25777 MIPS_INVAL("OPC_MXU_S16SDI");
25778 generate_exception_end(ctx
, EXCP_RI
);
25780 case OPC_MXU_D32SLL
:
25781 /* TODO: Implement emulation of D32SLL instruction. */
25782 MIPS_INVAL("OPC_MXU_D32SLL");
25783 generate_exception_end(ctx
, EXCP_RI
);
25785 case OPC_MXU_D32SLR
:
25786 /* TODO: Implement emulation of D32SLR instruction. */
25787 MIPS_INVAL("OPC_MXU_D32SLR");
25788 generate_exception_end(ctx
, EXCP_RI
);
25790 case OPC_MXU_D32SARL
:
25791 /* TODO: Implement emulation of D32SARL instruction. */
25792 MIPS_INVAL("OPC_MXU_D32SARL");
25793 generate_exception_end(ctx
, EXCP_RI
);
25795 case OPC_MXU_D32SAR
:
25796 /* TODO: Implement emulation of D32SAR instruction. */
25797 MIPS_INVAL("OPC_MXU_D32SAR");
25798 generate_exception_end(ctx
, EXCP_RI
);
25800 case OPC_MXU_Q16SLL
:
25801 /* TODO: Implement emulation of Q16SLL instruction. */
25802 MIPS_INVAL("OPC_MXU_Q16SLL");
25803 generate_exception_end(ctx
, EXCP_RI
);
25805 case OPC_MXU_Q16SLR
:
25806 /* TODO: Implement emulation of Q16SLR instruction. */
25807 MIPS_INVAL("OPC_MXU_Q16SLR");
25808 generate_exception_end(ctx
, EXCP_RI
);
25810 case OPC_MXU__POOL18
:
25811 decode_opc_mxu__pool18(env
, ctx
);
25813 case OPC_MXU_Q16SAR
:
25814 /* TODO: Implement emulation of Q16SAR instruction. */
25815 MIPS_INVAL("OPC_MXU_Q16SAR");
25816 generate_exception_end(ctx
, EXCP_RI
);
25818 case OPC_MXU__POOL19
:
25819 decode_opc_mxu__pool19(env
, ctx
);
25821 case OPC_MXU__POOL20
:
25822 decode_opc_mxu__pool20(env
, ctx
);
25824 case OPC_MXU__POOL21
:
25825 decode_opc_mxu__pool21(env
, ctx
);
25827 case OPC_MXU_Q16SCOP
:
25828 /* TODO: Implement emulation of Q16SCOP instruction. */
25829 MIPS_INVAL("OPC_MXU_Q16SCOP");
25830 generate_exception_end(ctx
, EXCP_RI
);
25832 case OPC_MXU_Q8MADL
:
25833 /* TODO: Implement emulation of Q8MADL instruction. */
25834 MIPS_INVAL("OPC_MXU_Q8MADL");
25835 generate_exception_end(ctx
, EXCP_RI
);
25837 case OPC_MXU_S32SFL
:
25838 /* TODO: Implement emulation of S32SFL instruction. */
25839 MIPS_INVAL("OPC_MXU_S32SFL");
25840 generate_exception_end(ctx
, EXCP_RI
);
25842 case OPC_MXU_Q8SAD
:
25843 /* TODO: Implement emulation of Q8SAD instruction. */
25844 MIPS_INVAL("OPC_MXU_Q8SAD");
25845 generate_exception_end(ctx
, EXCP_RI
);
25848 MIPS_INVAL("decode_opc_mxu");
25849 generate_exception_end(ctx
, EXCP_RI
);
25852 gen_set_label(l_exit
);
25853 tcg_temp_free(t_mxu_cr
);
25858 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
25863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25865 rs
= (ctx
->opcode
>> 21) & 0x1f;
25866 rt
= (ctx
->opcode
>> 16) & 0x1f;
25867 rd
= (ctx
->opcode
>> 11) & 0x1f;
25869 op1
= MASK_SPECIAL2(ctx
->opcode
);
25871 case OPC_MADD
: /* Multiply and add/sub */
25875 check_insn(ctx
, ISA_MIPS32
);
25876 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
25879 gen_arith(ctx
, op1
, rd
, rs
, rt
);
25882 case OPC_DIVU_G_2F
:
25883 case OPC_MULT_G_2F
:
25884 case OPC_MULTU_G_2F
:
25886 case OPC_MODU_G_2F
:
25887 check_insn(ctx
, INSN_LOONGSON2F
);
25888 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25892 check_insn(ctx
, ISA_MIPS32
);
25893 gen_cl(ctx
, op1
, rd
, rs
);
25896 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
25897 gen_helper_do_semihosting(cpu_env
);
25899 /* XXX: not clear which exception should be raised
25900 * when in debug mode...
25902 check_insn(ctx
, ISA_MIPS32
);
25903 generate_exception_end(ctx
, EXCP_DBp
);
25906 #if defined(TARGET_MIPS64)
25909 check_insn(ctx
, ISA_MIPS64
);
25910 check_mips_64(ctx
);
25911 gen_cl(ctx
, op1
, rd
, rs
);
25913 case OPC_DMULT_G_2F
:
25914 case OPC_DMULTU_G_2F
:
25915 case OPC_DDIV_G_2F
:
25916 case OPC_DDIVU_G_2F
:
25917 case OPC_DMOD_G_2F
:
25918 case OPC_DMODU_G_2F
:
25919 check_insn(ctx
, INSN_LOONGSON2F
);
25920 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
25923 default: /* Invalid */
25924 MIPS_INVAL("special2_legacy");
25925 generate_exception_end(ctx
, EXCP_RI
);
25930 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
25932 int rs
, rt
, rd
, sa
;
25936 rs
= (ctx
->opcode
>> 21) & 0x1f;
25937 rt
= (ctx
->opcode
>> 16) & 0x1f;
25938 rd
= (ctx
->opcode
>> 11) & 0x1f;
25939 sa
= (ctx
->opcode
>> 6) & 0x1f;
25940 imm
= (int16_t)ctx
->opcode
>> 7;
25942 op1
= MASK_SPECIAL3(ctx
->opcode
);
25946 /* hint codes 24-31 are reserved and signal RI */
25947 generate_exception_end(ctx
, EXCP_RI
);
25949 /* Treat as NOP. */
25952 check_cp0_enabled(ctx
);
25953 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
25954 gen_cache_operation(ctx
, rt
, rs
, imm
);
25958 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
25961 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25966 /* Treat as NOP. */
25969 op2
= MASK_BSHFL(ctx
->opcode
);
25975 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
25978 gen_bitswap(ctx
, op2
, rd
, rt
);
25983 #if defined(TARGET_MIPS64)
25985 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
25988 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25991 check_mips_64(ctx
);
25994 /* Treat as NOP. */
25997 op2
= MASK_DBSHFL(ctx
->opcode
);
26007 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26010 gen_bitswap(ctx
, op2
, rd
, rt
);
26017 default: /* Invalid */
26018 MIPS_INVAL("special3_r6");
26019 generate_exception_end(ctx
, EXCP_RI
);
26024 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26029 rs
= (ctx
->opcode
>> 21) & 0x1f;
26030 rt
= (ctx
->opcode
>> 16) & 0x1f;
26031 rd
= (ctx
->opcode
>> 11) & 0x1f;
26033 op1
= MASK_SPECIAL3(ctx
->opcode
);
26036 case OPC_DIVU_G_2E
:
26038 case OPC_MODU_G_2E
:
26039 case OPC_MULT_G_2E
:
26040 case OPC_MULTU_G_2E
:
26041 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26042 * the same mask and op1. */
26043 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26044 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26047 case OPC_ADDUH_R_QB
:
26049 case OPC_ADDQH_R_PH
:
26051 case OPC_ADDQH_R_W
:
26053 case OPC_SUBUH_R_QB
:
26055 case OPC_SUBQH_R_PH
:
26057 case OPC_SUBQH_R_W
:
26058 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26063 case OPC_MULQ_RS_W
:
26064 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26067 MIPS_INVAL("MASK ADDUH.QB");
26068 generate_exception_end(ctx
, EXCP_RI
);
26071 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26072 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26074 generate_exception_end(ctx
, EXCP_RI
);
26078 op2
= MASK_LX(ctx
->opcode
);
26080 #if defined(TARGET_MIPS64)
26086 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26088 default: /* Invalid */
26089 MIPS_INVAL("MASK LX");
26090 generate_exception_end(ctx
, EXCP_RI
);
26094 case OPC_ABSQ_S_PH_DSP
:
26095 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26097 case OPC_ABSQ_S_QB
:
26098 case OPC_ABSQ_S_PH
:
26100 case OPC_PRECEQ_W_PHL
:
26101 case OPC_PRECEQ_W_PHR
:
26102 case OPC_PRECEQU_PH_QBL
:
26103 case OPC_PRECEQU_PH_QBR
:
26104 case OPC_PRECEQU_PH_QBLA
:
26105 case OPC_PRECEQU_PH_QBRA
:
26106 case OPC_PRECEU_PH_QBL
:
26107 case OPC_PRECEU_PH_QBR
:
26108 case OPC_PRECEU_PH_QBLA
:
26109 case OPC_PRECEU_PH_QBRA
:
26110 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26117 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26120 MIPS_INVAL("MASK ABSQ_S.PH");
26121 generate_exception_end(ctx
, EXCP_RI
);
26125 case OPC_ADDU_QB_DSP
:
26126 op2
= MASK_ADDU_QB(ctx
->opcode
);
26129 case OPC_ADDQ_S_PH
:
26132 case OPC_ADDU_S_QB
:
26134 case OPC_ADDU_S_PH
:
26136 case OPC_SUBQ_S_PH
:
26139 case OPC_SUBU_S_QB
:
26141 case OPC_SUBU_S_PH
:
26145 case OPC_RADDU_W_QB
:
26146 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26148 case OPC_MULEU_S_PH_QBL
:
26149 case OPC_MULEU_S_PH_QBR
:
26150 case OPC_MULQ_RS_PH
:
26151 case OPC_MULEQ_S_W_PHL
:
26152 case OPC_MULEQ_S_W_PHR
:
26153 case OPC_MULQ_S_PH
:
26154 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26156 default: /* Invalid */
26157 MIPS_INVAL("MASK ADDU.QB");
26158 generate_exception_end(ctx
, EXCP_RI
);
26163 case OPC_CMPU_EQ_QB_DSP
:
26164 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26166 case OPC_PRECR_SRA_PH_W
:
26167 case OPC_PRECR_SRA_R_PH_W
:
26168 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26170 case OPC_PRECR_QB_PH
:
26171 case OPC_PRECRQ_QB_PH
:
26172 case OPC_PRECRQ_PH_W
:
26173 case OPC_PRECRQ_RS_PH_W
:
26174 case OPC_PRECRQU_S_QB_PH
:
26175 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26177 case OPC_CMPU_EQ_QB
:
26178 case OPC_CMPU_LT_QB
:
26179 case OPC_CMPU_LE_QB
:
26180 case OPC_CMP_EQ_PH
:
26181 case OPC_CMP_LT_PH
:
26182 case OPC_CMP_LE_PH
:
26183 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26185 case OPC_CMPGU_EQ_QB
:
26186 case OPC_CMPGU_LT_QB
:
26187 case OPC_CMPGU_LE_QB
:
26188 case OPC_CMPGDU_EQ_QB
:
26189 case OPC_CMPGDU_LT_QB
:
26190 case OPC_CMPGDU_LE_QB
:
26193 case OPC_PACKRL_PH
:
26194 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26196 default: /* Invalid */
26197 MIPS_INVAL("MASK CMPU.EQ.QB");
26198 generate_exception_end(ctx
, EXCP_RI
);
26202 case OPC_SHLL_QB_DSP
:
26203 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26205 case OPC_DPA_W_PH_DSP
:
26206 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26208 case OPC_DPAU_H_QBL
:
26209 case OPC_DPAU_H_QBR
:
26210 case OPC_DPSU_H_QBL
:
26211 case OPC_DPSU_H_QBR
:
26213 case OPC_DPAX_W_PH
:
26214 case OPC_DPAQ_S_W_PH
:
26215 case OPC_DPAQX_S_W_PH
:
26216 case OPC_DPAQX_SA_W_PH
:
26218 case OPC_DPSX_W_PH
:
26219 case OPC_DPSQ_S_W_PH
:
26220 case OPC_DPSQX_S_W_PH
:
26221 case OPC_DPSQX_SA_W_PH
:
26222 case OPC_MULSAQ_S_W_PH
:
26223 case OPC_DPAQ_SA_L_W
:
26224 case OPC_DPSQ_SA_L_W
:
26225 case OPC_MAQ_S_W_PHL
:
26226 case OPC_MAQ_S_W_PHR
:
26227 case OPC_MAQ_SA_W_PHL
:
26228 case OPC_MAQ_SA_W_PHR
:
26229 case OPC_MULSA_W_PH
:
26230 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26232 default: /* Invalid */
26233 MIPS_INVAL("MASK DPAW.PH");
26234 generate_exception_end(ctx
, EXCP_RI
);
26239 op2
= MASK_INSV(ctx
->opcode
);
26250 t0
= tcg_temp_new();
26251 t1
= tcg_temp_new();
26253 gen_load_gpr(t0
, rt
);
26254 gen_load_gpr(t1
, rs
);
26256 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26262 default: /* Invalid */
26263 MIPS_INVAL("MASK INSV");
26264 generate_exception_end(ctx
, EXCP_RI
);
26268 case OPC_APPEND_DSP
:
26269 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26271 case OPC_EXTR_W_DSP
:
26272 op2
= MASK_EXTR_W(ctx
->opcode
);
26276 case OPC_EXTR_RS_W
:
26278 case OPC_EXTRV_S_H
:
26280 case OPC_EXTRV_R_W
:
26281 case OPC_EXTRV_RS_W
:
26286 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26289 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26295 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26297 default: /* Invalid */
26298 MIPS_INVAL("MASK EXTR.W");
26299 generate_exception_end(ctx
, EXCP_RI
);
26303 #if defined(TARGET_MIPS64)
26304 case OPC_DDIV_G_2E
:
26305 case OPC_DDIVU_G_2E
:
26306 case OPC_DMULT_G_2E
:
26307 case OPC_DMULTU_G_2E
:
26308 case OPC_DMOD_G_2E
:
26309 case OPC_DMODU_G_2E
:
26310 check_insn(ctx
, INSN_LOONGSON2E
);
26311 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26313 case OPC_ABSQ_S_QH_DSP
:
26314 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
26316 case OPC_PRECEQ_L_PWL
:
26317 case OPC_PRECEQ_L_PWR
:
26318 case OPC_PRECEQ_PW_QHL
:
26319 case OPC_PRECEQ_PW_QHR
:
26320 case OPC_PRECEQ_PW_QHLA
:
26321 case OPC_PRECEQ_PW_QHRA
:
26322 case OPC_PRECEQU_QH_OBL
:
26323 case OPC_PRECEQU_QH_OBR
:
26324 case OPC_PRECEQU_QH_OBLA
:
26325 case OPC_PRECEQU_QH_OBRA
:
26326 case OPC_PRECEU_QH_OBL
:
26327 case OPC_PRECEU_QH_OBR
:
26328 case OPC_PRECEU_QH_OBLA
:
26329 case OPC_PRECEU_QH_OBRA
:
26330 case OPC_ABSQ_S_OB
:
26331 case OPC_ABSQ_S_PW
:
26332 case OPC_ABSQ_S_QH
:
26333 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26341 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26343 default: /* Invalid */
26344 MIPS_INVAL("MASK ABSQ_S.QH");
26345 generate_exception_end(ctx
, EXCP_RI
);
26349 case OPC_ADDU_OB_DSP
:
26350 op2
= MASK_ADDU_OB(ctx
->opcode
);
26352 case OPC_RADDU_L_OB
:
26354 case OPC_SUBQ_S_PW
:
26356 case OPC_SUBQ_S_QH
:
26358 case OPC_SUBU_S_OB
:
26360 case OPC_SUBU_S_QH
:
26362 case OPC_SUBUH_R_OB
:
26364 case OPC_ADDQ_S_PW
:
26366 case OPC_ADDQ_S_QH
:
26368 case OPC_ADDU_S_OB
:
26370 case OPC_ADDU_S_QH
:
26372 case OPC_ADDUH_R_OB
:
26373 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26375 case OPC_MULEQ_S_PW_QHL
:
26376 case OPC_MULEQ_S_PW_QHR
:
26377 case OPC_MULEU_S_QH_OBL
:
26378 case OPC_MULEU_S_QH_OBR
:
26379 case OPC_MULQ_RS_QH
:
26380 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26382 default: /* Invalid */
26383 MIPS_INVAL("MASK ADDU.OB");
26384 generate_exception_end(ctx
, EXCP_RI
);
26388 case OPC_CMPU_EQ_OB_DSP
:
26389 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
26391 case OPC_PRECR_SRA_QH_PW
:
26392 case OPC_PRECR_SRA_R_QH_PW
:
26393 /* Return value is rt. */
26394 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26396 case OPC_PRECR_OB_QH
:
26397 case OPC_PRECRQ_OB_QH
:
26398 case OPC_PRECRQ_PW_L
:
26399 case OPC_PRECRQ_QH_PW
:
26400 case OPC_PRECRQ_RS_QH_PW
:
26401 case OPC_PRECRQU_S_OB_QH
:
26402 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26404 case OPC_CMPU_EQ_OB
:
26405 case OPC_CMPU_LT_OB
:
26406 case OPC_CMPU_LE_OB
:
26407 case OPC_CMP_EQ_QH
:
26408 case OPC_CMP_LT_QH
:
26409 case OPC_CMP_LE_QH
:
26410 case OPC_CMP_EQ_PW
:
26411 case OPC_CMP_LT_PW
:
26412 case OPC_CMP_LE_PW
:
26413 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26415 case OPC_CMPGDU_EQ_OB
:
26416 case OPC_CMPGDU_LT_OB
:
26417 case OPC_CMPGDU_LE_OB
:
26418 case OPC_CMPGU_EQ_OB
:
26419 case OPC_CMPGU_LT_OB
:
26420 case OPC_CMPGU_LE_OB
:
26421 case OPC_PACKRL_PW
:
26425 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26427 default: /* Invalid */
26428 MIPS_INVAL("MASK CMPU_EQ.OB");
26429 generate_exception_end(ctx
, EXCP_RI
);
26433 case OPC_DAPPEND_DSP
:
26434 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26436 case OPC_DEXTR_W_DSP
:
26437 op2
= MASK_DEXTR_W(ctx
->opcode
);
26444 case OPC_DEXTR_R_L
:
26445 case OPC_DEXTR_RS_L
:
26447 case OPC_DEXTR_R_W
:
26448 case OPC_DEXTR_RS_W
:
26449 case OPC_DEXTR_S_H
:
26451 case OPC_DEXTRV_R_L
:
26452 case OPC_DEXTRV_RS_L
:
26453 case OPC_DEXTRV_S_H
:
26455 case OPC_DEXTRV_R_W
:
26456 case OPC_DEXTRV_RS_W
:
26457 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26462 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26464 default: /* Invalid */
26465 MIPS_INVAL("MASK EXTR.W");
26466 generate_exception_end(ctx
, EXCP_RI
);
26470 case OPC_DPAQ_W_QH_DSP
:
26471 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
26473 case OPC_DPAU_H_OBL
:
26474 case OPC_DPAU_H_OBR
:
26475 case OPC_DPSU_H_OBL
:
26476 case OPC_DPSU_H_OBR
:
26478 case OPC_DPAQ_S_W_QH
:
26480 case OPC_DPSQ_S_W_QH
:
26481 case OPC_MULSAQ_S_W_QH
:
26482 case OPC_DPAQ_SA_L_PW
:
26483 case OPC_DPSQ_SA_L_PW
:
26484 case OPC_MULSAQ_S_L_PW
:
26485 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26487 case OPC_MAQ_S_W_QHLL
:
26488 case OPC_MAQ_S_W_QHLR
:
26489 case OPC_MAQ_S_W_QHRL
:
26490 case OPC_MAQ_S_W_QHRR
:
26491 case OPC_MAQ_SA_W_QHLL
:
26492 case OPC_MAQ_SA_W_QHLR
:
26493 case OPC_MAQ_SA_W_QHRL
:
26494 case OPC_MAQ_SA_W_QHRR
:
26495 case OPC_MAQ_S_L_PWL
:
26496 case OPC_MAQ_S_L_PWR
:
26501 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26503 default: /* Invalid */
26504 MIPS_INVAL("MASK DPAQ.W.QH");
26505 generate_exception_end(ctx
, EXCP_RI
);
26509 case OPC_DINSV_DSP
:
26510 op2
= MASK_INSV(ctx
->opcode
);
26521 t0
= tcg_temp_new();
26522 t1
= tcg_temp_new();
26524 gen_load_gpr(t0
, rt
);
26525 gen_load_gpr(t1
, rs
);
26527 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26533 default: /* Invalid */
26534 MIPS_INVAL("MASK DINSV");
26535 generate_exception_end(ctx
, EXCP_RI
);
26539 case OPC_SHLL_OB_DSP
:
26540 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26543 default: /* Invalid */
26544 MIPS_INVAL("special3_legacy");
26545 generate_exception_end(ctx
, EXCP_RI
);
26550 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
26552 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
26555 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
26556 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
26557 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
26558 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
26559 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
26560 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
26561 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
26562 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
26563 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
26564 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
26565 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
26566 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
26567 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
26568 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
26569 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
26570 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
26571 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
26572 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
26573 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
26574 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
26575 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
26576 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
26577 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
26578 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
26579 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
26580 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
26583 MIPS_INVAL("TX79 MMI class MMI0");
26584 generate_exception_end(ctx
, EXCP_RI
);
26589 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
26591 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
26594 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
26595 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
26596 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
26597 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
26598 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
26599 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
26600 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
26601 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
26602 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
26603 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
26604 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
26605 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
26606 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
26607 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
26608 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
26609 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
26610 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
26611 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
26612 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
26615 MIPS_INVAL("TX79 MMI class MMI1");
26616 generate_exception_end(ctx
, EXCP_RI
);
26621 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
26623 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
26626 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
26627 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
26628 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
26629 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
26630 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
26631 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
26632 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
26633 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
26634 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
26635 case MMI_OPC_2_PCPYLD
: /* TODO: MMI_OPC_2_PCPYLD */
26636 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
26637 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
26638 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
26639 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
26640 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
26641 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
26642 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
26643 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
26644 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
26645 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
26646 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
26647 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
26648 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
26651 MIPS_INVAL("TX79 MMI class MMI2");
26652 generate_exception_end(ctx
, EXCP_RI
);
26657 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
26659 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
26662 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
26663 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
26664 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
26665 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
26666 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
26667 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
26668 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
26669 case MMI_OPC_3_PCPYUD
: /* TODO: MMI_OPC_3_PCPYUD */
26670 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
26671 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
26672 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
26673 case MMI_OPC_3_PCPYH
: /* TODO: MMI_OPC_3_PCPYH */
26674 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
26675 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
26678 MIPS_INVAL("TX79 MMI class MMI3");
26679 generate_exception_end(ctx
, EXCP_RI
);
26684 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
26686 uint32_t opc
= MASK_MMI(ctx
->opcode
);
26687 int rs
= extract32(ctx
->opcode
, 21, 5);
26688 int rt
= extract32(ctx
->opcode
, 16, 5);
26689 int rd
= extract32(ctx
->opcode
, 11, 5);
26692 case MMI_OPC_CLASS_MMI0
:
26693 decode_mmi0(env
, ctx
);
26695 case MMI_OPC_CLASS_MMI1
:
26696 decode_mmi1(env
, ctx
);
26698 case MMI_OPC_CLASS_MMI2
:
26699 decode_mmi2(env
, ctx
);
26701 case MMI_OPC_CLASS_MMI3
:
26702 decode_mmi3(env
, ctx
);
26704 case MMI_OPC_MULT1
:
26705 case MMI_OPC_MULTU1
:
26706 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
26709 case MMI_OPC_DIVU1
:
26710 gen_div1_tx79(ctx
, opc
, rs
, rt
);
26712 case MMI_OPC_MTLO1
:
26713 case MMI_OPC_MTHI1
:
26714 gen_HILO1_tx79(ctx
, opc
, rs
);
26716 case MMI_OPC_MFLO1
:
26717 case MMI_OPC_MFHI1
:
26718 gen_HILO1_tx79(ctx
, opc
, rd
);
26720 case MMI_OPC_MADD
: /* TODO: MMI_OPC_MADD */
26721 case MMI_OPC_MADDU
: /* TODO: MMI_OPC_MADDU */
26722 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
26723 case MMI_OPC_MADD1
: /* TODO: MMI_OPC_MADD1 */
26724 case MMI_OPC_MADDU1
: /* TODO: MMI_OPC_MADDU1 */
26725 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
26726 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
26727 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
26728 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
26729 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
26730 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
26731 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
26732 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
26733 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
26736 MIPS_INVAL("TX79 MMI class");
26737 generate_exception_end(ctx
, EXCP_RI
);
26742 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
26744 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
26747 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
26749 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
26753 * The TX79-specific instruction Store Quadword
26755 * +--------+-------+-------+------------------------+
26756 * | 011111 | base | rt | offset | SQ
26757 * +--------+-------+-------+------------------------+
26760 * has the same opcode as the Read Hardware Register instruction
26762 * +--------+-------+-------+-------+-------+--------+
26763 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
26764 * +--------+-------+-------+-------+-------+--------+
26767 * that is required, trapped and emulated by the Linux kernel. However, all
26768 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
26769 * offset is odd. Therefore all valid SQ instructions can execute normally.
26770 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
26771 * between SQ and RDHWR, as the Linux kernel does.
26773 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
26775 int base
= extract32(ctx
->opcode
, 21, 5);
26776 int rt
= extract32(ctx
->opcode
, 16, 5);
26777 int offset
= extract32(ctx
->opcode
, 0, 16);
26779 #ifdef CONFIG_USER_ONLY
26780 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
26781 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
26783 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
26784 int rd
= extract32(ctx
->opcode
, 11, 5);
26786 gen_rdhwr(ctx
, rt
, rd
, 0);
26791 gen_mmi_sq(ctx
, base
, rt
, offset
);
26794 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
26796 int rs
, rt
, rd
, sa
;
26800 rs
= (ctx
->opcode
>> 21) & 0x1f;
26801 rt
= (ctx
->opcode
>> 16) & 0x1f;
26802 rd
= (ctx
->opcode
>> 11) & 0x1f;
26803 sa
= (ctx
->opcode
>> 6) & 0x1f;
26804 imm
= sextract32(ctx
->opcode
, 7, 9);
26806 op1
= MASK_SPECIAL3(ctx
->opcode
);
26809 * EVA loads and stores overlap Loongson 2E instructions decoded by
26810 * decode_opc_special3_legacy(), so be careful to allow their decoding when
26817 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26825 check_cp0_enabled(ctx
);
26826 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26830 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26835 check_cp0_enabled(ctx
);
26836 gen_st(ctx
, op1
, rt
, rs
, imm
);
26839 check_cp0_enabled(ctx
);
26840 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26843 check_cp0_enabled(ctx
);
26844 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26845 gen_cache_operation(ctx
, rt
, rs
, imm
);
26847 /* Treat as NOP. */
26850 check_cp0_enabled(ctx
);
26851 /* Treat as NOP. */
26859 check_insn(ctx
, ISA_MIPS32R2
);
26860 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26863 op2
= MASK_BSHFL(ctx
->opcode
);
26870 check_insn(ctx
, ISA_MIPS32R6
);
26871 decode_opc_special3_r6(env
, ctx
);
26874 check_insn(ctx
, ISA_MIPS32R2
);
26875 gen_bshfl(ctx
, op2
, rt
, rd
);
26879 #if defined(TARGET_MIPS64)
26886 check_insn(ctx
, ISA_MIPS64R2
);
26887 check_mips_64(ctx
);
26888 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26891 op2
= MASK_DBSHFL(ctx
->opcode
);
26902 check_insn(ctx
, ISA_MIPS32R6
);
26903 decode_opc_special3_r6(env
, ctx
);
26906 check_insn(ctx
, ISA_MIPS64R2
);
26907 check_mips_64(ctx
);
26908 op2
= MASK_DBSHFL(ctx
->opcode
);
26909 gen_bshfl(ctx
, op2
, rt
, rd
);
26915 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
26920 TCGv t0
= tcg_temp_new();
26921 TCGv t1
= tcg_temp_new();
26923 gen_load_gpr(t0
, rt
);
26924 gen_load_gpr(t1
, rs
);
26925 gen_helper_fork(t0
, t1
);
26933 TCGv t0
= tcg_temp_new();
26935 gen_load_gpr(t0
, rs
);
26936 gen_helper_yield(t0
, cpu_env
, t0
);
26937 gen_store_gpr(t0
, rd
);
26942 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26943 decode_opc_special3_r6(env
, ctx
);
26945 decode_opc_special3_legacy(env
, ctx
);
26950 /* MIPS SIMD Architecture (MSA) */
26951 static inline int check_msa_access(DisasContext
*ctx
)
26953 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
26954 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
26955 generate_exception_end(ctx
, EXCP_RI
);
26959 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
26960 if (ctx
->insn_flags
& ASE_MSA
) {
26961 generate_exception_end(ctx
, EXCP_MSADIS
);
26964 generate_exception_end(ctx
, EXCP_RI
);
26971 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
26973 /* generates tcg ops to check if any element is 0 */
26974 /* Note this function only works with MSA_WRLEN = 128 */
26975 uint64_t eval_zero_or_big
= 0;
26976 uint64_t eval_big
= 0;
26977 TCGv_i64 t0
= tcg_temp_new_i64();
26978 TCGv_i64 t1
= tcg_temp_new_i64();
26981 eval_zero_or_big
= 0x0101010101010101ULL
;
26982 eval_big
= 0x8080808080808080ULL
;
26985 eval_zero_or_big
= 0x0001000100010001ULL
;
26986 eval_big
= 0x8000800080008000ULL
;
26989 eval_zero_or_big
= 0x0000000100000001ULL
;
26990 eval_big
= 0x8000000080000000ULL
;
26993 eval_zero_or_big
= 0x0000000000000001ULL
;
26994 eval_big
= 0x8000000000000000ULL
;
26997 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
26998 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
26999 tcg_gen_andi_i64(t0
, t0
, eval_big
);
27000 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
27001 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
27002 tcg_gen_andi_i64(t1
, t1
, eval_big
);
27003 tcg_gen_or_i64(t0
, t0
, t1
);
27004 /* if all bits are zero then all elements are not zero */
27005 /* if some bit is non-zero then some element is zero */
27006 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
27007 tcg_gen_trunc_i64_tl(tresult
, t0
);
27008 tcg_temp_free_i64(t0
);
27009 tcg_temp_free_i64(t1
);
27012 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
27014 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27015 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27016 int64_t s16
= (int16_t)ctx
->opcode
;
27018 check_msa_access(ctx
);
27020 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27021 generate_exception_end(ctx
, EXCP_RI
);
27028 TCGv_i64 t0
= tcg_temp_new_i64();
27029 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
27030 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
27031 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
27032 tcg_gen_trunc_i64_tl(bcond
, t0
);
27033 tcg_temp_free_i64(t0
);
27040 gen_check_zero_element(bcond
, df
, wt
);
27046 gen_check_zero_element(bcond
, df
, wt
);
27047 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
27051 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
27053 ctx
->hflags
|= MIPS_HFLAG_BC
;
27054 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
27057 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
27059 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
27060 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
27061 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27062 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27064 TCGv_i32 twd
= tcg_const_i32(wd
);
27065 TCGv_i32 tws
= tcg_const_i32(ws
);
27066 TCGv_i32 ti8
= tcg_const_i32(i8
);
27068 switch (MASK_MSA_I8(ctx
->opcode
)) {
27070 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
27073 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
27076 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
27079 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
27082 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
27085 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
27088 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
27094 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
27095 if (df
== DF_DOUBLE
) {
27096 generate_exception_end(ctx
, EXCP_RI
);
27098 TCGv_i32 tdf
= tcg_const_i32(df
);
27099 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
27100 tcg_temp_free_i32(tdf
);
27105 MIPS_INVAL("MSA instruction");
27106 generate_exception_end(ctx
, EXCP_RI
);
27110 tcg_temp_free_i32(twd
);
27111 tcg_temp_free_i32(tws
);
27112 tcg_temp_free_i32(ti8
);
27115 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
27117 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27118 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27119 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
27120 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
27121 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27122 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27124 TCGv_i32 tdf
= tcg_const_i32(df
);
27125 TCGv_i32 twd
= tcg_const_i32(wd
);
27126 TCGv_i32 tws
= tcg_const_i32(ws
);
27127 TCGv_i32 timm
= tcg_temp_new_i32();
27128 tcg_gen_movi_i32(timm
, u5
);
27130 switch (MASK_MSA_I5(ctx
->opcode
)) {
27132 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27135 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27137 case OPC_MAXI_S_df
:
27138 tcg_gen_movi_i32(timm
, s5
);
27139 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27141 case OPC_MAXI_U_df
:
27142 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27144 case OPC_MINI_S_df
:
27145 tcg_gen_movi_i32(timm
, s5
);
27146 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27148 case OPC_MINI_U_df
:
27149 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27152 tcg_gen_movi_i32(timm
, s5
);
27153 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27155 case OPC_CLTI_S_df
:
27156 tcg_gen_movi_i32(timm
, s5
);
27157 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27159 case OPC_CLTI_U_df
:
27160 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27162 case OPC_CLEI_S_df
:
27163 tcg_gen_movi_i32(timm
, s5
);
27164 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27166 case OPC_CLEI_U_df
:
27167 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27171 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
27172 tcg_gen_movi_i32(timm
, s10
);
27173 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
27177 MIPS_INVAL("MSA instruction");
27178 generate_exception_end(ctx
, EXCP_RI
);
27182 tcg_temp_free_i32(tdf
);
27183 tcg_temp_free_i32(twd
);
27184 tcg_temp_free_i32(tws
);
27185 tcg_temp_free_i32(timm
);
27188 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
27190 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27191 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
27192 uint32_t df
= 0, m
= 0;
27193 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27194 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27201 if ((dfm
& 0x40) == 0x00) {
27204 } else if ((dfm
& 0x60) == 0x40) {
27207 } else if ((dfm
& 0x70) == 0x60) {
27210 } else if ((dfm
& 0x78) == 0x70) {
27214 generate_exception_end(ctx
, EXCP_RI
);
27218 tdf
= tcg_const_i32(df
);
27219 tm
= tcg_const_i32(m
);
27220 twd
= tcg_const_i32(wd
);
27221 tws
= tcg_const_i32(ws
);
27223 switch (MASK_MSA_BIT(ctx
->opcode
)) {
27225 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27228 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
27231 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27234 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27237 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
27240 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
27242 case OPC_BINSLI_df
:
27243 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27245 case OPC_BINSRI_df
:
27246 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27249 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
27252 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
27255 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
27258 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27261 MIPS_INVAL("MSA instruction");
27262 generate_exception_end(ctx
, EXCP_RI
);
27266 tcg_temp_free_i32(tdf
);
27267 tcg_temp_free_i32(tm
);
27268 tcg_temp_free_i32(twd
);
27269 tcg_temp_free_i32(tws
);
27272 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
27274 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27275 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27276 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27277 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27278 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27280 TCGv_i32 tdf
= tcg_const_i32(df
);
27281 TCGv_i32 twd
= tcg_const_i32(wd
);
27282 TCGv_i32 tws
= tcg_const_i32(ws
);
27283 TCGv_i32 twt
= tcg_const_i32(wt
);
27285 switch (MASK_MSA_3R(ctx
->opcode
)) {
27287 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
27290 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27293 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27296 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27298 case OPC_SUBS_S_df
:
27299 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27302 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27305 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
27308 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27311 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
27314 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27316 case OPC_ADDS_A_df
:
27317 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27319 case OPC_SUBS_U_df
:
27320 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27323 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27326 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
27329 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
27332 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27335 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27338 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27340 case OPC_ADDS_S_df
:
27341 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27343 case OPC_SUBSUS_U_df
:
27344 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27347 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27350 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
27353 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27356 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27359 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27362 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27364 case OPC_ADDS_U_df
:
27365 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27367 case OPC_SUBSUU_S_df
:
27368 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27371 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
27374 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
27377 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27380 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27383 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27385 case OPC_ASUB_S_df
:
27386 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27389 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27392 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27395 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
27398 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27401 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27404 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27406 case OPC_ASUB_U_df
:
27407 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27410 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27413 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27416 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
27419 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27421 case OPC_AVER_S_df
:
27422 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27425 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27428 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
27431 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
27434 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27436 case OPC_AVER_U_df
:
27437 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27440 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27443 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
27446 case OPC_DOTP_S_df
:
27447 case OPC_DOTP_U_df
:
27448 case OPC_DPADD_S_df
:
27449 case OPC_DPADD_U_df
:
27450 case OPC_DPSUB_S_df
:
27451 case OPC_HADD_S_df
:
27452 case OPC_DPSUB_U_df
:
27453 case OPC_HADD_U_df
:
27454 case OPC_HSUB_S_df
:
27455 case OPC_HSUB_U_df
:
27456 if (df
== DF_BYTE
) {
27457 generate_exception_end(ctx
, EXCP_RI
);
27460 switch (MASK_MSA_3R(ctx
->opcode
)) {
27461 case OPC_DOTP_S_df
:
27462 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27464 case OPC_DOTP_U_df
:
27465 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27467 case OPC_DPADD_S_df
:
27468 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27470 case OPC_DPADD_U_df
:
27471 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27473 case OPC_DPSUB_S_df
:
27474 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27476 case OPC_HADD_S_df
:
27477 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27479 case OPC_DPSUB_U_df
:
27480 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27482 case OPC_HADD_U_df
:
27483 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27485 case OPC_HSUB_S_df
:
27486 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27488 case OPC_HSUB_U_df
:
27489 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27494 MIPS_INVAL("MSA instruction");
27495 generate_exception_end(ctx
, EXCP_RI
);
27498 tcg_temp_free_i32(twd
);
27499 tcg_temp_free_i32(tws
);
27500 tcg_temp_free_i32(twt
);
27501 tcg_temp_free_i32(tdf
);
27504 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
27506 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
27507 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
27508 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
27509 TCGv telm
= tcg_temp_new();
27510 TCGv_i32 tsr
= tcg_const_i32(source
);
27511 TCGv_i32 tdt
= tcg_const_i32(dest
);
27513 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
27515 gen_load_gpr(telm
, source
);
27516 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
27519 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
27520 gen_store_gpr(telm
, dest
);
27523 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
27526 MIPS_INVAL("MSA instruction");
27527 generate_exception_end(ctx
, EXCP_RI
);
27531 tcg_temp_free(telm
);
27532 tcg_temp_free_i32(tdt
);
27533 tcg_temp_free_i32(tsr
);
27536 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
27539 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
27540 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27541 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27543 TCGv_i32 tws
= tcg_const_i32(ws
);
27544 TCGv_i32 twd
= tcg_const_i32(wd
);
27545 TCGv_i32 tn
= tcg_const_i32(n
);
27546 TCGv_i32 tdf
= tcg_const_i32(df
);
27548 switch (MASK_MSA_ELM(ctx
->opcode
)) {
27550 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
27552 case OPC_SPLATI_df
:
27553 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
27556 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
27558 case OPC_COPY_S_df
:
27559 case OPC_COPY_U_df
:
27560 case OPC_INSERT_df
:
27561 #if !defined(TARGET_MIPS64)
27562 /* Double format valid only for MIPS64 */
27563 if (df
== DF_DOUBLE
) {
27564 generate_exception_end(ctx
, EXCP_RI
);
27568 switch (MASK_MSA_ELM(ctx
->opcode
)) {
27569 case OPC_COPY_S_df
:
27570 if (likely(wd
!= 0)) {
27571 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
27574 case OPC_COPY_U_df
:
27575 if (likely(wd
!= 0)) {
27576 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
27579 case OPC_INSERT_df
:
27580 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
27585 MIPS_INVAL("MSA instruction");
27586 generate_exception_end(ctx
, EXCP_RI
);
27588 tcg_temp_free_i32(twd
);
27589 tcg_temp_free_i32(tws
);
27590 tcg_temp_free_i32(tn
);
27591 tcg_temp_free_i32(tdf
);
27594 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
27596 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
27597 uint32_t df
= 0, n
= 0;
27599 if ((dfn
& 0x30) == 0x00) {
27602 } else if ((dfn
& 0x38) == 0x20) {
27605 } else if ((dfn
& 0x3c) == 0x30) {
27608 } else if ((dfn
& 0x3e) == 0x38) {
27611 } else if (dfn
== 0x3E) {
27612 /* CTCMSA, CFCMSA, MOVE.V */
27613 gen_msa_elm_3e(env
, ctx
);
27616 generate_exception_end(ctx
, EXCP_RI
);
27620 gen_msa_elm_df(env
, ctx
, df
, n
);
27623 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
27625 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
27626 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
27627 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27628 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27629 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27631 TCGv_i32 twd
= tcg_const_i32(wd
);
27632 TCGv_i32 tws
= tcg_const_i32(ws
);
27633 TCGv_i32 twt
= tcg_const_i32(wt
);
27634 TCGv_i32 tdf
= tcg_temp_new_i32();
27636 /* adjust df value for floating-point instruction */
27637 tcg_gen_movi_i32(tdf
, df
+ 2);
27639 switch (MASK_MSA_3RF(ctx
->opcode
)) {
27641 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27644 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
27647 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
27650 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
27653 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
27656 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27659 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
27662 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
27665 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27668 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27671 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
27674 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
27677 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
27680 tcg_gen_movi_i32(tdf
, df
+ 1);
27681 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27684 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
27687 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
27689 case OPC_MADD_Q_df
:
27690 tcg_gen_movi_i32(tdf
, df
+ 1);
27691 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27694 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
27696 case OPC_MSUB_Q_df
:
27697 tcg_gen_movi_i32(tdf
, df
+ 1);
27698 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27701 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
27704 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
27707 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27710 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
27713 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
27716 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
27719 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27722 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27725 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
27728 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27731 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
27734 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
27737 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
27739 case OPC_MULR_Q_df
:
27740 tcg_gen_movi_i32(tdf
, df
+ 1);
27741 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27744 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
27746 case OPC_FMIN_A_df
:
27747 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27749 case OPC_MADDR_Q_df
:
27750 tcg_gen_movi_i32(tdf
, df
+ 1);
27751 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27754 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
27757 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
27759 case OPC_MSUBR_Q_df
:
27760 tcg_gen_movi_i32(tdf
, df
+ 1);
27761 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
27764 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
27766 case OPC_FMAX_A_df
:
27767 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27770 MIPS_INVAL("MSA instruction");
27771 generate_exception_end(ctx
, EXCP_RI
);
27775 tcg_temp_free_i32(twd
);
27776 tcg_temp_free_i32(tws
);
27777 tcg_temp_free_i32(twt
);
27778 tcg_temp_free_i32(tdf
);
27781 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
27783 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
27784 (op & (0x7 << 18)))
27785 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27786 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27787 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27788 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
27789 TCGv_i32 twd
= tcg_const_i32(wd
);
27790 TCGv_i32 tws
= tcg_const_i32(ws
);
27791 TCGv_i32 twt
= tcg_const_i32(wt
);
27792 TCGv_i32 tdf
= tcg_const_i32(df
);
27794 switch (MASK_MSA_2R(ctx
->opcode
)) {
27796 #if !defined(TARGET_MIPS64)
27797 /* Double format valid only for MIPS64 */
27798 if (df
== DF_DOUBLE
) {
27799 generate_exception_end(ctx
, EXCP_RI
);
27803 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
27806 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
27809 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
27812 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
27815 MIPS_INVAL("MSA instruction");
27816 generate_exception_end(ctx
, EXCP_RI
);
27820 tcg_temp_free_i32(twd
);
27821 tcg_temp_free_i32(tws
);
27822 tcg_temp_free_i32(twt
);
27823 tcg_temp_free_i32(tdf
);
27826 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
27828 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
27829 (op & (0xf << 17)))
27830 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27831 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27832 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27833 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
27834 TCGv_i32 twd
= tcg_const_i32(wd
);
27835 TCGv_i32 tws
= tcg_const_i32(ws
);
27836 TCGv_i32 twt
= tcg_const_i32(wt
);
27837 /* adjust df value for floating-point instruction */
27838 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
27840 switch (MASK_MSA_2RF(ctx
->opcode
)) {
27841 case OPC_FCLASS_df
:
27842 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
27844 case OPC_FTRUNC_S_df
:
27845 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
27847 case OPC_FTRUNC_U_df
:
27848 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
27851 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
27853 case OPC_FRSQRT_df
:
27854 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
27857 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
27860 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
27863 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
27865 case OPC_FEXUPL_df
:
27866 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
27868 case OPC_FEXUPR_df
:
27869 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
27872 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
27875 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
27877 case OPC_FTINT_S_df
:
27878 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
27880 case OPC_FTINT_U_df
:
27881 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
27883 case OPC_FFINT_S_df
:
27884 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
27886 case OPC_FFINT_U_df
:
27887 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
27891 tcg_temp_free_i32(twd
);
27892 tcg_temp_free_i32(tws
);
27893 tcg_temp_free_i32(twt
);
27894 tcg_temp_free_i32(tdf
);
27897 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
27899 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
27900 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27901 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27902 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27903 TCGv_i32 twd
= tcg_const_i32(wd
);
27904 TCGv_i32 tws
= tcg_const_i32(ws
);
27905 TCGv_i32 twt
= tcg_const_i32(wt
);
27907 switch (MASK_MSA_VEC(ctx
->opcode
)) {
27909 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
27912 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
27915 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
27918 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
27921 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
27924 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
27927 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
27930 MIPS_INVAL("MSA instruction");
27931 generate_exception_end(ctx
, EXCP_RI
);
27935 tcg_temp_free_i32(twd
);
27936 tcg_temp_free_i32(tws
);
27937 tcg_temp_free_i32(twt
);
27940 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
27942 switch (MASK_MSA_VEC(ctx
->opcode
)) {
27950 gen_msa_vec_v(env
, ctx
);
27953 gen_msa_2r(env
, ctx
);
27956 gen_msa_2rf(env
, ctx
);
27959 MIPS_INVAL("MSA instruction");
27960 generate_exception_end(ctx
, EXCP_RI
);
27965 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
27967 uint32_t opcode
= ctx
->opcode
;
27968 check_insn(ctx
, ASE_MSA
);
27969 check_msa_access(ctx
);
27971 switch (MASK_MSA_MINOR(opcode
)) {
27972 case OPC_MSA_I8_00
:
27973 case OPC_MSA_I8_01
:
27974 case OPC_MSA_I8_02
:
27975 gen_msa_i8(env
, ctx
);
27977 case OPC_MSA_I5_06
:
27978 case OPC_MSA_I5_07
:
27979 gen_msa_i5(env
, ctx
);
27981 case OPC_MSA_BIT_09
:
27982 case OPC_MSA_BIT_0A
:
27983 gen_msa_bit(env
, ctx
);
27985 case OPC_MSA_3R_0D
:
27986 case OPC_MSA_3R_0E
:
27987 case OPC_MSA_3R_0F
:
27988 case OPC_MSA_3R_10
:
27989 case OPC_MSA_3R_11
:
27990 case OPC_MSA_3R_12
:
27991 case OPC_MSA_3R_13
:
27992 case OPC_MSA_3R_14
:
27993 case OPC_MSA_3R_15
:
27994 gen_msa_3r(env
, ctx
);
27997 gen_msa_elm(env
, ctx
);
27999 case OPC_MSA_3RF_1A
:
28000 case OPC_MSA_3RF_1B
:
28001 case OPC_MSA_3RF_1C
:
28002 gen_msa_3rf(env
, ctx
);
28005 gen_msa_vec(env
, ctx
);
28016 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
28017 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
28018 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28019 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
28021 TCGv_i32 twd
= tcg_const_i32(wd
);
28022 TCGv taddr
= tcg_temp_new();
28023 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
28025 switch (MASK_MSA_MINOR(opcode
)) {
28027 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
28030 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
28033 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
28036 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
28039 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
28042 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
28045 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
28048 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
28052 tcg_temp_free_i32(twd
);
28053 tcg_temp_free(taddr
);
28057 MIPS_INVAL("MSA instruction");
28058 generate_exception_end(ctx
, EXCP_RI
);
28064 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
28067 int rs
, rt
, rd
, sa
;
28071 /* make sure instructions are on a word boundary */
28072 if (ctx
->base
.pc_next
& 0x3) {
28073 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
28074 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
28078 /* Handle blikely not taken case */
28079 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
28080 TCGLabel
*l1
= gen_new_label();
28082 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
28083 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
28084 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
28088 op
= MASK_OP_MAJOR(ctx
->opcode
);
28089 rs
= (ctx
->opcode
>> 21) & 0x1f;
28090 rt
= (ctx
->opcode
>> 16) & 0x1f;
28091 rd
= (ctx
->opcode
>> 11) & 0x1f;
28092 sa
= (ctx
->opcode
>> 6) & 0x1f;
28093 imm
= (int16_t)ctx
->opcode
;
28096 decode_opc_special(env
, ctx
);
28099 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
28100 decode_mmi(env
, ctx
);
28101 } else if (ctx
->insn_flags
& ASE_MXU
) {
28102 decode_opc_mxu(env
, ctx
);
28104 decode_opc_special2_legacy(env
, ctx
);
28108 if (ctx
->insn_flags
& INSN_R5900
) {
28109 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
28111 decode_opc_special3(env
, ctx
);
28115 op1
= MASK_REGIMM(ctx
->opcode
);
28117 case OPC_BLTZL
: /* REGIMM branches */
28121 check_insn(ctx
, ISA_MIPS2
);
28122 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28126 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28130 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28132 /* OPC_NAL, OPC_BAL */
28133 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
28135 generate_exception_end(ctx
, EXCP_RI
);
28138 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28141 case OPC_TGEI
: /* REGIMM traps */
28148 check_insn(ctx
, ISA_MIPS2
);
28149 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28150 gen_trap(ctx
, op1
, rs
, -1, imm
);
28153 check_insn(ctx
, ISA_MIPS32R6
);
28154 generate_exception_end(ctx
, EXCP_RI
);
28157 check_insn(ctx
, ISA_MIPS32R2
);
28158 /* Break the TB to be able to sync copied instructions
28160 ctx
->base
.is_jmp
= DISAS_STOP
;
28162 case OPC_BPOSGE32
: /* MIPS DSP branch */
28163 #if defined(TARGET_MIPS64)
28167 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
28169 #if defined(TARGET_MIPS64)
28171 check_insn(ctx
, ISA_MIPS32R6
);
28172 check_mips_64(ctx
);
28174 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
28178 check_insn(ctx
, ISA_MIPS32R6
);
28179 check_mips_64(ctx
);
28181 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
28185 default: /* Invalid */
28186 MIPS_INVAL("regimm");
28187 generate_exception_end(ctx
, EXCP_RI
);
28192 check_cp0_enabled(ctx
);
28193 op1
= MASK_CP0(ctx
->opcode
);
28201 #if defined(TARGET_MIPS64)
28205 #ifndef CONFIG_USER_ONLY
28206 gen_cp0(env
, ctx
, op1
, rt
, rd
);
28207 #endif /* !CONFIG_USER_ONLY */
28225 #ifndef CONFIG_USER_ONLY
28226 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
28227 #endif /* !CONFIG_USER_ONLY */
28230 #ifndef CONFIG_USER_ONLY
28233 TCGv t0
= tcg_temp_new();
28235 op2
= MASK_MFMC0(ctx
->opcode
);
28239 gen_helper_dmt(t0
);
28240 gen_store_gpr(t0
, rt
);
28244 gen_helper_emt(t0
);
28245 gen_store_gpr(t0
, rt
);
28249 gen_helper_dvpe(t0
, cpu_env
);
28250 gen_store_gpr(t0
, rt
);
28254 gen_helper_evpe(t0
, cpu_env
);
28255 gen_store_gpr(t0
, rt
);
28258 check_insn(ctx
, ISA_MIPS32R6
);
28260 gen_helper_dvp(t0
, cpu_env
);
28261 gen_store_gpr(t0
, rt
);
28265 check_insn(ctx
, ISA_MIPS32R6
);
28267 gen_helper_evp(t0
, cpu_env
);
28268 gen_store_gpr(t0
, rt
);
28272 check_insn(ctx
, ISA_MIPS32R2
);
28273 save_cpu_state(ctx
, 1);
28274 gen_helper_di(t0
, cpu_env
);
28275 gen_store_gpr(t0
, rt
);
28276 /* Stop translation as we may have switched
28277 the execution mode. */
28278 ctx
->base
.is_jmp
= DISAS_STOP
;
28281 check_insn(ctx
, ISA_MIPS32R2
);
28282 save_cpu_state(ctx
, 1);
28283 gen_helper_ei(t0
, cpu_env
);
28284 gen_store_gpr(t0
, rt
);
28285 /* DISAS_STOP isn't sufficient, we need to ensure we break
28286 out of translated code to check for pending interrupts */
28287 gen_save_pc(ctx
->base
.pc_next
+ 4);
28288 ctx
->base
.is_jmp
= DISAS_EXIT
;
28290 default: /* Invalid */
28291 MIPS_INVAL("mfmc0");
28292 generate_exception_end(ctx
, EXCP_RI
);
28297 #endif /* !CONFIG_USER_ONLY */
28300 check_insn(ctx
, ISA_MIPS32R2
);
28301 gen_load_srsgpr(rt
, rd
);
28304 check_insn(ctx
, ISA_MIPS32R2
);
28305 gen_store_srsgpr(rt
, rd
);
28309 generate_exception_end(ctx
, EXCP_RI
);
28313 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
28314 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28315 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
28316 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28319 /* Arithmetic with immediate opcode */
28320 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28324 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28326 case OPC_SLTI
: /* Set on less than with immediate opcode */
28328 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
28330 case OPC_ANDI
: /* Arithmetic with immediate opcode */
28331 case OPC_LUI
: /* OPC_AUI */
28334 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
28336 case OPC_J
: /* Jump */
28338 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
28339 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
28342 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
28343 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28345 generate_exception_end(ctx
, EXCP_RI
);
28348 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
28349 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28352 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28355 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
28356 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28358 generate_exception_end(ctx
, EXCP_RI
);
28361 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
28362 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28365 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28368 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
28371 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28373 check_insn(ctx
, ISA_MIPS32R6
);
28374 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
28375 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28378 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
28381 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28383 check_insn(ctx
, ISA_MIPS32R6
);
28384 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
28385 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28390 check_insn(ctx
, ISA_MIPS2
);
28391 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28395 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
28397 case OPC_LL
: /* Load and stores */
28398 check_insn(ctx
, ISA_MIPS2
);
28399 if (ctx
->insn_flags
& INSN_R5900
) {
28400 check_insn_opc_user_only(ctx
, INSN_R5900
);
28405 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28413 gen_ld(ctx
, op
, rt
, rs
, imm
);
28417 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28422 gen_st(ctx
, op
, rt
, rs
, imm
);
28425 check_insn(ctx
, ISA_MIPS2
);
28426 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28427 if (ctx
->insn_flags
& INSN_R5900
) {
28428 check_insn_opc_user_only(ctx
, INSN_R5900
);
28430 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
28433 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28434 check_cp0_enabled(ctx
);
28435 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
28436 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
28437 gen_cache_operation(ctx
, rt
, rs
, imm
);
28439 /* Treat as NOP. */
28442 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28443 if (ctx
->insn_flags
& INSN_R5900
) {
28444 /* Treat as NOP. */
28446 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
28447 /* Treat as NOP. */
28451 /* Floating point (COP1). */
28456 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
28460 op1
= MASK_CP1(ctx
->opcode
);
28465 check_cp1_enabled(ctx
);
28466 check_insn(ctx
, ISA_MIPS32R2
);
28472 check_cp1_enabled(ctx
);
28473 gen_cp1(ctx
, op1
, rt
, rd
);
28475 #if defined(TARGET_MIPS64)
28478 check_cp1_enabled(ctx
);
28479 check_insn(ctx
, ISA_MIPS3
);
28480 check_mips_64(ctx
);
28481 gen_cp1(ctx
, op1
, rt
, rd
);
28484 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
28485 check_cp1_enabled(ctx
);
28486 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28488 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
28493 check_insn(ctx
, ASE_MIPS3D
);
28494 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
28495 (rt
>> 2) & 0x7, imm
<< 2);
28499 check_cp1_enabled(ctx
);
28500 check_insn(ctx
, ISA_MIPS32R6
);
28501 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
28505 check_cp1_enabled(ctx
);
28506 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28508 check_insn(ctx
, ASE_MIPS3D
);
28511 check_cp1_enabled(ctx
);
28512 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28513 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
28514 (rt
>> 2) & 0x7, imm
<< 2);
28521 check_cp1_enabled(ctx
);
28522 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
28528 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
28529 check_cp1_enabled(ctx
);
28530 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28532 case R6_OPC_CMP_AF_S
:
28533 case R6_OPC_CMP_UN_S
:
28534 case R6_OPC_CMP_EQ_S
:
28535 case R6_OPC_CMP_UEQ_S
:
28536 case R6_OPC_CMP_LT_S
:
28537 case R6_OPC_CMP_ULT_S
:
28538 case R6_OPC_CMP_LE_S
:
28539 case R6_OPC_CMP_ULE_S
:
28540 case R6_OPC_CMP_SAF_S
:
28541 case R6_OPC_CMP_SUN_S
:
28542 case R6_OPC_CMP_SEQ_S
:
28543 case R6_OPC_CMP_SEUQ_S
:
28544 case R6_OPC_CMP_SLT_S
:
28545 case R6_OPC_CMP_SULT_S
:
28546 case R6_OPC_CMP_SLE_S
:
28547 case R6_OPC_CMP_SULE_S
:
28548 case R6_OPC_CMP_OR_S
:
28549 case R6_OPC_CMP_UNE_S
:
28550 case R6_OPC_CMP_NE_S
:
28551 case R6_OPC_CMP_SOR_S
:
28552 case R6_OPC_CMP_SUNE_S
:
28553 case R6_OPC_CMP_SNE_S
:
28554 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
28556 case R6_OPC_CMP_AF_D
:
28557 case R6_OPC_CMP_UN_D
:
28558 case R6_OPC_CMP_EQ_D
:
28559 case R6_OPC_CMP_UEQ_D
:
28560 case R6_OPC_CMP_LT_D
:
28561 case R6_OPC_CMP_ULT_D
:
28562 case R6_OPC_CMP_LE_D
:
28563 case R6_OPC_CMP_ULE_D
:
28564 case R6_OPC_CMP_SAF_D
:
28565 case R6_OPC_CMP_SUN_D
:
28566 case R6_OPC_CMP_SEQ_D
:
28567 case R6_OPC_CMP_SEUQ_D
:
28568 case R6_OPC_CMP_SLT_D
:
28569 case R6_OPC_CMP_SULT_D
:
28570 case R6_OPC_CMP_SLE_D
:
28571 case R6_OPC_CMP_SULE_D
:
28572 case R6_OPC_CMP_OR_D
:
28573 case R6_OPC_CMP_UNE_D
:
28574 case R6_OPC_CMP_NE_D
:
28575 case R6_OPC_CMP_SOR_D
:
28576 case R6_OPC_CMP_SUNE_D
:
28577 case R6_OPC_CMP_SNE_D
:
28578 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
28581 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
28582 rt
, rd
, sa
, (imm
>> 8) & 0x7);
28587 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
28602 check_insn(ctx
, ASE_MSA
);
28603 gen_msa_branch(env
, ctx
, op1
);
28607 generate_exception_end(ctx
, EXCP_RI
);
28612 /* Compact branches [R6] and COP2 [non-R6] */
28613 case OPC_BC
: /* OPC_LWC2 */
28614 case OPC_BALC
: /* OPC_SWC2 */
28615 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28616 /* OPC_BC, OPC_BALC */
28617 gen_compute_compact_branch(ctx
, op
, 0, 0,
28618 sextract32(ctx
->opcode
<< 2, 0, 28));
28620 /* OPC_LWC2, OPC_SWC2 */
28621 /* COP2: Not implemented. */
28622 generate_exception_err(ctx
, EXCP_CpU
, 2);
28625 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
28626 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
28627 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28629 /* OPC_BEQZC, OPC_BNEZC */
28630 gen_compute_compact_branch(ctx
, op
, rs
, 0,
28631 sextract32(ctx
->opcode
<< 2, 0, 23));
28633 /* OPC_JIC, OPC_JIALC */
28634 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
28637 /* OPC_LWC2, OPC_SWC2 */
28638 /* COP2: Not implemented. */
28639 generate_exception_err(ctx
, EXCP_CpU
, 2);
28643 check_insn(ctx
, INSN_LOONGSON2F
);
28644 /* Note that these instructions use different fields. */
28645 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
28649 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28650 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
28651 check_cp1_enabled(ctx
);
28652 op1
= MASK_CP3(ctx
->opcode
);
28656 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
28662 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
28663 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
28666 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
28667 /* Treat as NOP. */
28670 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
28684 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
28685 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
28689 generate_exception_end(ctx
, EXCP_RI
);
28693 generate_exception_err(ctx
, EXCP_CpU
, 1);
28697 #if defined(TARGET_MIPS64)
28698 /* MIPS64 opcodes */
28700 if (ctx
->insn_flags
& INSN_R5900
) {
28701 check_insn_opc_user_only(ctx
, INSN_R5900
);
28706 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28710 check_insn(ctx
, ISA_MIPS3
);
28711 check_mips_64(ctx
);
28712 gen_ld(ctx
, op
, rt
, rs
, imm
);
28716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28719 check_insn(ctx
, ISA_MIPS3
);
28720 check_mips_64(ctx
);
28721 gen_st(ctx
, op
, rt
, rs
, imm
);
28724 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28725 check_insn(ctx
, ISA_MIPS3
);
28726 if (ctx
->insn_flags
& INSN_R5900
) {
28727 check_insn_opc_user_only(ctx
, INSN_R5900
);
28729 check_mips_64(ctx
);
28730 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
28732 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
28733 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28734 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
28735 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28738 check_insn(ctx
, ISA_MIPS3
);
28739 check_mips_64(ctx
);
28740 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28744 check_insn(ctx
, ISA_MIPS3
);
28745 check_mips_64(ctx
);
28746 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28749 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
28750 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28751 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28753 MIPS_INVAL("major opcode");
28754 generate_exception_end(ctx
, EXCP_RI
);
28758 case OPC_DAUI
: /* OPC_JALX */
28759 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28760 #if defined(TARGET_MIPS64)
28762 check_mips_64(ctx
);
28764 generate_exception(ctx
, EXCP_RI
);
28765 } else if (rt
!= 0) {
28766 TCGv t0
= tcg_temp_new();
28767 gen_load_gpr(t0
, rs
);
28768 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
28772 generate_exception_end(ctx
, EXCP_RI
);
28773 MIPS_INVAL("major opcode");
28777 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
28778 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
28779 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
28782 case OPC_MSA
: /* OPC_MDMX */
28783 if (ctx
->insn_flags
& INSN_R5900
) {
28784 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
28786 /* MDMX: Not implemented. */
28791 check_insn(ctx
, ISA_MIPS32R6
);
28792 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
28794 default: /* Invalid */
28795 MIPS_INVAL("major opcode");
28796 generate_exception_end(ctx
, EXCP_RI
);
/*
 * TranslatorOps hook: set up a DisasContext for a new translation block.
 *
 * Caches the CP0 configuration bits and FPU mode bits that the decoder
 * consults on every instruction, so decode never has to touch env directly.
 */
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    /* Page containing the TB start; used to stop translation at page end. */
    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config2 = env->CP0_Config2;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    ctx->btarget = 0;
    /* Decode per-field capability bits out of the Config registers. */
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    /* Paired-single FP is present on FCR0.PS cores and the Loongson 2E/2F. */
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    /* R6 mandates support for unaligned accesses; older ISAs trap. */
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}
/* TranslatorOps hook: nothing to emit at the start of a TB for MIPS. */
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}
/*
 * TranslatorOps hook: record per-insn state (PC, branch flags, branch
 * target) so restore_state_to_opc() can rebuild it after an exception.
 */
static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}
/*
 * TranslatorOps hook: emit a debug exception when a guest breakpoint is
 * hit, and end the TB.
 */
static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}
/*
 * TranslatorOps hook: fetch and translate one guest instruction.
 *
 * Dispatches on the active instruction set (nanoMIPS, classic MIPS,
 * microMIPS, MIPS16) and handles delay-slot/forbidden-slot bookkeeping:
 * a branch and its slot are folded into a single emitted "instruction".
 */
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    /* Nonzero when this insn sits in the slot of a pending branch. */
    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        /* Compressed mode requested but no compressed ISA available. */
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /* force to generate branch as there is neither delay nor
               forbidden slot */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /* Force to generate branch as microMIPS R6 doesn't restrict
               branches in the forbidden slot. */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /* Execute a branch and its delay slot as a single instruction.
       This is what GDB expects and is consistent with what the
       hardware does (e.g. if a delay slot instruction faults, the
       reported PC is the PC of the branch). */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    /* Stop translation when we cross a guest page boundary. */
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
/*
 * TranslatorOps hook: emit the TB epilogue according to why translation
 * stopped (chainable exit, lookup-and-goto, hard exit, or no return).
 */
static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        /* Single-stepping: raise a debug exception instead of chaining. */
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx->base.is_jmp) {
        case DISAS_STOP:
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
            break;
        case DISAS_NEXT:
        case DISAS_TOO_MANY:
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            break;
        case DISAS_EXIT:
            tcg_gen_exit_tb(NULL, 0);
            break;
        case DISAS_NORETURN:
            /* Exception already raised; nothing to emit. */
            break;
        default:
            g_assert_not_reached();
        }
    }
}
/* TranslatorOps hook: log the guest disassembly of the translated TB. */
static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}
/* Hook table handed to the generic translator_loop(). */
static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};
/* Entry point: translate one guest TB via the generic translator loop. */
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb);
}
/*
 * Dump the active FPU register file and control registers to @f.
 * In FR=1 (64-bit FPU) mode every FPR is printed; in FR=0 mode adjacent
 * even/odd 32-bit halves are combined into one 64-bit view, stepping by 2.
 */
static void fpu_dump_state(CPUMIPSState *env, FILE *f,
                           fprintf_function fpu_fprintf, int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

/* Print one FPR as raw word/double plus float interpretations. */
#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64) {                                                 \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        } else {                                                        \
            /* FR=0: combine this register with its odd sibling. */     \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
/*
 * Dump the architectural CPU state (PC, HI/LO, GPRs, key CP0 registers,
 * and optionally the FPU state) to @f for the monitor / -d cpu logging.
 */
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    /* Four GPRs per output row. */
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            cpu_fprintf(f, "GPR%02d:", i);
        }
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            cpu_fprintf(f, "\n");
        }
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    /* FPU dump only when requested and the FPU is actually enabled. */
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, cpu_fprintf, flags);
    }
}
/*
 * Register the TCG global variables that mirror the MIPS architectural
 * state (GPRs, FPU/MSA registers, HI/LO, DSP control, branch state, MXU
 * registers).  Called once at accelerator init.
 *
 * NOTE(review): several argument lines were lost in extraction; the
 * register-name arguments below follow the surrounding declarations —
 * confirm against the repository.
 */
void mips_tcg_init(void)
{
    int i;

    /* $0 is hardwired to zero and never materialised as a TCG global. */
    cpu_gpr[0] = NULL;
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");

    /* MXU general registers; the last slot is the MXU control register. */
    for (i = 0; i < NUMBER_OF_MXU_REGISTERS - 1; i++) {
        mxu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.mxu_gpr[i]),
                                        mxuregnames[i]);
    }

    mxu_CR = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.mxu_cr),
                                mxuregnames[NUMBER_OF_MXU_REGISTERS - 1]);
}
29124 #include "translate_init.inc.c"
/*
 * Finish constructing @env for the selected cpu_model: set the reset
 * exception base and initialise the MMU (system mode only), FPU and MVP
 * sub-structures.
 */
void cpu_mips_realize_env(CPUMIPSState *env)
{
    /* Architectural reset vector (KSEG1 0xBFC00000), sign-extended. */
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}
29137 bool cpu_supports_cps_smp(const char *cpu_type
)
29139 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29140 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
29143 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
29145 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29146 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
/*
 * Override the reset/exception vector base of virtual processor
 * @vp_index (used by the CPS GCR to relocate secondary VP boot).
 */
void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
29155 void cpu_state_reset(CPUMIPSState
*env
)
29157 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
29158 CPUState
*cs
= CPU(cpu
);
29160 /* Reset registers to their default values */
29161 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
29162 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
29163 #ifdef TARGET_WORDS_BIGENDIAN
29164 env
->CP0_Config0
|= (1 << CP0C0_BE
);
29166 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
29167 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
29168 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
29169 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
29170 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
29171 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
29172 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
29173 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
29174 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
29175 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
29176 << env
->cpu_model
->CP0_LLAddr_shift
;
29177 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
29178 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
29179 env
->CCRes
= env
->cpu_model
->CCRes
;
29180 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
29181 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
29182 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
29183 env
->current_tc
= 0;
29184 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
29185 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
29186 #if defined(TARGET_MIPS64)
29187 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
29188 env
->SEGMask
|= 3ULL << 62;
29191 env
->PABITS
= env
->cpu_model
->PABITS
;
29192 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
29193 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
29194 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
29195 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
29196 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
29197 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
29198 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
29199 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
29200 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
29201 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
29202 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
29203 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
29204 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
29205 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
29206 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
29207 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
29208 env
->msair
= env
->cpu_model
->MSAIR
;
29209 env
->insn_flags
= env
->cpu_model
->insn_flags
;
29211 #if defined(CONFIG_USER_ONLY)
29212 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
29213 # ifdef TARGET_MIPS64
29214 /* Enable 64-bit register mode. */
29215 env
->CP0_Status
|= (1 << CP0St_PX
);
29217 # ifdef TARGET_ABI_MIPSN64
29218 /* Enable 64-bit address mode. */
29219 env
->CP0_Status
|= (1 << CP0St_UX
);
29221 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
29222 hardware registers. */
29223 env
->CP0_HWREna
|= 0x0000000F;
29224 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
29225 env
->CP0_Status
|= (1 << CP0St_CU1
);
29227 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
29228 env
->CP0_Status
|= (1 << CP0St_MX
);
29230 # if defined(TARGET_MIPS64)
29231 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
29232 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
29233 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
29234 env
->CP0_Status
|= (1 << CP0St_FR
);
29238 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
29239 /* If the exception was raised from a delay slot,
29240 come back to the jump. */
29241 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
29242 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
29244 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
29246 env
->active_tc
.PC
= env
->exception_base
;
29247 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
29248 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
29249 env
->CP0_Wired
= 0;
29250 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
29251 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
29252 if (mips_um_ksegs_enabled()) {
29253 env
->CP0_EBase
|= 0x40000000;
29255 env
->CP0_EBase
|= (int32_t)0x80000000;
29257 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
29258 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
29260 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
29262 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
29263 /* vectored interrupts not implemented, timer on int 7,
29264 no performance counters. */
29265 env
->CP0_IntCtl
= 0xe0000000;
29269 for (i
= 0; i
< 7; i
++) {
29270 env
->CP0_WatchLo
[i
] = 0;
29271 env
->CP0_WatchHi
[i
] = 0x80000000;
29273 env
->CP0_WatchLo
[7] = 0;
29274 env
->CP0_WatchHi
[7] = 0;
29276 /* Count register increments in debug mode, EJTAG version 1 */
29277 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
29279 cpu_mips_store_count(env
, 1);
29281 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
29284 /* Only TC0 on VPE 0 starts as active. */
29285 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
29286 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
29287 env
->tcs
[i
].CP0_TCHalt
= 1;
29289 env
->active_tc
.CP0_TCHalt
= 1;
29292 if (cs
->cpu_index
== 0) {
29293 /* VPE0 starts up enabled. */
29294 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
29295 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
29297 /* TC0 starts up unhalted. */
29299 env
->active_tc
.CP0_TCHalt
= 0;
29300 env
->tcs
[0].CP0_TCHalt
= 0;
29301 /* With thread 0 active. */
29302 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
29303 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
29308 * Configure default legacy segmentation control. We use this regardless of
29309 * whether segmentation control is presented to the guest.
29311 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
29312 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
29313 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
29314 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
29315 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
29316 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29318 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
29319 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29320 (3 << CP0SC_C
)) << 16;
29321 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
29322 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
29323 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
29324 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
29325 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
29326 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
29327 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
29328 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
29330 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
29331 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
29332 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
29333 env
->CP0_Status
|= (1 << CP0St_FR
);
29336 if (env
->insn_flags
& ISA_MIPS32R6
) {
29338 env
->CP0_PWSize
= 0x40;
29344 env
->CP0_PWField
= 0x0C30C302;
29351 env
->CP0_PWField
= 0x02;
29354 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
29355 /* microMIPS on reset when Config3.ISA is 3 */
29356 env
->hflags
|= MIPS_HFLAG_M16
;
29360 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
29364 compute_hflags(env
);
29365 restore_fp_status(env
);
29366 restore_pamask(env
);
29367 cs
->exception_index
= EXCP_NONE
;
29369 if (semihosting_get_argc()) {
29370 /* UHI interface can be used to obtain argc and argv */
29371 env
->active_tc
.gpr
[4] = -1;
29375 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
29376 target_ulong
*data
)
29378 env
->active_tc
.PC
= data
[0];
29379 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
29380 env
->hflags
|= data
[1];
29381 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
29382 case MIPS_HFLAG_BR
:
29384 case MIPS_HFLAG_BC
:
29385 case MIPS_HFLAG_BL
:
29387 env
->btarget
= data
[2];