2 * MIPS emulation for QEMU - main translation routines
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
/* Set to 1 to enable extra disassembly/debug logging in this translator. */
42 #define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes: extract the 6-bit major opcode field (bits 31..26).
 * The argument is parenthesized so compound expressions expand safely. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
/* PC-relative address computation / loads: mask in the top 2 or top 5 bits
 * of the immediate field in addition to the major opcode. */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
/* MIPS SPECIAL opcodes: major opcode plus the 6-bit function field (bits 5..0).
 * Fully parenthesized so the expansion is safe in any expression context. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
/*
 * R6 Multiply and Divide instructions have the same opcode
 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]; they are
 * distinguished by bits 10..6, so include them in the mask.
 */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & 0x7ff))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
/* Multiplication variants of the VR54xx: SPECIAL function field plus the
 * 5-bit sa field (bits 10..6). Fully parenthesized for macro safety. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
/* REGIMM (rt field) opcodes: major opcode plus the 5-bit rt field
 * (bits 20..16). Fully parenthesized for macro safety. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
/* SPECIAL2 opcodes: major opcode plus the 6-bit function field.
 * Fully parenthesized for macro safety. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
/* SPECIAL3 opcodes: major opcode plus the 6-bit function field.
 * Fully parenthesized for macro safety. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL sub-opcodes: SPECIAL3 function field plus the 5-bit sa field. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL sub-opcodes: SPECIAL3 function field plus the 5-bit sa field. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP indexed-load (LX) sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* DSP ADDU.QB sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* ADDUH.QB shares its SPECIAL3 function field with OPC_MULT_G_2E. */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* DSP ABSQ_S.PH sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* DSP CMPU_EQ.QB sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* DSP SHLL.QB sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DSP DPA.W.PH sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* DSP INSV sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* DSP APPEND sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* DSP EXTR.W sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* DSP ABSQ_S.QH sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* DSP ADDU.OB sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* DSP CMPU_EQ.OB sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DSP DAPPEND sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DSP DEXTR.W sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* DSP DINSV sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* DSP DPAQ.W.QH sub-opcodes: SPECIAL3 plus the 5-bit field at bits 10..6. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* Select the SPECIAL3 function field plus the 5-bit minor opcode in bits 10..6. */
#define MASK_SHLL_OB(op) ((op & (0x1F << 6)) | MASK_SPECIAL3(op))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
/*
 * Major opcode plus the rs field (bits 25..21).  Fully parenthesized:
 * without the outer parens, `MASK_CP0(op) == x` would group as
 * `MASK_OP_MAJOR(op) | ((op & ...) == x)` because == binds tighter than |.
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * MFMC0: CP0 selector plus the low 16 bits.  Outer parens keep the
 * expansion safe next to higher-precedence operators (e.g. ==).
 */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
/*
 * Coprocessor 0 with rs == C0: CP0 selector plus the 6-bit function
 * field.  Outer parens keep the expansion safe next to ==, & etc.
 */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
960 /* Coprocessor 1 (rs field) */
/*
 * Coprocessor 1: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the expansion binds correctly at any use site.
 */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
963 /* Values for the fmt field in FP instructions */
965 /* 0 - 15 are reserved */
966 FMT_S
= 16, /* single fp */
967 FMT_D
= 17, /* double fp */
968 FMT_E
= 18, /* extended fp */
969 FMT_Q
= 19, /* quad fp */
970 FMT_W
= 20, /* 32-bit fixed */
971 FMT_L
= 21, /* 64-bit fixed */
972 FMT_PS
= 22, /* paired single fp */
973 /* 23 - 31 are reserved */
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * CP1 selector plus the 6-bit function field.  Outer parens keep the
 * expansion safe next to higher-precedence operators (e.g. ==).
 */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * CP1 branch: CP1 selector plus the nd/tf bits (17..16).  Outer parens
 * keep the expansion safe next to higher-precedence operators.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * Coprocessor 2: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the expansion binds correctly at any use site.
 */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* Loongson multimedia: low 5 bits, rs field (25..21) and major opcode. */
#define MASK_LMI(op) ((op & 0x1F) | (op & (0x1F << 21)) | MASK_OP_MAJOR(op))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/*
 * Coprocessor 3: major opcode plus the 6-bit function field.  Outer
 * parens keep the expansion safe next to ==, & etc.
 */
#define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* MSA: 6-bit minor opcode plus the major opcode field. */
#define MASK_MSA_MINOR(op) ((op & 0x3F) | MASK_OP_MAJOR(op))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1403 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1404 * ============================================
1407 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1408 * instructions set. It is designed to fit the needs of signal, graphical and
1409 * video processing applications. MXU instruction set is used in Xburst family
1410 * of microprocessors by Ingenic.
1412 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1413 * the control register.
1416 * The notation used in MXU assembler mnemonics
1417 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1419 * Register operands:
1421 * XRa, XRb, XRc, XRd - MXU registers
1422 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1424 * Non-register operands:
1426 * aptn1 - 1-bit accumulate add/subtract pattern
1427 * aptn2 - 2-bit accumulate add/subtract pattern
1428 * eptn2 - 2-bit execute add/subtract pattern
1429 * optn2 - 2-bit operand pattern
1430 * optn3 - 3-bit operand pattern
1431 * sft4 - 4-bit shift amount
1432 * strd2 - 2-bit stride amount
1436 * Level of parallelism: Operand size:
1437 * S - single operation at a time 32 - word
1438 * D - two operations in parallel 16 - half word
1439 * Q - four operations in parallel 8 - byte
1443 * ADD - Add or subtract
1444 * ADDC - Add with carry-in
1446 * ASUM - Sum together then accumulate (add or subtract)
1447 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1448 * AVG - Average between 2 operands
1449 * ABD - Absolute difference
1451 * AND - Logical bitwise 'and' operation
1453 * EXTR - Extract bits
1454 * I2M - Move from GPR register to MXU register
1455 * LDD - Load data from memory to XRF
1456 * LDI - Load data from memory to XRF (and increase the address base)
1457 * LUI - Load unsigned immediate
1459 * MULU - Unsigned multiply
1460 * MADD - 64-bit operand add 32x32 product
1461 * MSUB - 64-bit operand subtract 32x32 product
1462 * MAC - Multiply and accumulate (add or subtract)
1463 * MAD - Multiply and add or subtract
1464 * MAX - Maximum between 2 operands
1465 * MIN - Minimum between 2 operands
1466 * M2I - Move from MXU register to GPR register
1467 * MOVZ - Move if zero
1468 * MOVN - Move if non-zero
1469 * NOR - Logical bitwise 'nor' operation
1470 * OR - Logical bitwise 'or' operation
1471 * STD - Store data from XRF to memory
1472 * SDI - Store data from XRF to memory (and increase the address base)
1473 * SLT - Set of less than comparison
1474 * SAD - Sum of absolute differences
1475 * SLL - Logical shift left
1476 * SLR - Logical shift right
1477 * SAR - Arithmetic shift right
* SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
1481 * XOR - Logical bitwise 'exclusive or' operation
1485 * E - Expand results
1486 * F - Fixed point multiplication
1487 * L - Low part result
1488 * R - Doing rounding
1489 * V - Variable instead of immediate
1490 * W - Combine above L and V
1493 * The list of MXU instructions grouped by functionality
1494 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1496 * Load/Store instructions Multiplication instructions
1497 * ----------------------- ---------------------------
1499 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1500 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1501 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1502 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1503 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1504 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1505 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1506 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1507 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1508 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1509 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1510 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1511 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1513 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1514 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1515 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1516 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1517 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1518 * S16SDI XRa, Rb, s10, eptn2
1519 * S8LDD XRa, Rb, s8, eptn3
1520 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1521 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1522 * S8SDI XRa, Rb, s8, eptn3
1523 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1524 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1525 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1526 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1527 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1528 * S32CPS XRa, XRb, XRc
1529 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1530 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1531 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1532 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1533 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1534 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1535 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1536 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1537 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1538 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1539 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1540 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1541 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1542 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1543 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1544 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1545 * Q8SLT XRa, XRb, XRc
1546 * Q8SLTU XRa, XRb, XRc
1547 * Q8MOVZ XRa, XRb, XRc Shift instructions
1548 * Q8MOVN XRa, XRb, XRc ------------------
1550 * D32SLL XRa, XRb, XRc, XRd, sft4
1551 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1552 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1553 * D32SARL XRa, XRb, XRc, sft4
1554 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1555 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1556 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1557 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1558 * Q16SLL XRa, XRb, XRc, XRd, sft4
1559 * Q16SLR XRa, XRb, XRc, XRd, sft4
1560 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1561 * ------------------------- Q16SLLV XRa, XRb, Rb
1562 * Q16SLRV XRa, XRb, Rb
1563 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1564 * S32ALN XRa, XRb, XRc, Rb
1565 * S32ALNI XRa, XRb, XRc, s3
1566 * S32LUI XRa, s8, optn3 Move instructions
1567 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1568 * S32EXTRV XRa, XRb, Rs, Rt
1569 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1570 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1573 * The opcode organization of MXU instructions
1574 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1576 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1577 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1578 * other bits up to the instruction level is as follows:
1583 * ┌─ 000000 ─ OPC_MXU_S32MADD
1584 * ├─ 000001 ─ OPC_MXU_S32MADDU
1585 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1588 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1589 * │ ├─ 001 ─ OPC_MXU_S32MIN
1590 * │ ├─ 010 ─ OPC_MXU_D16MAX
1591 * │ ├─ 011 ─ OPC_MXU_D16MIN
1592 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1593 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1594 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1595 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1596 * ├─ 000100 ─ OPC_MXU_S32MSUB
1597 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1598 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1599 * │ ├─ 001 ─ OPC_MXU_D16SLT
1600 * │ ├─ 010 ─ OPC_MXU_D16AVG
1601 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1602 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1603 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1604 * │ └─ 111 ─ OPC_MXU_Q8ADD
1607 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1608 * │ ├─ 010 ─ OPC_MXU_D16CPS
1609 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1610 * │ └─ 110 ─ OPC_MXU_Q16SAT
1611 * ├─ 001000 ─ OPC_MXU_D16MUL
1613 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1614 * │ └─ 01 ─ OPC_MXU_D16MULE
1615 * ├─ 001010 ─ OPC_MXU_D16MAC
1616 * ├─ 001011 ─ OPC_MXU_D16MACF
1617 * ├─ 001100 ─ OPC_MXU_D16MADL
1618 * ├─ 001101 ─ OPC_MXU_S16MAD
1619 * ├─ 001110 ─ OPC_MXU_Q16ADD
1620 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1621 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1622 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1625 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1626 * │ └─ 1 ─ OPC_MXU_S32STDR
1629 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1630 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1633 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1634 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1637 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1638 * │ └─ 1 ─ OPC_MXU_S32LDIR
1641 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1642 * │ └─ 1 ─ OPC_MXU_S32SDIR
1645 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1646 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1649 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1650 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1651 * ├─ 011000 ─ OPC_MXU_D32ADD
1653 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1654 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1655 * │ └─ 10 ─ OPC_MXU_D32ASUM
1656 * ├─ 011010 ─ <not assigned>
1658 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1659 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1660 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1663 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1664 * │ ├─ 01 ─ OPC_MXU_D8SUM
1665 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1666 * ├─ 011110 ─ <not assigned>
1667 * ├─ 011111 ─ <not assigned>
1668 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1669 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1670 * ├─ 100010 ─ OPC_MXU_S8LDD
1671 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1672 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
 1673 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
 1674 * │ ├─ 10 ─ OPC_MXU_S32EXTR
 1675 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1678 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1679 * │ ├─ 001 ─ OPC_MXU_S32ALN
1680 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1681 * │ ├─ 011 ─ OPC_MXU_S32LUI
1682 * │ ├─ 100 ─ OPC_MXU_S32NOR
1683 * │ ├─ 101 ─ OPC_MXU_S32AND
1684 * │ ├─ 110 ─ OPC_MXU_S32OR
1685 * │ └─ 111 ─ OPC_MXU_S32XOR
1688 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1689 * │ ├─ 001 ─ OPC_MXU_LXH
1690 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1691 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1692 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1693 * ├─ 101100 ─ OPC_MXU_S16LDI
1694 * ├─ 101101 ─ OPC_MXU_S16SDI
1695 * ├─ 101110 ─ OPC_MXU_S32M2I
1696 * ├─ 101111 ─ OPC_MXU_S32I2M
1697 * ├─ 110000 ─ OPC_MXU_D32SLL
1698 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1699 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1700 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
 1701 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 011 ─ OPC_MXU_D32SARV
 1702 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 100 ─ OPC_MXU_Q16SLLV
 1703 * │ ├─ 101 ─ OPC_MXU_Q16SLRV
 1704 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 111 ─ OPC_MXU_Q16SARV
1706 * ├─ 110111 ─ OPC_MXU_Q16SAR
1708 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1709 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1712 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1713 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1714 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1715 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1716 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1717 * │ └─ 101 ─ OPC_MXU_S32MOVN
1720 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1721 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1722 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1723 * ├─ 111100 ─ OPC_MXU_Q8MADL
1724 * ├─ 111101 ─ OPC_MXU_S32SFL
1725 * ├─ 111110 ─ OPC_MXU_Q8SAD
1726 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1731 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1732 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
/*
 * MXU major opcodes, held in bits 5..0 of the instruction word
 * (major opcode OPC_SPECIAL2).  Entries named OPC_MXU__POOLxx are
 * decoded further via the pool sub-tables that follow; the tree in
 * the comment above shows the full hierarchy.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,    /* slot taken by the non-MXU MUL */
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 (overlaps with CLZ) */
    /* not assigned 0x21 (overlaps with CLO) */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F (overlaps with SDBBP) */
};
/* MXU minor opcodes for OPC_MXU__POOL00 (bits 20..18). */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU minor opcodes for OPC_MXU__POOL01 (bits 20..18); 0x06 unused. */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU minor opcodes for OPC_MXU__POOL02; only even slots assigned. */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU minor opcodes for OPC_MXU__POOL03. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL04. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL05. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL06. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL07. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL08. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL09. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL10. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL11. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL12. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU minor opcodes for OPC_MXU__POOL13. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU minor opcodes for OPC_MXU__POOL14. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU minor opcodes for OPC_MXU__POOL15 (bits 15..14). */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU minor opcodes for OPC_MXU__POOL16 (bits 20..18). */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32LUI   = 0x03,
    OPC_MXU_S32NOR   = 0x04,
    OPC_MXU_S32AND   = 0x05,
    OPC_MXU_S32OR    = 0x06,
    OPC_MXU_S32XOR   = 0x07,
};

/*
 * MXU minor opcodes for OPC_MXU__POOL17.
 * NOTE(review): the OPC_MXU_LXB/LXH/LXW entries (000/001/011 per the
 * tree comment above) appear to have been lost from this listing —
 * restore them from the project tree before relying on this enum.
 */
enum {
    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,
};

/* MXU minor opcodes for OPC_MXU__POOL18 (bits 20..18). */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU minor opcodes for OPC_MXU__POOL19. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU minor opcodes for OPC_MXU__POOL20 (bits 20..18). */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/*
 * MXU minor opcodes for OPC_MXU__POOL21.
 * NOTE(review): the tree comment above labels Q8MACSU as bit pattern
 * "10" while this listing gave it 0x01 — verify against the manual.
 */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
2015 * Overview of the TX79-specific instruction set
2016 * =============================================
2018 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2019 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2020 * instructions and certain multimedia instructions (MMIs). These MMIs
2021 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2022 * or sixteen 8-bit paths.
2026 * The Toshiba TX System RISC TX79 Core Architecture manual,
2027 * https://wiki.qemu.org/File:C790.pdf
2029 * Three-Operand Multiply and Multiply-Add (4 instructions)
2030 * --------------------------------------------------------
2031 * MADD [rd,] rs, rt Multiply/Add
2032 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2033 * MULT [rd,] rs, rt Multiply (3-operand)
2034 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2036 * Multiply Instructions for Pipeline 1 (10 instructions)
2037 * ------------------------------------------------------
2038 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2039 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2040 * DIV1 rs, rt Divide Pipeline 1
2041 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2042 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2043 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2044 * MFHI1 rd Move From HI1 Register
2045 * MFLO1 rd Move From LO1 Register
2046 * MTHI1 rs Move To HI1 Register
2047 * MTLO1 rs Move To LO1 Register
2049 * Arithmetic (19 instructions)
2050 * ----------------------------
2051 * PADDB rd, rs, rt Parallel Add Byte
2052 * PSUBB rd, rs, rt Parallel Subtract Byte
2053 * PADDH rd, rs, rt Parallel Add Halfword
2054 * PSUBH rd, rs, rt Parallel Subtract Halfword
2055 * PADDW rd, rs, rt Parallel Add Word
2056 * PSUBW rd, rs, rt Parallel Subtract Word
2057 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2058 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2059 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2060 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2061 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2062 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2063 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2064 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2065 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2066 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2067 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2068 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2069 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2071 * Min/Max (4 instructions)
2072 * ------------------------
2073 * PMAXH rd, rs, rt Parallel Maximum Halfword
2074 * PMINH rd, rs, rt Parallel Minimum Halfword
2075 * PMAXW rd, rs, rt Parallel Maximum Word
2076 * PMINW rd, rs, rt Parallel Minimum Word
2078 * Absolute (2 instructions)
2079 * -------------------------
2080 * PABSH rd, rt Parallel Absolute Halfword
2081 * PABSW rd, rt Parallel Absolute Word
2083 * Logical (4 instructions)
2084 * ------------------------
2085 * PAND rd, rs, rt Parallel AND
2086 * POR rd, rs, rt Parallel OR
2087 * PXOR rd, rs, rt Parallel XOR
2088 * PNOR rd, rs, rt Parallel NOR
2090 * Shift (9 instructions)
2091 * ----------------------
2092 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2093 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2094 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2095 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2096 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2097 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2098 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2099 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2100 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2102 * Compare (6 instructions)
2103 * ------------------------
2104 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2105 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2106 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2107 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2108 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2109 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2111 * LZC (1 instruction)
2112 * -------------------
2113 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2115 * Quadword Load and Store (2 instructions)
2116 * ----------------------------------------
2117 * LQ rt, offset(base) Load Quadword
2118 * SQ rt, offset(base) Store Quadword
2120 * Multiply and Divide (19 instructions)
2121 * -------------------------------------
2122 * PMULTW rd, rs, rt Parallel Multiply Word
2123 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2124 * PDIVW rs, rt Parallel Divide Word
2125 * PDIVUW rs, rt Parallel Divide Unsigned Word
2126 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2127 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2128 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2129 * PMULTH rd, rs, rt Parallel Multiply Halfword
2130 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2131 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2132 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2133 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2134 * PDIVBW rs, rt Parallel Divide Broadcast Word
2135 * PMFHI rd Parallel Move From HI Register
2136 * PMFLO rd Parallel Move From LO Register
2137 * PMTHI rs Parallel Move To HI Register
2138 * PMTLO rs Parallel Move To LO Register
2139 * PMFHL rd Parallel Move From HI/LO Register
2140 * PMTHL rs Parallel Move To HI/LO Register
2142 * Pack/Extend (11 instructions)
2143 * -----------------------------
2144 * PPAC5 rd, rt Parallel Pack to 5 bits
2145 * PPACB rd, rs, rt Parallel Pack to Byte
2146 * PPACH rd, rs, rt Parallel Pack to Halfword
2147 * PPACW rd, rs, rt Parallel Pack to Word
2148 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2149 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2150 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2151 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2152 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2153 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2154 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2156 * Others (16 instructions)
2157 * ------------------------
2158 * PCPYH rd, rt Parallel Copy Halfword
2159 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2160 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2161 * PREVH rd, rt Parallel Reverse Halfword
2162 * PINTH rd, rs, rt Parallel Interleave Halfword
2163 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2164 * PEXEH rd, rt Parallel Exchange Even Halfword
2165 * PEXCH rd, rt Parallel Exchange Center Halfword
2166 * PEXEW rd, rt Parallel Exchange Even Word
2167 * PEXCW rd, rt Parallel Exchange Center Word
2168 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2169 * MFSA rd Move from Shift Amount Register
2170 * MTSA rs Move to Shift Amount Register
2171 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2172 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2173 * PROT3W rd, rt Parallel Rotate 3 Words
2175 * MMI (MultiMedia Instruction) encodings
2176 * ======================================
2178 * MMI instructions encoding table keys:
2180 * * This code is reserved for future use. An attempt to execute it
2181 * causes a Reserved Instruction exception.
2182 * % This code indicates an instruction class. The instruction word
2183 * must be further decoded by examining additional tables that show
2184 * the values for other instruction fields.
2185 * # This code is reserved for the unsupported instructions DMULT,
2186 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2187 * to execute it causes a Reserved Instruction exception.
2189 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2192 * +--------+----------------------------------------+
2194 * +--------+----------------------------------------+
2196 * opcode bits 28..26
2197 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2198 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2199 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2200 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2201 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2202 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2203 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2204 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2205 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2206 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2207 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/* TX79 major opcodes that carry MMI/LQ/SQ instructions (bits 31..26). */
enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2217 * MMI instructions with opcode field = MMI:
2220 * +--------+-------------------------------+--------+
2221 * | MMI | |function|
2222 * +--------+-------------------------------+--------+
2224 * function bits 2..0
2225 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2226 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2227 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2228 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2229 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2230 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2231 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2232 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2233 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2234 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2235 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2238 #define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2240 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2241 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2242 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2243 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2244 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2245 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2246 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2247 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2248 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2249 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2250 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2251 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2252 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2253 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2254 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2255 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2256 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2268 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2271 * +--------+----------------------+--------+--------+
2272 * | MMI | |function| MMI0 |
2273 * +--------+----------------------+--------+--------+
2275 * function bits 7..6
2276 * bits | 0 | 1 | 2 | 3
2277 * 10..8 | 00 | 01 | 10 | 11
2278 * -------+-------+-------+-------+-------
2279 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2280 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2281 * 2 010 | PADDB | PSUBB | PCGTB | *
2282 * 3 011 | * | * | * | *
2283 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2284 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2285 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2286 * 7 111 | * | * | PEXT5 | PPAC5
2289 #define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2291 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2292 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2293 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2294 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2319 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2322 * +--------+----------------------+--------+--------+
2323 * | MMI | |function| MMI1 |
2324 * +--------+----------------------+--------+--------+
2326 * function bits 7..6
2327 * bits | 0 | 1 | 2 | 3
2328 * 10..8 | 00 | 01 | 10 | 11
2329 * -------+-------+-------+-------+-------
2330 * 0 000 | * | PABSW | PCEQW | PMINW
2331 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2332 * 2 010 | * | * | PCEQB | *
2333 * 3 011 | * | * | * | *
2334 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2335 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2336 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2337 * 7 111 | * | * | * | *
2340 #define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2342 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2343 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2344 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2345 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2363 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2366 * +--------+----------------------+--------+--------+
2367 * | MMI | |function| MMI2 |
2368 * +--------+----------------------+--------+--------+
2370 * function bits 7..6
2371 * bits | 0 | 1 | 2 | 3
2372 * 10..8 | 00 | 01 | 10 | 11
2373 * -------+-------+-------+-------+-------
2374 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2375 * 1 001 | PMSUBW| * | * | *
2376 * 2 010 | PMFHI | PMFLO | PINTH | *
2377 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2378 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2379 * 5 101 | PMSUBH| PHMSBH| * | *
2380 * 6 110 | * | * | PEXEH | PREVH
2381 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2384 #define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2386 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2387 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2388 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2389 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2411 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2414 * +--------+----------------------+--------+--------+
2415 * | MMI | |function| MMI3 |
2416 * +--------+----------------------+--------+--------+
2418 * function bits 7..6
2419 * bits | 0 | 1 | 2 | 3
2420 * 10..8 | 00 | 01 | 10 | 11
2421 * -------+-------+-------+-------+-------
2422 * 0 000 |PMADDUW| * | * | PSRAVW
2423 * 1 001 | * | * | * | *
2424 * 2 010 | PMTHI | PMTLO | PINTEH| *
2425 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2426 * 4 100 | * | * | POR | PNOR
2427 * 5 101 | * | * | * | *
2428 * 6 110 | * | * | PEXCH | PCPYH
2429 * 7 111 | * | * | PEXCW | *
2432 #define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2434 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2435 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2436 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2437 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2449 /* global register indices */
2450 static TCGv cpu_gpr
[32], cpu_PC
;
2451 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2452 static TCGv cpu_dspctrl
, btarget
, bcond
;
2453 static TCGv_i32 hflags
;
2454 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2455 static TCGv_i64 fpu_f64
[32];
2456 static TCGv_i64 msa_wr_d
[64];
2458 #if defined(TARGET_MIPS64)
2459 /* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
2460 static TCGv_i64 cpu_mmr
[32];
2463 #if !defined(TARGET_MIPS64)
2465 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2469 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: each boxes its trailing immediate argument(s)
 * into TCGv_i32 temporaries, invokes gen_helper_<name> with cpu_env as
 * the first argument, then frees the temporaries.  The name encodes the
 * shape: <returns>e<TCG value args>i = immediates (e.g. 1e2i: one
 * return, two TCG args, one immediate).
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
/*
 * Per-translation decoder state, threaded through every gen_* routine.
 * saved_pc/saved_hflags mirror the last values flushed to the TCG
 * globals so save_cpu_state() can skip redundant stores.
 * NOTE(review): several member declarations were lost from this
 * listing (e.g. the memory-access index the comment below refers to,
 * and the instruction word that MIPS_INVAL reads as ctx->opcode) —
 * restore them from the project tree before relying on this layout.
 */
2513 typedef struct DisasContext
{
2514     DisasContextBase base
;
2515     target_ulong saved_pc
;
2516     target_ulong page_start
;
2518     uint64_t insn_flags
;
2519     int32_t CP0_Config1
;
2520     int32_t CP0_Config2
;
2521     int32_t CP0_Config3
;
2522     int32_t CP0_Config5
;
2523     /* Routine used to access memory */
2525     TCGMemOp default_tcg_memop_mask
;
2526     uint32_t hflags
, saved_hflags
;
2527     target_ulong btarget
;
2538     int CP0_LLAddr_shift
;
/* Translator exit reasons beyond the generic DISAS_TARGET_* codes. */
2548 #define DISAS_STOP       DISAS_TARGET_0
2549 #define DISAS_EXIT       DISAS_TARGET_1
/* GPR names in the standard o32/n64 ABI order, indexed 0..31. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

/* Names for the DSP accumulator HI halves. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

/* Names for the DSP accumulator LO halves. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

/* FPU register names, indexed 0..31. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* MSA register names: two 64-bit halves (d0/d1) per 128-bit w register. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};

#if !defined(TARGET_MIPS64)
/* MXU register names; XR0 reads as zero and is omitted. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
/* Disassembly trace output, compiled out unless MIPS_DEBUG_DISAS is set. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid/unhandled opcode together with its decode fields. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2617 /* General purpose registers moves. */
2618 static inline void gen_load_gpr (TCGv t
, int reg
)
2621 tcg_gen_movi_tl(t
, 0);
2623 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2626 static inline void gen_store_gpr (TCGv t
, int reg
)
2629 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2632 /* Moves to/from shadow registers. */
2633 static inline void gen_load_srsgpr (int from
, int to
)
2635 TCGv t0
= tcg_temp_new();
2638 tcg_gen_movi_tl(t0
, 0);
2640 TCGv_i32 t2
= tcg_temp_new_i32();
2641 TCGv_ptr addr
= tcg_temp_new_ptr();
2643 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2644 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2645 tcg_gen_andi_i32(t2
, t2
, 0xf);
2646 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2647 tcg_gen_ext_i32_ptr(addr
, t2
);
2648 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2650 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2651 tcg_temp_free_ptr(addr
);
2652 tcg_temp_free_i32(t2
);
2654 gen_store_gpr(t0
, to
);
2658 static inline void gen_store_srsgpr (int from
, int to
)
2661 TCGv t0
= tcg_temp_new();
2662 TCGv_i32 t2
= tcg_temp_new_i32();
2663 TCGv_ptr addr
= tcg_temp_new_ptr();
2665 gen_load_gpr(t0
, from
);
2666 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2667 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2668 tcg_gen_andi_i32(t2
, t2
, 0xf);
2669 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2670 tcg_gen_ext_i32_ptr(addr
, t2
);
2671 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2673 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2674 tcg_temp_free_ptr(addr
);
2675 tcg_temp_free_i32(t2
);
2680 #if !defined(TARGET_MIPS64)
2681 /* MXU General purpose registers moves. */
2682 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2685 tcg_gen_movi_tl(t
, 0);
2686 } else if (reg
<= 15) {
2687 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2691 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2693 if (reg
> 0 && reg
<= 15) {
2694 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2698 /* MXU control register moves. */
2699 static inline void gen_load_mxu_cr(TCGv t
)
2701 tcg_gen_mov_tl(t
, mxu_CR
);
2704 static inline void gen_store_mxu_cr(TCGv t
)
2706 /* TODO: Add handling of RW rules for MXU_CR. */
2707 tcg_gen_mov_tl(mxu_CR
, t
);
2713 static inline void gen_save_pc(target_ulong pc
)
2715 tcg_gen_movi_tl(cpu_PC
, pc
);
2718 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2720 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2721 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2722 gen_save_pc(ctx
->base
.pc_next
);
2723 ctx
->saved_pc
= ctx
->base
.pc_next
;
2725 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2726 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2727 ctx
->saved_hflags
= ctx
->hflags
;
2728 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2734 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2740 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2742 ctx
->saved_hflags
= ctx
->hflags
;
2743 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2749 ctx
->btarget
= env
->btarget
;
2754 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2756 TCGv_i32 texcp
= tcg_const_i32(excp
);
2757 TCGv_i32 terr
= tcg_const_i32(err
);
2758 save_cpu_state(ctx
, 1);
2759 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2760 tcg_temp_free_i32(terr
);
2761 tcg_temp_free_i32(texcp
);
2762 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2765 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2767 gen_helper_0e0i(raise_exception
, excp
);
2770 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2772 generate_exception_err(ctx
, excp
, 0);
2775 /* Floating point register moves. */
2776 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2778 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2779 generate_exception(ctx
, EXCP_RI
);
2781 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2784 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2787 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2788 generate_exception(ctx
, EXCP_RI
);
2790 t64
= tcg_temp_new_i64();
2791 tcg_gen_extu_i32_i64(t64
, t
);
2792 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2793 tcg_temp_free_i64(t64
);
2796 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2798 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2799 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2801 gen_load_fpr32(ctx
, t
, reg
| 1);
2805 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2807 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2808 TCGv_i64 t64
= tcg_temp_new_i64();
2809 tcg_gen_extu_i32_i64(t64
, t
);
2810 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2811 tcg_temp_free_i64(t64
);
2813 gen_store_fpr32(ctx
, t
, reg
| 1);
2817 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2819 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2820 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2822 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2826 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2828 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2829 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2832 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2833 t0
= tcg_temp_new_i64();
2834 tcg_gen_shri_i64(t0
, t
, 32);
2835 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2836 tcg_temp_free_i64(t0
);
/* Map FP condition code index 'cc' to its bit position in FCSR:
   cc 0 is bit 23, cc 1..7 are bits 25..31 (bit 24 is the FS bit). */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2848 /* Addresses computation */
2849 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2851 tcg_gen_add_tl(ret
, arg0
, arg1
);
2853 #if defined(TARGET_MIPS64)
2854 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2855 tcg_gen_ext32s_i64(ret
, ret
);
2860 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2863 tcg_gen_addi_tl(ret
, base
, ofs
);
2865 #if defined(TARGET_MIPS64)
2866 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2867 tcg_gen_ext32s_i64(ret
, ret
);
2872 /* Addresses computation (translation time) */
2873 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2876 target_long sum
= base
+ offset
;
2878 #if defined(TARGET_MIPS64)
2879 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2886 /* Sign-extract the low 32-bits to a target_long. */
2887 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2889 #if defined(TARGET_MIPS64)
2890 tcg_gen_ext32s_i64(ret
, arg
);
2892 tcg_gen_extrl_i64_i32(ret
, arg
);
2896 /* Sign-extract the high 32-bits to a target_long. */
2897 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2899 #if defined(TARGET_MIPS64)
2900 tcg_gen_sari_i64(ret
, arg
, 32);
2902 tcg_gen_extrh_i64_i32(ret
, arg
);
2906 static inline void check_cp0_enabled(DisasContext
*ctx
)
2908 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2909 generate_exception_err(ctx
, EXCP_CpU
, 0);
2912 static inline void check_cp1_enabled(DisasContext
*ctx
)
2914 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2915 generate_exception_err(ctx
, EXCP_CpU
, 1);
2918 /* Verify that the processor is running with COP1X instructions enabled.
2919 This is associated with the nabla symbol in the MIPS32 and MIPS64
2922 static inline void check_cop1x(DisasContext
*ctx
)
2924 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2925 generate_exception_end(ctx
, EXCP_RI
);
2928 /* Verify that the processor is running with 64-bit floating-point
2929 operations enabled. */
2931 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2933 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2934 generate_exception_end(ctx
, EXCP_RI
);
2938 * Verify if floating point register is valid; an operation is not defined
2939 * if bit 0 of any register specification is set and the FR bit in the
2940 * Status register equals zero, since the register numbers specify an
2941 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2942 * in the Status register equals one, both even and odd register numbers
2943 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2945 * Multiple 64 bit wide registers can be checked by calling
2946 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2948 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2950 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2951 generate_exception_end(ctx
, EXCP_RI
);
2954 /* Verify that the processor is running with DSP instructions enabled.
2955 This is enabled by CP0 Status register MX(24) bit.
2958 static inline void check_dsp(DisasContext
*ctx
)
2960 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2961 if (ctx
->insn_flags
& ASE_DSP
) {
2962 generate_exception_end(ctx
, EXCP_DSPDIS
);
2964 generate_exception_end(ctx
, EXCP_RI
);
2969 static inline void check_dsp_r2(DisasContext
*ctx
)
2971 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2972 if (ctx
->insn_flags
& ASE_DSP
) {
2973 generate_exception_end(ctx
, EXCP_DSPDIS
);
2975 generate_exception_end(ctx
, EXCP_RI
);
2980 static inline void check_dsp_r3(DisasContext
*ctx
)
2982 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2983 if (ctx
->insn_flags
& ASE_DSP
) {
2984 generate_exception_end(ctx
, EXCP_DSPDIS
);
2986 generate_exception_end(ctx
, EXCP_RI
);
2991 /* This code generates a "reserved instruction" exception if the
2992 CPU does not support the instruction set corresponding to flags. */
2993 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2995 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2996 generate_exception_end(ctx
, EXCP_RI
);
3000 /* This code generates a "reserved instruction" exception if the
3001 CPU has corresponding flag set which indicates that the instruction
3002 has been removed. */
3003 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
3005 if (unlikely(ctx
->insn_flags
& flags
)) {
3006 generate_exception_end(ctx
, EXCP_RI
);
3011 * The Linux kernel traps certain reserved instruction exceptions to
3012 * emulate the corresponding instructions. QEMU is the kernel in user
3013 * mode, so those traps are emulated by accepting the instructions.
3015 * A reserved instruction exception is generated for flagged CPUs if
3016 * QEMU runs in system mode.
3018 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3020 #ifndef CONFIG_USER_ONLY
3021 check_insn_opc_removed(ctx
, flags
);
3025 /* This code generates a "reserved instruction" exception if the
3026 CPU does not support 64-bit paired-single (PS) floating point data type */
3027 static inline void check_ps(DisasContext
*ctx
)
3029 if (unlikely(!ctx
->ps
)) {
3030 generate_exception(ctx
, EXCP_RI
);
3032 check_cp1_64bitmode(ctx
);
3035 #ifdef TARGET_MIPS64
3036 /* This code generates a "reserved instruction" exception if 64-bit
3037 instructions are not enabled. */
3038 static inline void check_mips_64(DisasContext
*ctx
)
3040 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
3041 generate_exception_end(ctx
, EXCP_RI
);
3045 #ifndef CONFIG_USER_ONLY
3046 static inline void check_mvh(DisasContext
*ctx
)
3048 if (unlikely(!ctx
->mvh
)) {
3049 generate_exception(ctx
, EXCP_RI
);
3055 * This code generates a "reserved instruction" exception if the
3056 * Config5 XNP bit is set.
3058 static inline void check_xnp(DisasContext
*ctx
)
3060 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3061 generate_exception_end(ctx
, EXCP_RI
);
3065 #ifndef CONFIG_USER_ONLY
3067 * This code generates a "reserved instruction" exception if the
3068 * Config3 PW bit is NOT set.
3070 static inline void check_pw(DisasContext
*ctx
)
3072 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3073 generate_exception_end(ctx
, EXCP_RI
);
3079 * This code generates a "reserved instruction" exception if the
3080 * Config3 MT bit is NOT set.
3082 static inline void check_mt(DisasContext
*ctx
)
3084 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3085 generate_exception_end(ctx
, EXCP_RI
);
3089 #ifndef CONFIG_USER_ONLY
3091 * This code generates a "coprocessor unusable" exception if CP0 is not
3092 * available, and, if that is not the case, generates a "reserved instruction"
3093 * exception if the Config5 MT bit is NOT set. This is needed for availability
3094 * control of some of MT ASE instructions.
3096 static inline void check_cp0_mt(DisasContext
*ctx
)
3098 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3099 generate_exception_err(ctx
, EXCP_CpU
, 0);
3101 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3102 generate_exception_err(ctx
, EXCP_RI
, 0);
3109 * This code generates a "reserved instruction" exception if the
3110 * Config5 NMS bit is set.
3112 static inline void check_nms(DisasContext
*ctx
)
3114 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3115 generate_exception_end(ctx
, EXCP_RI
);
3120 * This code generates a "reserved instruction" exception if the
3121 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3122 * Config2 TL, and Config5 L2C are unset.
3124 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3126 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3127 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3128 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3129 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3130 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3131 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
3133 generate_exception_end(ctx
, EXCP_RI
);
3138 * This code generates a "reserved instruction" exception if the
3139 * Config5 EVA bit is NOT set.
3141 static inline void check_eva(DisasContext
*ctx
)
3143 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3144 generate_exception_end(ctx
, EXCP_RI
);
3149 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3150 calling interface for 32 and 64-bit FPRs. No sense in changing
3151 all callers for gen_load_fpr32 when we need the CTX parameter for
3153 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3154 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3155 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3156 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3157 int ft, int fs, int cc) \
3159 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3160 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3169 check_cp1_registers(ctx, fs | ft); \
3177 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3178 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3180 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3181 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3182 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3183 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3184 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3185 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3186 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3187 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3188 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3189 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3190 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3191 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3192 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3193 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3194 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3195 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3198 tcg_temp_free_i##bits (fp0); \
3199 tcg_temp_free_i##bits (fp1); \
3202 FOP_CONDS(, 0, d
, FMT_D
, 64)
3203 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3204 FOP_CONDS(, 0, s
, FMT_S
, 32)
3205 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3206 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3207 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3210 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3211 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3212 int ft, int fs, int fd) \
3214 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3215 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3216 if (ifmt == FMT_D) { \
3217 check_cp1_registers(ctx, fs | ft | fd); \
3219 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3220 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3223 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3226 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3229 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3232 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3235 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3238 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3241 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3244 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3247 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3250 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3253 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3256 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3259 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3262 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3265 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3268 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3271 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3274 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3277 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3280 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3283 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3286 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3292 tcg_temp_free_i ## bits (fp0); \
3293 tcg_temp_free_i ## bits (fp1); \
3296 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3297 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3299 #undef gen_ldcmp_fpr32
3300 #undef gen_ldcmp_fpr64
3302 /* load/store instructions. */
3303 #ifdef CONFIG_USER_ONLY
3304 #define OP_LD_ATOMIC(insn,fname) \
3305 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3306 DisasContext *ctx) \
3308 TCGv t0 = tcg_temp_new(); \
3309 tcg_gen_mov_tl(t0, arg1); \
3310 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3311 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3312 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3313 tcg_temp_free(t0); \
3316 #define OP_LD_ATOMIC(insn,fname) \
3317 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3318 DisasContext *ctx) \
3320 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3323 OP_LD_ATOMIC(ll
,ld32s
);
3324 #if defined(TARGET_MIPS64)
3325 OP_LD_ATOMIC(lld
,ld64
);
3329 #ifdef CONFIG_USER_ONLY
3330 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3331 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3332 DisasContext *ctx) \
3334 TCGv t0 = tcg_temp_new(); \
3335 TCGLabel *l1 = gen_new_label(); \
3336 TCGLabel *l2 = gen_new_label(); \
3338 tcg_gen_andi_tl(t0, arg2, almask); \
3339 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3340 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3341 generate_exception(ctx, EXCP_AdES); \
3342 gen_set_label(l1); \
3343 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3344 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3345 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3346 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3347 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3348 generate_exception_end(ctx, EXCP_SC); \
3349 gen_set_label(l2); \
3350 tcg_gen_movi_tl(t0, 0); \
3351 gen_store_gpr(t0, rt); \
3352 tcg_temp_free(t0); \
3355 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3356 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3357 DisasContext *ctx) \
3359 TCGv t0 = tcg_temp_new(); \
3360 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3361 gen_store_gpr(t0, rt); \
3362 tcg_temp_free(t0); \
3365 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3366 #if defined(TARGET_MIPS64)
3367 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3371 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3372 int base
, int offset
)
3375 tcg_gen_movi_tl(addr
, offset
);
3376 } else if (offset
== 0) {
3377 gen_load_gpr(addr
, base
);
3379 tcg_gen_movi_tl(addr
, offset
);
3380 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3384 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3386 target_ulong pc
= ctx
->base
.pc_next
;
3388 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3389 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3394 pc
&= ~(target_ulong
)3;
3399 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3400 int rt
, int base
, int offset
)
3403 int mem_idx
= ctx
->mem_idx
;
3405 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3406 /* Loongson CPU uses a load to zero register for prefetch.
3407 We emulate it as a NOP. On other CPU we must perform the
3408 actual memory access. */
3412 t0
= tcg_temp_new();
3413 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3416 #if defined(TARGET_MIPS64)
3418 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3419 ctx
->default_tcg_memop_mask
);
3420 gen_store_gpr(t0
, rt
);
3423 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3424 ctx
->default_tcg_memop_mask
);
3425 gen_store_gpr(t0
, rt
);
3429 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3430 gen_store_gpr(t0
, rt
);
3433 t1
= tcg_temp_new();
3434 /* Do a byte access to possibly trigger a page
3435 fault with the unaligned address. */
3436 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3437 tcg_gen_andi_tl(t1
, t0
, 7);
3438 #ifndef TARGET_WORDS_BIGENDIAN
3439 tcg_gen_xori_tl(t1
, t1
, 7);
3441 tcg_gen_shli_tl(t1
, t1
, 3);
3442 tcg_gen_andi_tl(t0
, t0
, ~7);
3443 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3444 tcg_gen_shl_tl(t0
, t0
, t1
);
3445 t2
= tcg_const_tl(-1);
3446 tcg_gen_shl_tl(t2
, t2
, t1
);
3447 gen_load_gpr(t1
, rt
);
3448 tcg_gen_andc_tl(t1
, t1
, t2
);
3450 tcg_gen_or_tl(t0
, t0
, t1
);
3452 gen_store_gpr(t0
, rt
);
3455 t1
= tcg_temp_new();
3456 /* Do a byte access to possibly trigger a page
3457 fault with the unaligned address. */
3458 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3459 tcg_gen_andi_tl(t1
, t0
, 7);
3460 #ifdef TARGET_WORDS_BIGENDIAN
3461 tcg_gen_xori_tl(t1
, t1
, 7);
3463 tcg_gen_shli_tl(t1
, t1
, 3);
3464 tcg_gen_andi_tl(t0
, t0
, ~7);
3465 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3466 tcg_gen_shr_tl(t0
, t0
, t1
);
3467 tcg_gen_xori_tl(t1
, t1
, 63);
3468 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3469 tcg_gen_shl_tl(t2
, t2
, t1
);
3470 gen_load_gpr(t1
, rt
);
3471 tcg_gen_and_tl(t1
, t1
, t2
);
3473 tcg_gen_or_tl(t0
, t0
, t1
);
3475 gen_store_gpr(t0
, rt
);
3478 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3479 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3481 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3482 gen_store_gpr(t0
, rt
);
3486 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3487 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3489 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3490 gen_store_gpr(t0
, rt
);
3493 mem_idx
= MIPS_HFLAG_UM
;
3496 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3497 ctx
->default_tcg_memop_mask
);
3498 gen_store_gpr(t0
, rt
);
3501 mem_idx
= MIPS_HFLAG_UM
;
3504 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3505 ctx
->default_tcg_memop_mask
);
3506 gen_store_gpr(t0
, rt
);
3509 mem_idx
= MIPS_HFLAG_UM
;
3512 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3513 ctx
->default_tcg_memop_mask
);
3514 gen_store_gpr(t0
, rt
);
3517 mem_idx
= MIPS_HFLAG_UM
;
3520 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3521 gen_store_gpr(t0
, rt
);
3524 mem_idx
= MIPS_HFLAG_UM
;
3527 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3528 gen_store_gpr(t0
, rt
);
3531 mem_idx
= MIPS_HFLAG_UM
;
3534 t1
= tcg_temp_new();
3535 /* Do a byte access to possibly trigger a page
3536 fault with the unaligned address. */
3537 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3538 tcg_gen_andi_tl(t1
, t0
, 3);
3539 #ifndef TARGET_WORDS_BIGENDIAN
3540 tcg_gen_xori_tl(t1
, t1
, 3);
3542 tcg_gen_shli_tl(t1
, t1
, 3);
3543 tcg_gen_andi_tl(t0
, t0
, ~3);
3544 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3545 tcg_gen_shl_tl(t0
, t0
, t1
);
3546 t2
= tcg_const_tl(-1);
3547 tcg_gen_shl_tl(t2
, t2
, t1
);
3548 gen_load_gpr(t1
, rt
);
3549 tcg_gen_andc_tl(t1
, t1
, t2
);
3551 tcg_gen_or_tl(t0
, t0
, t1
);
3553 tcg_gen_ext32s_tl(t0
, t0
);
3554 gen_store_gpr(t0
, rt
);
3557 mem_idx
= MIPS_HFLAG_UM
;
3560 t1
= tcg_temp_new();
3561 /* Do a byte access to possibly trigger a page
3562 fault with the unaligned address. */
3563 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3564 tcg_gen_andi_tl(t1
, t0
, 3);
3565 #ifdef TARGET_WORDS_BIGENDIAN
3566 tcg_gen_xori_tl(t1
, t1
, 3);
3568 tcg_gen_shli_tl(t1
, t1
, 3);
3569 tcg_gen_andi_tl(t0
, t0
, ~3);
3570 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3571 tcg_gen_shr_tl(t0
, t0
, t1
);
3572 tcg_gen_xori_tl(t1
, t1
, 31);
3573 t2
= tcg_const_tl(0xfffffffeull
);
3574 tcg_gen_shl_tl(t2
, t2
, t1
);
3575 gen_load_gpr(t1
, rt
);
3576 tcg_gen_and_tl(t1
, t1
, t2
);
3578 tcg_gen_or_tl(t0
, t0
, t1
);
3580 tcg_gen_ext32s_tl(t0
, t0
);
3581 gen_store_gpr(t0
, rt
);
3584 mem_idx
= MIPS_HFLAG_UM
;
3588 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3589 gen_store_gpr(t0
, rt
);
3595 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3596 uint32_t reg1
, uint32_t reg2
)
3598 TCGv taddr
= tcg_temp_new();
3599 TCGv_i64 tval
= tcg_temp_new_i64();
3600 TCGv tmp1
= tcg_temp_new();
3601 TCGv tmp2
= tcg_temp_new();
3603 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3604 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3605 #ifdef TARGET_WORDS_BIGENDIAN
3606 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3608 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3610 gen_store_gpr(tmp1
, reg1
);
3611 tcg_temp_free(tmp1
);
3612 gen_store_gpr(tmp2
, reg2
);
3613 tcg_temp_free(tmp2
);
3614 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3615 tcg_temp_free_i64(tval
);
3616 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3617 tcg_temp_free(taddr
);
3621 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3622 int base
, int offset
)
3624 TCGv t0
= tcg_temp_new();
3625 TCGv t1
= tcg_temp_new();
3626 int mem_idx
= ctx
->mem_idx
;
3628 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3629 gen_load_gpr(t1
, rt
);
3631 #if defined(TARGET_MIPS64)
3633 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3634 ctx
->default_tcg_memop_mask
);
3637 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3640 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3644 mem_idx
= MIPS_HFLAG_UM
;
3647 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3648 ctx
->default_tcg_memop_mask
);
3651 mem_idx
= MIPS_HFLAG_UM
;
3654 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3655 ctx
->default_tcg_memop_mask
);
3658 mem_idx
= MIPS_HFLAG_UM
;
3661 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3664 mem_idx
= MIPS_HFLAG_UM
;
3667 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3670 mem_idx
= MIPS_HFLAG_UM
;
3673 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3681 /* Store conditional */
3682 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3683 int base
, int16_t offset
)
3686 int mem_idx
= ctx
->mem_idx
;
3688 #ifdef CONFIG_USER_ONLY
3689 t0
= tcg_temp_local_new();
3690 t1
= tcg_temp_local_new();
3692 t0
= tcg_temp_new();
3693 t1
= tcg_temp_new();
3695 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3696 gen_load_gpr(t1
, rt
);
3698 #if defined(TARGET_MIPS64)
3701 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3705 mem_idx
= MIPS_HFLAG_UM
;
3709 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3716 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3717 uint32_t reg1
, uint32_t reg2
, bool eva
)
3719 TCGv taddr
= tcg_temp_local_new();
3720 TCGv lladdr
= tcg_temp_local_new();
3721 TCGv_i64 tval
= tcg_temp_new_i64();
3722 TCGv_i64 llval
= tcg_temp_new_i64();
3723 TCGv_i64 val
= tcg_temp_new_i64();
3724 TCGv tmp1
= tcg_temp_new();
3725 TCGv tmp2
= tcg_temp_new();
3726 TCGLabel
*lab_fail
= gen_new_label();
3727 TCGLabel
*lab_done
= gen_new_label();
3729 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3731 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3732 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3734 gen_load_gpr(tmp1
, reg1
);
3735 gen_load_gpr(tmp2
, reg2
);
3737 #ifdef TARGET_WORDS_BIGENDIAN
3738 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3740 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3743 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3744 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3745 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3747 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3749 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3751 gen_set_label(lab_fail
);
3754 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3756 gen_set_label(lab_done
);
3757 tcg_gen_movi_tl(lladdr
, -1);
3758 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3761 /* Load and store */
3762 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3765 /* Don't do NOP if destination is zero: we must perform the actual
3770 TCGv_i32 fp0
= tcg_temp_new_i32();
3771 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3772 ctx
->default_tcg_memop_mask
);
3773 gen_store_fpr32(ctx
, fp0
, ft
);
3774 tcg_temp_free_i32(fp0
);
3779 TCGv_i32 fp0
= tcg_temp_new_i32();
3780 gen_load_fpr32(ctx
, fp0
, ft
);
3781 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3782 ctx
->default_tcg_memop_mask
);
3783 tcg_temp_free_i32(fp0
);
3788 TCGv_i64 fp0
= tcg_temp_new_i64();
3789 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3790 ctx
->default_tcg_memop_mask
);
3791 gen_store_fpr64(ctx
, fp0
, ft
);
3792 tcg_temp_free_i64(fp0
);
3797 TCGv_i64 fp0
= tcg_temp_new_i64();
3798 gen_load_fpr64(ctx
, fp0
, ft
);
3799 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3800 ctx
->default_tcg_memop_mask
);
3801 tcg_temp_free_i64(fp0
);
3805 MIPS_INVAL("flt_ldst");
3806 generate_exception_end(ctx
, EXCP_RI
);
3811 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3812 int rs
, int16_t imm
)
3814 TCGv t0
= tcg_temp_new();
3816 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3817 check_cp1_enabled(ctx
);
3821 check_insn(ctx
, ISA_MIPS2
);
3824 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3825 gen_flt_ldst(ctx
, op
, rt
, t0
);
3828 generate_exception_err(ctx
, EXCP_CpU
, 1);
3833 /* Arithmetic with immediate operand */
3834 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3835 int rt
, int rs
, int imm
)
3837 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3839 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3840 /* If no destination, treat it as a NOP.
3841 For addi, we must generate the overflow exception when needed. */
3847 TCGv t0
= tcg_temp_local_new();
3848 TCGv t1
= tcg_temp_new();
3849 TCGv t2
= tcg_temp_new();
3850 TCGLabel
*l1
= gen_new_label();
3852 gen_load_gpr(t1
, rs
);
3853 tcg_gen_addi_tl(t0
, t1
, uimm
);
3854 tcg_gen_ext32s_tl(t0
, t0
);
3856 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3857 tcg_gen_xori_tl(t2
, t0
, uimm
);
3858 tcg_gen_and_tl(t1
, t1
, t2
);
3860 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3862 /* operands of same sign, result different sign */
3863 generate_exception(ctx
, EXCP_OVERFLOW
);
3865 tcg_gen_ext32s_tl(t0
, t0
);
3866 gen_store_gpr(t0
, rt
);
3872 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3873 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3875 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3878 #if defined(TARGET_MIPS64)
3881 TCGv t0
= tcg_temp_local_new();
3882 TCGv t1
= tcg_temp_new();
3883 TCGv t2
= tcg_temp_new();
3884 TCGLabel
*l1
= gen_new_label();
3886 gen_load_gpr(t1
, rs
);
3887 tcg_gen_addi_tl(t0
, t1
, uimm
);
3889 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3890 tcg_gen_xori_tl(t2
, t0
, uimm
);
3891 tcg_gen_and_tl(t1
, t1
, t2
);
3893 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3895 /* operands of same sign, result different sign */
3896 generate_exception(ctx
, EXCP_OVERFLOW
);
3898 gen_store_gpr(t0
, rt
);
3904 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3906 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3913 /* Logic with immediate operand */
3914 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3915 int rt
, int rs
, int16_t imm
)
3920 /* If no destination, treat it as a NOP. */
3923 uimm
= (uint16_t)imm
;
3926 if (likely(rs
!= 0))
3927 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3929 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3933 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3935 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3938 if (likely(rs
!= 0))
3939 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3941 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3944 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3946 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3947 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3949 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3958 /* Set on less than with immediate operand */
3959 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3960 int rt
, int rs
, int16_t imm
)
3962 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3966 /* If no destination, treat it as a NOP. */
3969 t0
= tcg_temp_new();
3970 gen_load_gpr(t0
, rs
);
3973 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3976 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3982 /* Shifts with immediate operand */
3983 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3984 int rt
, int rs
, int16_t imm
)
3986 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3990 /* If no destination, treat it as a NOP. */
3994 t0
= tcg_temp_new();
3995 gen_load_gpr(t0
, rs
);
3998 tcg_gen_shli_tl(t0
, t0
, uimm
);
3999 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4002 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4006 tcg_gen_ext32u_tl(t0
, t0
);
4007 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4009 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4014 TCGv_i32 t1
= tcg_temp_new_i32();
4016 tcg_gen_trunc_tl_i32(t1
, t0
);
4017 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4018 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4019 tcg_temp_free_i32(t1
);
4021 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4024 #if defined(TARGET_MIPS64)
4026 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4029 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4032 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4036 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4038 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4042 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4045 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4048 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4051 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4059 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4060 int rd
, int rs
, int rt
)
4062 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4063 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4064 /* If no destination, treat it as a NOP.
4065 For add & sub, we must generate the overflow exception when needed. */
4072 TCGv t0
= tcg_temp_local_new();
4073 TCGv t1
= tcg_temp_new();
4074 TCGv t2
= tcg_temp_new();
4075 TCGLabel
*l1
= gen_new_label();
4077 gen_load_gpr(t1
, rs
);
4078 gen_load_gpr(t2
, rt
);
4079 tcg_gen_add_tl(t0
, t1
, t2
);
4080 tcg_gen_ext32s_tl(t0
, t0
);
4081 tcg_gen_xor_tl(t1
, t1
, t2
);
4082 tcg_gen_xor_tl(t2
, t0
, t2
);
4083 tcg_gen_andc_tl(t1
, t2
, t1
);
4085 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4087 /* operands of same sign, result different sign */
4088 generate_exception(ctx
, EXCP_OVERFLOW
);
4090 gen_store_gpr(t0
, rd
);
4095 if (rs
!= 0 && rt
!= 0) {
4096 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4097 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4098 } else if (rs
== 0 && rt
!= 0) {
4099 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4100 } else if (rs
!= 0 && rt
== 0) {
4101 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4103 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4108 TCGv t0
= tcg_temp_local_new();
4109 TCGv t1
= tcg_temp_new();
4110 TCGv t2
= tcg_temp_new();
4111 TCGLabel
*l1
= gen_new_label();
4113 gen_load_gpr(t1
, rs
);
4114 gen_load_gpr(t2
, rt
);
4115 tcg_gen_sub_tl(t0
, t1
, t2
);
4116 tcg_gen_ext32s_tl(t0
, t0
);
4117 tcg_gen_xor_tl(t2
, t1
, t2
);
4118 tcg_gen_xor_tl(t1
, t0
, t1
);
4119 tcg_gen_and_tl(t1
, t1
, t2
);
4121 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4123 /* operands of different sign, first operand and result different sign */
4124 generate_exception(ctx
, EXCP_OVERFLOW
);
4126 gen_store_gpr(t0
, rd
);
4131 if (rs
!= 0 && rt
!= 0) {
4132 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4133 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4134 } else if (rs
== 0 && rt
!= 0) {
4135 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4136 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4137 } else if (rs
!= 0 && rt
== 0) {
4138 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4140 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4143 #if defined(TARGET_MIPS64)
4146 TCGv t0
= tcg_temp_local_new();
4147 TCGv t1
= tcg_temp_new();
4148 TCGv t2
= tcg_temp_new();
4149 TCGLabel
*l1
= gen_new_label();
4151 gen_load_gpr(t1
, rs
);
4152 gen_load_gpr(t2
, rt
);
4153 tcg_gen_add_tl(t0
, t1
, t2
);
4154 tcg_gen_xor_tl(t1
, t1
, t2
);
4155 tcg_gen_xor_tl(t2
, t0
, t2
);
4156 tcg_gen_andc_tl(t1
, t2
, t1
);
4158 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4160 /* operands of same sign, result different sign */
4161 generate_exception(ctx
, EXCP_OVERFLOW
);
4163 gen_store_gpr(t0
, rd
);
4168 if (rs
!= 0 && rt
!= 0) {
4169 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4170 } else if (rs
== 0 && rt
!= 0) {
4171 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4172 } else if (rs
!= 0 && rt
== 0) {
4173 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4175 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4180 TCGv t0
= tcg_temp_local_new();
4181 TCGv t1
= tcg_temp_new();
4182 TCGv t2
= tcg_temp_new();
4183 TCGLabel
*l1
= gen_new_label();
4185 gen_load_gpr(t1
, rs
);
4186 gen_load_gpr(t2
, rt
);
4187 tcg_gen_sub_tl(t0
, t1
, t2
);
4188 tcg_gen_xor_tl(t2
, t1
, t2
);
4189 tcg_gen_xor_tl(t1
, t0
, t1
);
4190 tcg_gen_and_tl(t1
, t1
, t2
);
4192 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4194 /* operands of different sign, first operand and result different sign */
4195 generate_exception(ctx
, EXCP_OVERFLOW
);
4197 gen_store_gpr(t0
, rd
);
4202 if (rs
!= 0 && rt
!= 0) {
4203 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4204 } else if (rs
== 0 && rt
!= 0) {
4205 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4206 } else if (rs
!= 0 && rt
== 0) {
4207 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4209 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4214 if (likely(rs
!= 0 && rt
!= 0)) {
4215 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4216 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4218 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4224 /* Conditional move */
4225 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4226 int rd
, int rs
, int rt
)
4231 /* If no destination, treat it as a NOP. */
4235 t0
= tcg_temp_new();
4236 gen_load_gpr(t0
, rt
);
4237 t1
= tcg_const_tl(0);
4238 t2
= tcg_temp_new();
4239 gen_load_gpr(t2
, rs
);
4242 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4245 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4248 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4251 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4260 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4261 int rd
, int rs
, int rt
)
4264 /* If no destination, treat it as a NOP. */
4270 if (likely(rs
!= 0 && rt
!= 0)) {
4271 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4273 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4277 if (rs
!= 0 && rt
!= 0) {
4278 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4279 } else if (rs
== 0 && rt
!= 0) {
4280 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4281 } else if (rs
!= 0 && rt
== 0) {
4282 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4284 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4288 if (likely(rs
!= 0 && rt
!= 0)) {
4289 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4290 } else if (rs
== 0 && rt
!= 0) {
4291 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4292 } else if (rs
!= 0 && rt
== 0) {
4293 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4295 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4299 if (likely(rs
!= 0 && rt
!= 0)) {
4300 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4301 } else if (rs
== 0 && rt
!= 0) {
4302 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4303 } else if (rs
!= 0 && rt
== 0) {
4304 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4306 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4312 /* Set on lower than */
4313 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4314 int rd
, int rs
, int rt
)
4319 /* If no destination, treat it as a NOP. */
4323 t0
= tcg_temp_new();
4324 t1
= tcg_temp_new();
4325 gen_load_gpr(t0
, rs
);
4326 gen_load_gpr(t1
, rt
);
4329 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4332 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4340 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4341 int rd
, int rs
, int rt
)
4346 /* If no destination, treat it as a NOP.
4347 For add & sub, we must generate the overflow exception when needed. */
4351 t0
= tcg_temp_new();
4352 t1
= tcg_temp_new();
4353 gen_load_gpr(t0
, rs
);
4354 gen_load_gpr(t1
, rt
);
4357 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4358 tcg_gen_shl_tl(t0
, t1
, t0
);
4359 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4362 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4363 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4366 tcg_gen_ext32u_tl(t1
, t1
);
4367 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4368 tcg_gen_shr_tl(t0
, t1
, t0
);
4369 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4373 TCGv_i32 t2
= tcg_temp_new_i32();
4374 TCGv_i32 t3
= tcg_temp_new_i32();
4376 tcg_gen_trunc_tl_i32(t2
, t0
);
4377 tcg_gen_trunc_tl_i32(t3
, t1
);
4378 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4379 tcg_gen_rotr_i32(t2
, t3
, t2
);
4380 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4381 tcg_temp_free_i32(t2
);
4382 tcg_temp_free_i32(t3
);
4385 #if defined(TARGET_MIPS64)
4387 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4388 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4391 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4392 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4395 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4396 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4399 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4400 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4408 /* Copy GPR to and from TX79 HI1/LO1 register. */
4409 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4411 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4418 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4421 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4425 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4427 tcg_gen_movi_tl(cpu_HI
[1], 0);
4432 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4434 tcg_gen_movi_tl(cpu_LO
[1], 0);
4438 MIPS_INVAL("mfthilo1 TX79");
4439 generate_exception_end(ctx
, EXCP_RI
);
4444 /* Arithmetic on HI/LO registers */
4445 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4447 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4458 #if defined(TARGET_MIPS64)
4460 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4464 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4468 #if defined(TARGET_MIPS64)
4470 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4474 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4479 #if defined(TARGET_MIPS64)
4481 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4485 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4488 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4493 #if defined(TARGET_MIPS64)
4495 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4499 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4502 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4508 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4511 TCGv t0
= tcg_const_tl(addr
);
4512 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4513 gen_store_gpr(t0
, reg
);
4517 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4523 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4526 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4527 addr
= addr_add(ctx
, pc
, offset
);
4528 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4532 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4533 addr
= addr_add(ctx
, pc
, offset
);
4534 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4536 #if defined(TARGET_MIPS64)
4539 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4540 addr
= addr_add(ctx
, pc
, offset
);
4541 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4545 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4548 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4549 addr
= addr_add(ctx
, pc
, offset
);
4550 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4555 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4556 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4557 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4560 #if defined(TARGET_MIPS64)
4561 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4562 case R6_OPC_LDPC
+ (1 << 16):
4563 case R6_OPC_LDPC
+ (2 << 16):
4564 case R6_OPC_LDPC
+ (3 << 16):
4566 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4567 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4568 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4572 MIPS_INVAL("OPC_PCREL");
4573 generate_exception_end(ctx
, EXCP_RI
);
4580 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4589 t0
= tcg_temp_new();
4590 t1
= tcg_temp_new();
4592 gen_load_gpr(t0
, rs
);
4593 gen_load_gpr(t1
, rt
);
4598 TCGv t2
= tcg_temp_new();
4599 TCGv t3
= tcg_temp_new();
4600 tcg_gen_ext32s_tl(t0
, t0
);
4601 tcg_gen_ext32s_tl(t1
, t1
);
4602 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4603 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4604 tcg_gen_and_tl(t2
, t2
, t3
);
4605 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4606 tcg_gen_or_tl(t2
, t2
, t3
);
4607 tcg_gen_movi_tl(t3
, 0);
4608 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4609 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4610 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4617 TCGv t2
= tcg_temp_new();
4618 TCGv t3
= tcg_temp_new();
4619 tcg_gen_ext32s_tl(t0
, t0
);
4620 tcg_gen_ext32s_tl(t1
, t1
);
4621 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4622 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4623 tcg_gen_and_tl(t2
, t2
, t3
);
4624 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4625 tcg_gen_or_tl(t2
, t2
, t3
);
4626 tcg_gen_movi_tl(t3
, 0);
4627 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4628 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4629 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4636 TCGv t2
= tcg_const_tl(0);
4637 TCGv t3
= tcg_const_tl(1);
4638 tcg_gen_ext32u_tl(t0
, t0
);
4639 tcg_gen_ext32u_tl(t1
, t1
);
4640 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4641 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4642 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4649 TCGv t2
= tcg_const_tl(0);
4650 TCGv t3
= tcg_const_tl(1);
4651 tcg_gen_ext32u_tl(t0
, t0
);
4652 tcg_gen_ext32u_tl(t1
, t1
);
4653 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4654 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4655 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4662 TCGv_i32 t2
= tcg_temp_new_i32();
4663 TCGv_i32 t3
= tcg_temp_new_i32();
4664 tcg_gen_trunc_tl_i32(t2
, t0
);
4665 tcg_gen_trunc_tl_i32(t3
, t1
);
4666 tcg_gen_mul_i32(t2
, t2
, t3
);
4667 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4668 tcg_temp_free_i32(t2
);
4669 tcg_temp_free_i32(t3
);
4674 TCGv_i32 t2
= tcg_temp_new_i32();
4675 TCGv_i32 t3
= tcg_temp_new_i32();
4676 tcg_gen_trunc_tl_i32(t2
, t0
);
4677 tcg_gen_trunc_tl_i32(t3
, t1
);
4678 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4679 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4680 tcg_temp_free_i32(t2
);
4681 tcg_temp_free_i32(t3
);
4686 TCGv_i32 t2
= tcg_temp_new_i32();
4687 TCGv_i32 t3
= tcg_temp_new_i32();
4688 tcg_gen_trunc_tl_i32(t2
, t0
);
4689 tcg_gen_trunc_tl_i32(t3
, t1
);
4690 tcg_gen_mul_i32(t2
, t2
, t3
);
4691 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4692 tcg_temp_free_i32(t2
);
4693 tcg_temp_free_i32(t3
);
4698 TCGv_i32 t2
= tcg_temp_new_i32();
4699 TCGv_i32 t3
= tcg_temp_new_i32();
4700 tcg_gen_trunc_tl_i32(t2
, t0
);
4701 tcg_gen_trunc_tl_i32(t3
, t1
);
4702 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4703 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4704 tcg_temp_free_i32(t2
);
4705 tcg_temp_free_i32(t3
);
4708 #if defined(TARGET_MIPS64)
4711 TCGv t2
= tcg_temp_new();
4712 TCGv t3
= tcg_temp_new();
4713 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4714 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4715 tcg_gen_and_tl(t2
, t2
, t3
);
4716 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4717 tcg_gen_or_tl(t2
, t2
, t3
);
4718 tcg_gen_movi_tl(t3
, 0);
4719 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4720 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4727 TCGv t2
= tcg_temp_new();
4728 TCGv t3
= tcg_temp_new();
4729 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4730 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4731 tcg_gen_and_tl(t2
, t2
, t3
);
4732 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4733 tcg_gen_or_tl(t2
, t2
, t3
);
4734 tcg_gen_movi_tl(t3
, 0);
4735 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4736 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4743 TCGv t2
= tcg_const_tl(0);
4744 TCGv t3
= tcg_const_tl(1);
4745 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4746 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4753 TCGv t2
= tcg_const_tl(0);
4754 TCGv t3
= tcg_const_tl(1);
4755 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4756 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4762 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4766 TCGv t2
= tcg_temp_new();
4767 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4772 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4776 TCGv t2
= tcg_temp_new();
4777 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4783 MIPS_INVAL("r6 mul/div");
4784 generate_exception_end(ctx
, EXCP_RI
);
4792 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4796 t0
= tcg_temp_new();
4797 t1
= tcg_temp_new();
4799 gen_load_gpr(t0
, rs
);
4800 gen_load_gpr(t1
, rt
);
4805 TCGv t2
= tcg_temp_new();
4806 TCGv t3
= tcg_temp_new();
4807 tcg_gen_ext32s_tl(t0
, t0
);
4808 tcg_gen_ext32s_tl(t1
, t1
);
4809 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4810 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4811 tcg_gen_and_tl(t2
, t2
, t3
);
4812 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4813 tcg_gen_or_tl(t2
, t2
, t3
);
4814 tcg_gen_movi_tl(t3
, 0);
4815 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4816 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4817 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4818 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4819 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4826 TCGv t2
= tcg_const_tl(0);
4827 TCGv t3
= tcg_const_tl(1);
4828 tcg_gen_ext32u_tl(t0
, t0
);
4829 tcg_gen_ext32u_tl(t1
, t1
);
4830 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4831 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4832 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4833 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4834 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4840 MIPS_INVAL("div1 TX79");
4841 generate_exception_end(ctx
, EXCP_RI
);
4849 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4850 int acc
, int rs
, int rt
)
4854 t0
= tcg_temp_new();
4855 t1
= tcg_temp_new();
4857 gen_load_gpr(t0
, rs
);
4858 gen_load_gpr(t1
, rt
);
4867 TCGv t2
= tcg_temp_new();
4868 TCGv t3
= tcg_temp_new();
4869 tcg_gen_ext32s_tl(t0
, t0
);
4870 tcg_gen_ext32s_tl(t1
, t1
);
4871 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4872 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4873 tcg_gen_and_tl(t2
, t2
, t3
);
4874 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4875 tcg_gen_or_tl(t2
, t2
, t3
);
4876 tcg_gen_movi_tl(t3
, 0);
4877 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4878 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4879 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4880 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4881 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4888 TCGv t2
= tcg_const_tl(0);
4889 TCGv t3
= tcg_const_tl(1);
4890 tcg_gen_ext32u_tl(t0
, t0
);
4891 tcg_gen_ext32u_tl(t1
, t1
);
4892 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4893 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4894 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4895 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4896 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4903 TCGv_i32 t2
= tcg_temp_new_i32();
4904 TCGv_i32 t3
= tcg_temp_new_i32();
4905 tcg_gen_trunc_tl_i32(t2
, t0
);
4906 tcg_gen_trunc_tl_i32(t3
, t1
);
4907 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4908 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4909 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4910 tcg_temp_free_i32(t2
);
4911 tcg_temp_free_i32(t3
);
4916 TCGv_i32 t2
= tcg_temp_new_i32();
4917 TCGv_i32 t3
= tcg_temp_new_i32();
4918 tcg_gen_trunc_tl_i32(t2
, t0
);
4919 tcg_gen_trunc_tl_i32(t3
, t1
);
4920 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4921 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4922 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4923 tcg_temp_free_i32(t2
);
4924 tcg_temp_free_i32(t3
);
4927 #if defined(TARGET_MIPS64)
4930 TCGv t2
= tcg_temp_new();
4931 TCGv t3
= tcg_temp_new();
4932 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4933 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4934 tcg_gen_and_tl(t2
, t2
, t3
);
4935 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4936 tcg_gen_or_tl(t2
, t2
, t3
);
4937 tcg_gen_movi_tl(t3
, 0);
4938 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4939 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4940 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4947 TCGv t2
= tcg_const_tl(0);
4948 TCGv t3
= tcg_const_tl(1);
4949 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4950 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4951 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4957 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4960 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4965 TCGv_i64 t2
= tcg_temp_new_i64();
4966 TCGv_i64 t3
= tcg_temp_new_i64();
4968 tcg_gen_ext_tl_i64(t2
, t0
);
4969 tcg_gen_ext_tl_i64(t3
, t1
);
4970 tcg_gen_mul_i64(t2
, t2
, t3
);
4971 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4972 tcg_gen_add_i64(t2
, t2
, t3
);
4973 tcg_temp_free_i64(t3
);
4974 gen_move_low32(cpu_LO
[acc
], t2
);
4975 gen_move_high32(cpu_HI
[acc
], t2
);
4976 tcg_temp_free_i64(t2
);
4981 TCGv_i64 t2
= tcg_temp_new_i64();
4982 TCGv_i64 t3
= tcg_temp_new_i64();
4984 tcg_gen_ext32u_tl(t0
, t0
);
4985 tcg_gen_ext32u_tl(t1
, t1
);
4986 tcg_gen_extu_tl_i64(t2
, t0
);
4987 tcg_gen_extu_tl_i64(t3
, t1
);
4988 tcg_gen_mul_i64(t2
, t2
, t3
);
4989 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4990 tcg_gen_add_i64(t2
, t2
, t3
);
4991 tcg_temp_free_i64(t3
);
4992 gen_move_low32(cpu_LO
[acc
], t2
);
4993 gen_move_high32(cpu_HI
[acc
], t2
);
4994 tcg_temp_free_i64(t2
);
4999 TCGv_i64 t2
= tcg_temp_new_i64();
5000 TCGv_i64 t3
= tcg_temp_new_i64();
5002 tcg_gen_ext_tl_i64(t2
, t0
);
5003 tcg_gen_ext_tl_i64(t3
, t1
);
5004 tcg_gen_mul_i64(t2
, t2
, t3
);
5005 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5006 tcg_gen_sub_i64(t2
, t3
, t2
);
5007 tcg_temp_free_i64(t3
);
5008 gen_move_low32(cpu_LO
[acc
], t2
);
5009 gen_move_high32(cpu_HI
[acc
], t2
);
5010 tcg_temp_free_i64(t2
);
5015 TCGv_i64 t2
= tcg_temp_new_i64();
5016 TCGv_i64 t3
= tcg_temp_new_i64();
5018 tcg_gen_ext32u_tl(t0
, t0
);
5019 tcg_gen_ext32u_tl(t1
, t1
);
5020 tcg_gen_extu_tl_i64(t2
, t0
);
5021 tcg_gen_extu_tl_i64(t3
, t1
);
5022 tcg_gen_mul_i64(t2
, t2
, t3
);
5023 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5024 tcg_gen_sub_i64(t2
, t3
, t2
);
5025 tcg_temp_free_i64(t3
);
5026 gen_move_low32(cpu_LO
[acc
], t2
);
5027 gen_move_high32(cpu_HI
[acc
], t2
);
5028 tcg_temp_free_i64(t2
);
5032 MIPS_INVAL("mul/div");
5033 generate_exception_end(ctx
, EXCP_RI
);
5042 * These MULT[U] and MADD[U] instructions implemented in for example
5043 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5044 * architectures are special three-operand variants with the syntax
5046 * MULT[U][1] rd, rs, rt
5050 * (rd, LO, HI) <- rs * rt
5054 * MADD[U][1] rd, rs, rt
5058 * (rd, LO, HI) <- (LO, HI) + rs * rt
5060 * where the low-order 32-bits of the result is placed into both the
5061 * GPR rd and the special register LO. The high-order 32-bits of the
5062 * result is placed into the special register HI.
5064 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5065 * which is the zero register that always reads as 0.
5067 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5068 int rd
, int rs
, int rt
)
5070 TCGv t0
= tcg_temp_new();
5071 TCGv t1
= tcg_temp_new();
5074 gen_load_gpr(t0
, rs
);
5075 gen_load_gpr(t1
, rt
);
5083 TCGv_i32 t2
= tcg_temp_new_i32();
5084 TCGv_i32 t3
= tcg_temp_new_i32();
5085 tcg_gen_trunc_tl_i32(t2
, t0
);
5086 tcg_gen_trunc_tl_i32(t3
, t1
);
5087 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5089 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5091 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5092 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5093 tcg_temp_free_i32(t2
);
5094 tcg_temp_free_i32(t3
);
5097 case MMI_OPC_MULTU1
:
5102 TCGv_i32 t2
= tcg_temp_new_i32();
5103 TCGv_i32 t3
= tcg_temp_new_i32();
5104 tcg_gen_trunc_tl_i32(t2
, t0
);
5105 tcg_gen_trunc_tl_i32(t3
, t1
);
5106 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5108 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5110 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5111 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5112 tcg_temp_free_i32(t2
);
5113 tcg_temp_free_i32(t3
);
5121 TCGv_i64 t2
= tcg_temp_new_i64();
5122 TCGv_i64 t3
= tcg_temp_new_i64();
5124 tcg_gen_ext_tl_i64(t2
, t0
);
5125 tcg_gen_ext_tl_i64(t3
, t1
);
5126 tcg_gen_mul_i64(t2
, t2
, t3
);
5127 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5128 tcg_gen_add_i64(t2
, t2
, t3
);
5129 tcg_temp_free_i64(t3
);
5130 gen_move_low32(cpu_LO
[acc
], t2
);
5131 gen_move_high32(cpu_HI
[acc
], t2
);
5133 gen_move_low32(cpu_gpr
[rd
], t2
);
5135 tcg_temp_free_i64(t2
);
5138 case MMI_OPC_MADDU1
:
5143 TCGv_i64 t2
= tcg_temp_new_i64();
5144 TCGv_i64 t3
= tcg_temp_new_i64();
5146 tcg_gen_ext32u_tl(t0
, t0
);
5147 tcg_gen_ext32u_tl(t1
, t1
);
5148 tcg_gen_extu_tl_i64(t2
, t0
);
5149 tcg_gen_extu_tl_i64(t3
, t1
);
5150 tcg_gen_mul_i64(t2
, t2
, t3
);
5151 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5152 tcg_gen_add_i64(t2
, t2
, t3
);
5153 tcg_temp_free_i64(t3
);
5154 gen_move_low32(cpu_LO
[acc
], t2
);
5155 gen_move_high32(cpu_HI
[acc
], t2
);
5157 gen_move_low32(cpu_gpr
[rd
], t2
);
5159 tcg_temp_free_i64(t2
);
5163 MIPS_INVAL("mul/madd TXx9");
5164 generate_exception_end(ctx
, EXCP_RI
);
5173 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
5174 int rd
, int rs
, int rt
)
5176 TCGv t0
= tcg_temp_new();
5177 TCGv t1
= tcg_temp_new();
5179 gen_load_gpr(t0
, rs
);
5180 gen_load_gpr(t1
, rt
);
5183 case OPC_VR54XX_MULS
:
5184 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5186 case OPC_VR54XX_MULSU
:
5187 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5189 case OPC_VR54XX_MACC
:
5190 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5192 case OPC_VR54XX_MACCU
:
5193 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5195 case OPC_VR54XX_MSAC
:
5196 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5198 case OPC_VR54XX_MSACU
:
5199 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5201 case OPC_VR54XX_MULHI
:
5202 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5204 case OPC_VR54XX_MULHIU
:
5205 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5207 case OPC_VR54XX_MULSHI
:
5208 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5210 case OPC_VR54XX_MULSHIU
:
5211 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5213 case OPC_VR54XX_MACCHI
:
5214 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5216 case OPC_VR54XX_MACCHIU
:
5217 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5219 case OPC_VR54XX_MSACHI
:
5220 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5222 case OPC_VR54XX_MSACHIU
:
5223 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5226 MIPS_INVAL("mul vr54xx");
5227 generate_exception_end(ctx
, EXCP_RI
);
5230 gen_store_gpr(t0
, rd
);
5237 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
5247 gen_load_gpr(t0
, rs
);
5252 #if defined(TARGET_MIPS64)
5256 tcg_gen_not_tl(t0
, t0
);
5265 tcg_gen_ext32u_tl(t0
, t0
);
5266 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5267 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5269 #if defined(TARGET_MIPS64)
5274 tcg_gen_clzi_i64(t0
, t0
, 64);
5280 /* Godson integer instructions */
5281 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5282 int rd
, int rs
, int rt
)
5294 case OPC_MULTU_G_2E
:
5295 case OPC_MULTU_G_2F
:
5296 #if defined(TARGET_MIPS64)
5297 case OPC_DMULT_G_2E
:
5298 case OPC_DMULT_G_2F
:
5299 case OPC_DMULTU_G_2E
:
5300 case OPC_DMULTU_G_2F
:
5302 t0
= tcg_temp_new();
5303 t1
= tcg_temp_new();
5306 t0
= tcg_temp_local_new();
5307 t1
= tcg_temp_local_new();
5311 gen_load_gpr(t0
, rs
);
5312 gen_load_gpr(t1
, rt
);
5317 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5318 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5320 case OPC_MULTU_G_2E
:
5321 case OPC_MULTU_G_2F
:
5322 tcg_gen_ext32u_tl(t0
, t0
);
5323 tcg_gen_ext32u_tl(t1
, t1
);
5324 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5325 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5330 TCGLabel
*l1
= gen_new_label();
5331 TCGLabel
*l2
= gen_new_label();
5332 TCGLabel
*l3
= gen_new_label();
5333 tcg_gen_ext32s_tl(t0
, t0
);
5334 tcg_gen_ext32s_tl(t1
, t1
);
5335 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5336 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5339 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5340 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5341 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5344 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5345 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5352 TCGLabel
*l1
= gen_new_label();
5353 TCGLabel
*l2
= gen_new_label();
5354 tcg_gen_ext32u_tl(t0
, t0
);
5355 tcg_gen_ext32u_tl(t1
, t1
);
5356 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5357 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5360 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5361 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5368 TCGLabel
*l1
= gen_new_label();
5369 TCGLabel
*l2
= gen_new_label();
5370 TCGLabel
*l3
= gen_new_label();
5371 tcg_gen_ext32u_tl(t0
, t0
);
5372 tcg_gen_ext32u_tl(t1
, t1
);
5373 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5374 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5375 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5377 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5380 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5381 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5388 TCGLabel
*l1
= gen_new_label();
5389 TCGLabel
*l2
= gen_new_label();
5390 tcg_gen_ext32u_tl(t0
, t0
);
5391 tcg_gen_ext32u_tl(t1
, t1
);
5392 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5393 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5396 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5397 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5401 #if defined(TARGET_MIPS64)
5402 case OPC_DMULT_G_2E
:
5403 case OPC_DMULT_G_2F
:
5404 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5406 case OPC_DMULTU_G_2E
:
5407 case OPC_DMULTU_G_2F
:
5408 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5413 TCGLabel
*l1
= gen_new_label();
5414 TCGLabel
*l2
= gen_new_label();
5415 TCGLabel
*l3
= gen_new_label();
5416 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5417 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5420 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5421 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5422 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5425 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5429 case OPC_DDIVU_G_2E
:
5430 case OPC_DDIVU_G_2F
:
5432 TCGLabel
*l1
= gen_new_label();
5433 TCGLabel
*l2
= gen_new_label();
5434 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5435 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5438 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5445 TCGLabel
*l1
= gen_new_label();
5446 TCGLabel
*l2
= gen_new_label();
5447 TCGLabel
*l3
= gen_new_label();
5448 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5449 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5450 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5452 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5455 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5459 case OPC_DMODU_G_2E
:
5460 case OPC_DMODU_G_2F
:
5462 TCGLabel
*l1
= gen_new_label();
5463 TCGLabel
*l2
= gen_new_label();
5464 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5465 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5468 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/*
 * NOTE(review): this whole region is a line-mangled extraction — each
 * statement is split across several lines, the "NNNN" prefixes are leftover
 * line numbers, and many original lines (braces, switch/case labels,
 * 'break's) are missing, so the text is not compilable as-is.  Only
 * comments have been added here; every original token is left untouched.
 * Restore this function from the upstream file before building.
 */
5479 /* Loongson multimedia instructions */
/*
 * Emit TCG ops for one Loongson multimedia (CP2) instruction.
 * Visible behavior: decode via MASK_LMI(ctx->opcode), check CP1 access,
 * load the two 64-bit sources from FPRs rs/rt into t0/t1, compute the
 * result into t0, and store t0 back to FPR rd.
 */
5480 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5482 uint32_t opc
, shift_max
;
5485 opc
= MASK_LMI(ctx
->opcode
);
/* 'local' temps survive the conditional branches emitted further down. */
5491 t0
= tcg_temp_local_new_i64();
5492 t1
= tcg_temp_local_new_i64();
5495 t0
= tcg_temp_new_i64();
5496 t1
= tcg_temp_new_i64();
5500 check_cp1_enabled(ctx
);
5501 gen_load_fpr64(ctx
, t0
, rs
);
5502 gen_load_fpr64(ctx
, t1
, rt
);
/* One switch case per opcode: call a helper, or emit a direct TCG op. */
5504 #define LMI_HELPER(UP, LO) \
5505 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5506 #define LMI_HELPER_1(UP, LO) \
5507 case OPC_##UP: gen_helper_##LO(t0, t0); break
5508 #define LMI_DIRECT(UP, LO, OP) \
5509 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5512 LMI_HELPER(PADDSH
, paddsh
);
5513 LMI_HELPER(PADDUSH
, paddush
);
5514 LMI_HELPER(PADDH
, paddh
);
5515 LMI_HELPER(PADDW
, paddw
);
5516 LMI_HELPER(PADDSB
, paddsb
);
5517 LMI_HELPER(PADDUSB
, paddusb
);
5518 LMI_HELPER(PADDB
, paddb
);
5520 LMI_HELPER(PSUBSH
, psubsh
);
5521 LMI_HELPER(PSUBUSH
, psubush
);
5522 LMI_HELPER(PSUBH
, psubh
);
5523 LMI_HELPER(PSUBW
, psubw
);
5524 LMI_HELPER(PSUBSB
, psubsb
);
5525 LMI_HELPER(PSUBUSB
, psubusb
);
5526 LMI_HELPER(PSUBB
, psubb
);
5528 LMI_HELPER(PSHUFH
, pshufh
);
5529 LMI_HELPER(PACKSSWH
, packsswh
);
5530 LMI_HELPER(PACKSSHB
, packsshb
);
5531 LMI_HELPER(PACKUSHB
, packushb
);
5533 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5534 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5535 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5536 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5537 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5538 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5540 LMI_HELPER(PAVGH
, pavgh
);
5541 LMI_HELPER(PAVGB
, pavgb
);
5542 LMI_HELPER(PMAXSH
, pmaxsh
);
5543 LMI_HELPER(PMINSH
, pminsh
);
5544 LMI_HELPER(PMAXUB
, pmaxub
);
5545 LMI_HELPER(PMINUB
, pminub
);
5547 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5548 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5549 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5550 LMI_HELPER(PCMPGTH
, pcmpgth
);
5551 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5552 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5554 LMI_HELPER(PSLLW
, psllw
);
5555 LMI_HELPER(PSLLH
, psllh
);
5556 LMI_HELPER(PSRLW
, psrlw
);
5557 LMI_HELPER(PSRLH
, psrlh
);
5558 LMI_HELPER(PSRAW
, psraw
);
5559 LMI_HELPER(PSRAH
, psrah
);
5561 LMI_HELPER(PMULLH
, pmullh
);
5562 LMI_HELPER(PMULHH
, pmulhh
);
5563 LMI_HELPER(PMULHUH
, pmulhuh
);
5564 LMI_HELPER(PMADDHW
, pmaddhw
);
5566 LMI_HELPER(PASUBUB
, pasubub
);
5567 LMI_HELPER_1(BIADD
, biadd
);
5568 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5570 LMI_DIRECT(PADDD
, paddd
, add
);
5571 LMI_DIRECT(PSUBD
, psubd
, sub
);
5572 LMI_DIRECT(XOR_CP2
, xor, xor);
5573 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5574 LMI_DIRECT(AND_CP2
, and, and);
5575 LMI_DIRECT(OR_CP2
, or, or);
/* t0 = t1 & ~t0 — note the operand order of andc (PANDN presumably;
   the case label is among the elided lines — confirm upstream). */
5578 tcg_gen_andc_i64(t0
, t1
, t0
);
/* Insert low 16 bits of t1 at halfword 0/1/2/3 of t0 (PINSRH_0..3
   presumably — case labels elided; confirm upstream). */
5582 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5585 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5588 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5591 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
/* Extract the halfword selected by the low 2 bits of t1, zero-extended. */
5595 tcg_gen_andi_i64(t1
, t1
, 3);
5596 tcg_gen_shli_i64(t1
, t1
, 4);
5597 tcg_gen_shr_i64(t0
, t0
, t1
);
5598 tcg_gen_ext16u_i64(t0
, t0
);
/* 32-bit wrapping add/sub, result sign-extended to 64 bits. */
5602 tcg_gen_add_i64(t0
, t0
, t1
);
5603 tcg_gen_ext32s_i64(t0
, t0
);
5606 tcg_gen_sub_i64(t0
, t0
, t1
);
5607 tcg_gen_ext32s_i64(t0
, t0
);
5629 /* Make sure shift count isn't TCG undefined behaviour. */
5630 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5635 tcg_gen_shl_i64(t0
, t0
, t1
);
5639 /* Since SRA is UndefinedResult without sign-extended inputs,
5640 we can treat SRA and DSRA the same. */
5641 tcg_gen_sar_i64(t0
, t0
, t1
);
5644 /* We want to shift in zeros for SRL; zero-extend first. */
5645 tcg_gen_ext32u_i64(t0
, t0
);
5648 tcg_gen_shr_i64(t0
, t0
, t1
);
5652 if (shift_max
== 32) {
5653 tcg_gen_ext32s_i64(t0
, t0
);
5656 /* Shifts larger than MAX produce zero. */
5657 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5658 tcg_gen_neg_i64(t1
, t1
);
5659 tcg_gen_and_i64(t0
, t0
, t1
);
/* Signed add with overflow trap: compute t0 = t1 + old_t0, derive the
   overflow predicate into t1 via XOR/ANDC, skip to 'lab' when no
   overflow (t1 >= 0), otherwise raise EXCP_OVERFLOW. */
5665 TCGv_i64 t2
= tcg_temp_new_i64();
5666 TCGLabel
*lab
= gen_new_label();
5668 tcg_gen_mov_i64(t2
, t0
);
5669 tcg_gen_add_i64(t0
, t1
, t2
);
5670 if (opc
== OPC_ADD_CP2
) {
5671 tcg_gen_ext32s_i64(t0
, t0
);
5673 tcg_gen_xor_i64(t1
, t1
, t2
);
5674 tcg_gen_xor_i64(t2
, t2
, t0
);
5675 tcg_gen_andc_i64(t1
, t2
, t1
);
5676 tcg_temp_free_i64(t2
);
5677 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5678 generate_exception(ctx
, EXCP_OVERFLOW
);
/* Signed subtract with the same overflow-trap pattern. */
5686 TCGv_i64 t2
= tcg_temp_new_i64();
5687 TCGLabel
*lab
= gen_new_label();
5689 tcg_gen_mov_i64(t2
, t0
);
5690 tcg_gen_sub_i64(t0
, t1
, t2
);
5691 if (opc
== OPC_SUB_CP2
) {
5692 tcg_gen_ext32s_i64(t0
, t0
);
5694 tcg_gen_xor_i64(t1
, t1
, t2
);
5695 tcg_gen_xor_i64(t2
, t2
, t0
);
5696 tcg_gen_and_i64(t1
, t1
, t2
);
5697 tcg_temp_free_i64(t2
);
5698 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5699 generate_exception(ctx
, EXCP_OVERFLOW
);
/* Unsigned 32x32 -> 64 multiply of the low words. */
5705 tcg_gen_ext32u_i64(t0
, t0
);
5706 tcg_gen_ext32u_i64(t1
, t1
);
5707 tcg_gen_mul_i64(t0
, t0
, t1
);
5716 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5717 FD field is the CC field? */
5719 MIPS_INVAL("loongson_cp2");
5720 generate_exception_end(ctx
, EXCP_RI
);
5727 gen_store_fpr64(ctx
, t0
, rd
);
5729 tcg_temp_free_i64(t0
);
5730 tcg_temp_free_i64(t1
);
5734 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5735 int rs
, int rt
, int16_t imm
)
5738 TCGv t0
= tcg_temp_new();
5739 TCGv t1
= tcg_temp_new();
5742 /* Load needed operands */
5750 /* Compare two registers */
5752 gen_load_gpr(t0
, rs
);
5753 gen_load_gpr(t1
, rt
);
5763 /* Compare register to immediate */
5764 if (rs
!= 0 || imm
!= 0) {
5765 gen_load_gpr(t0
, rs
);
5766 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5773 case OPC_TEQ
: /* rs == rs */
5774 case OPC_TEQI
: /* r0 == 0 */
5775 case OPC_TGE
: /* rs >= rs */
5776 case OPC_TGEI
: /* r0 >= 0 */
5777 case OPC_TGEU
: /* rs >= rs unsigned */
5778 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5780 generate_exception_end(ctx
, EXCP_TRAP
);
5782 case OPC_TLT
: /* rs < rs */
5783 case OPC_TLTI
: /* r0 < 0 */
5784 case OPC_TLTU
: /* rs < rs unsigned */
5785 case OPC_TLTIU
: /* r0 < 0 unsigned */
5786 case OPC_TNE
: /* rs != rs */
5787 case OPC_TNEI
: /* r0 != 0 */
5788 /* Never trap: treat as NOP. */
5792 TCGLabel
*l1
= gen_new_label();
5797 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5801 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5805 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5809 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5813 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5817 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5820 generate_exception(ctx
, EXCP_TRAP
);
5827 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5829 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5833 #ifndef CONFIG_USER_ONLY
5834 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5840 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5842 if (use_goto_tb(ctx
, dest
)) {
5845 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5848 if (ctx
->base
.singlestep_enabled
) {
5849 save_cpu_state(ctx
, 0);
5850 gen_helper_raise_exception_debug(cpu_env
);
5852 tcg_gen_lookup_and_goto_ptr();
/*
 * NOTE(review): line-mangled extraction — statements split across lines,
 * leftover "NNNN" line-number prefixes, and many original lines (braces,
 * switch/case labels, 'break's) missing.  Comments only added below; all
 * original tokens untouched.  Restore from the upstream file before
 * building.
 */
5856 /* Branches (before delay slot) */
/*
 * Compute condition and target for one branch/jump: rejects a branch in a
 * delay/forbidden slot (RI exception), loads the needed operands, computes
 * btgt and (when not decidable at translate time) the 'bcond' value, then
 * records the branch kind in ctx->hflags (MIPS_HFLAG_B / _BC / _BL / _BR /
 * _BX / _B16, delay-slot size _BDS16/_BDS32) and the target in ctx->btarget.
 * Also writes the link register for and-link variants.
 */
5857 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5859 int rs
, int rt
, int32_t offset
,
5862 target_ulong btgt
= -1;
5864 int bcond_compute
= 0;
5865 TCGv t0
= tcg_temp_new();
5866 TCGv t1
= tcg_temp_new();
5868 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5869 #ifdef MIPS_DEBUG_DISAS
5870 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5871 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5873 generate_exception_end(ctx
, EXCP_RI
);
5877 /* Load needed operands */
5883 /* Compare two registers */
5885 gen_load_gpr(t0
, rs
);
5886 gen_load_gpr(t1
, rt
);
5889 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5903 /* Compare to zero */
5905 gen_load_gpr(t0
, rs
);
5908 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
/* DSP branch-on-position: mask the dspctrl 'pos' field. */
5911 #if defined(TARGET_MIPS64)
5913 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5915 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5918 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5923 /* Jump to immediate */
5924 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5929 /* Jump to register */
5930 if (offset
!= 0 && offset
!= 16) {
5931 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5932 others are reserved. */
5933 MIPS_INVAL("jump hint");
5934 generate_exception_end(ctx
, EXCP_RI
);
5937 gen_load_gpr(btarget
, rs
);
5940 MIPS_INVAL("branch/jump");
5941 generate_exception_end(ctx
, EXCP_RI
);
5944 if (bcond_compute
== 0) {
5945 /* No condition to be computed */
5947 case OPC_BEQ
: /* rx == rx */
5948 case OPC_BEQL
: /* rx == rx likely */
5949 case OPC_BGEZ
: /* 0 >= 0 */
5950 case OPC_BGEZL
: /* 0 >= 0 likely */
5951 case OPC_BLEZ
: /* 0 <= 0 */
5952 case OPC_BLEZL
: /* 0 <= 0 likely */
5954 ctx
->hflags
|= MIPS_HFLAG_B
;
5956 case OPC_BGEZAL
: /* 0 >= 0 */
5957 case OPC_BGEZALL
: /* 0 >= 0 likely */
5958 /* Always take and link */
5960 ctx
->hflags
|= MIPS_HFLAG_B
;
5962 case OPC_BNE
: /* rx != rx */
5963 case OPC_BGTZ
: /* 0 > 0 */
5964 case OPC_BLTZ
: /* 0 < 0 */
5967 case OPC_BLTZAL
: /* 0 < 0 */
5968 /* Handle as an unconditional branch to get correct delay
5971 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5972 ctx
->hflags
|= MIPS_HFLAG_B
;
5974 case OPC_BLTZALL
: /* 0 < 0 likely */
5975 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5976 /* Skip the instruction in the delay slot */
5977 ctx
->base
.pc_next
+= 4;
5979 case OPC_BNEL
: /* rx != rx likely */
5980 case OPC_BGTZL
: /* 0 > 0 likely */
5981 case OPC_BLTZL
: /* 0 < 0 likely */
5982 /* Skip the instruction in the delay slot */
5983 ctx
->base
.pc_next
+= 4;
5986 ctx
->hflags
|= MIPS_HFLAG_B
;
5989 ctx
->hflags
|= MIPS_HFLAG_BX
;
5993 ctx
->hflags
|= MIPS_HFLAG_B
;
5996 ctx
->hflags
|= MIPS_HFLAG_BR
;
6000 ctx
->hflags
|= MIPS_HFLAG_BR
;
6003 MIPS_INVAL("branch/jump");
6004 generate_exception_end(ctx
, EXCP_RI
);
/* Conditional branches: materialize the condition into 'bcond'. */
6010 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6013 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6016 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6019 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6022 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6025 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6028 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6032 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6036 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6039 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6042 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6045 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6048 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6051 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6054 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6056 #if defined(TARGET_MIPS64)
6058 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6062 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6065 ctx
->hflags
|= MIPS_HFLAG_BC
;
6068 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6071 ctx
->hflags
|= MIPS_HFLAG_BL
;
6074 MIPS_INVAL("conditional branch/jump");
6075 generate_exception_end(ctx
, EXCP_RI
);
6080 ctx
->btarget
= btgt
;
6082 switch (delayslot_size
) {
6084 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6087 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
/* And-link: write return address past instruction + delay slot;
   'lowbit' keeps the ISA-mode bit for MIPS16/microMIPS. */
6092 int post_delay
= insn_bytes
+ delayslot_size
;
6093 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6095 tcg_gen_movi_tl(cpu_gpr
[blink
],
6096 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6100 if (insn_bytes
== 2)
6101 ctx
->hflags
|= MIPS_HFLAG_B16
;
/*
 * NOTE(review): line-mangled extraction — statements split across lines,
 * leftover "NNNN" prefixes, and many original lines (braces, switch/case
 * labels, 'break's) missing.  Comments only added; all original tokens
 * untouched.  Restore from the upstream file before building.
 */
6107 /* nanoMIPS Branches */
/*
 * nanoMIPS counterpart of gen_compute_branch: no delay slots.  Loads
 * operands, computes btgt/bcond, records the branch kind in ctx->hflags
 * and the target in ctx->btarget, and writes the link register for
 * and-link variants.
 */
6108 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6110 int rs
, int rt
, int32_t offset
)
6112 target_ulong btgt
= -1;
6113 int bcond_compute
= 0;
6114 TCGv t0
= tcg_temp_new();
6115 TCGv t1
= tcg_temp_new();
6117 /* Load needed operands */
6121 /* Compare two registers */
6123 gen_load_gpr(t0
, rs
);
6124 gen_load_gpr(t1
, rt
);
6127 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6130 /* Compare to zero */
6132 gen_load_gpr(t0
, rs
);
6135 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
/* DSP branch-on-position: mask the dspctrl 'pos' field. */
6138 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6140 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6144 /* Jump to register */
6145 if (offset
!= 0 && offset
!= 16) {
6146 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6147 others are reserved. */
6148 MIPS_INVAL("jump hint");
6149 generate_exception_end(ctx
, EXCP_RI
);
6152 gen_load_gpr(btarget
, rs
);
6155 MIPS_INVAL("branch/jump");
6156 generate_exception_end(ctx
, EXCP_RI
);
6159 if (bcond_compute
== 0) {
6160 /* No condition to be computed */
6162 case OPC_BEQ
: /* rx == rx */
6164 ctx
->hflags
|= MIPS_HFLAG_B
;
6166 case OPC_BGEZAL
: /* 0 >= 0 */
6167 /* Always take and link */
6168 tcg_gen_movi_tl(cpu_gpr
[31],
6169 ctx
->base
.pc_next
+ insn_bytes
);
6170 ctx
->hflags
|= MIPS_HFLAG_B
;
6172 case OPC_BNE
: /* rx != rx */
6173 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6174 /* Skip the instruction in the delay slot */
6175 ctx
->base
.pc_next
+= 4;
6178 ctx
->hflags
|= MIPS_HFLAG_BR
;
6182 tcg_gen_movi_tl(cpu_gpr
[rt
],
6183 ctx
->base
.pc_next
+ insn_bytes
);
6185 ctx
->hflags
|= MIPS_HFLAG_BR
;
6188 MIPS_INVAL("branch/jump");
6189 generate_exception_end(ctx
, EXCP_RI
);
/* Conditional branches: materialize the condition into 'bcond'. */
6195 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6198 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6201 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6202 tcg_gen_movi_tl(cpu_gpr
[31],
6203 ctx
->base
.pc_next
+ insn_bytes
);
6206 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6208 ctx
->hflags
|= MIPS_HFLAG_BC
;
6211 MIPS_INVAL("conditional branch/jump");
6212 generate_exception_end(ctx
, EXCP_RI
);
6217 ctx
->btarget
= btgt
;
6220 if (insn_bytes
== 2) {
6221 ctx
->hflags
|= MIPS_HFLAG_B16
;
6228 /* special3 bitfield operations */
6229 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
6230 int rs
, int lsb
, int msb
)
6232 TCGv t0
= tcg_temp_new();
6233 TCGv t1
= tcg_temp_new();
6235 gen_load_gpr(t1
, rs
);
6238 if (lsb
+ msb
> 31) {
6242 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6244 /* The two checks together imply that lsb == 0,
6245 so this is a simple sign-extension. */
6246 tcg_gen_ext32s_tl(t0
, t1
);
6249 #if defined(TARGET_MIPS64)
6258 if (lsb
+ msb
> 63) {
6261 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6268 gen_load_gpr(t0
, rt
);
6269 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6270 tcg_gen_ext32s_tl(t0
, t0
);
6272 #if defined(TARGET_MIPS64)
6283 gen_load_gpr(t0
, rt
);
6284 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6289 MIPS_INVAL("bitops");
6290 generate_exception_end(ctx
, EXCP_RI
);
6295 gen_store_gpr(t0
, rt
);
6300 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6305 /* If no destination, treat it as a NOP. */
6309 t0
= tcg_temp_new();
6310 gen_load_gpr(t0
, rt
);
6314 TCGv t1
= tcg_temp_new();
6315 TCGv t2
= tcg_const_tl(0x00FF00FF);
6317 tcg_gen_shri_tl(t1
, t0
, 8);
6318 tcg_gen_and_tl(t1
, t1
, t2
);
6319 tcg_gen_and_tl(t0
, t0
, t2
);
6320 tcg_gen_shli_tl(t0
, t0
, 8);
6321 tcg_gen_or_tl(t0
, t0
, t1
);
6324 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6328 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6331 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6333 #if defined(TARGET_MIPS64)
6336 TCGv t1
= tcg_temp_new();
6337 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6339 tcg_gen_shri_tl(t1
, t0
, 8);
6340 tcg_gen_and_tl(t1
, t1
, t2
);
6341 tcg_gen_and_tl(t0
, t0
, t2
);
6342 tcg_gen_shli_tl(t0
, t0
, 8);
6343 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6350 TCGv t1
= tcg_temp_new();
6351 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6353 tcg_gen_shri_tl(t1
, t0
, 16);
6354 tcg_gen_and_tl(t1
, t1
, t2
);
6355 tcg_gen_and_tl(t0
, t0
, t2
);
6356 tcg_gen_shli_tl(t0
, t0
, 16);
6357 tcg_gen_or_tl(t0
, t0
, t1
);
6358 tcg_gen_shri_tl(t1
, t0
, 32);
6359 tcg_gen_shli_tl(t0
, t0
, 32);
6360 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6367 MIPS_INVAL("bsfhl");
6368 generate_exception_end(ctx
, EXCP_RI
);
6375 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6384 t0
= tcg_temp_new();
6385 t1
= tcg_temp_new();
6386 gen_load_gpr(t0
, rs
);
6387 gen_load_gpr(t1
, rt
);
6388 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6389 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6390 if (opc
== OPC_LSA
) {
6391 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6400 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6408 t0
= tcg_temp_new();
6409 if (bits
== 0 || bits
== wordsz
) {
6411 gen_load_gpr(t0
, rt
);
6413 gen_load_gpr(t0
, rs
);
6417 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6419 #if defined(TARGET_MIPS64)
6421 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6426 TCGv t1
= tcg_temp_new();
6427 gen_load_gpr(t0
, rt
);
6428 gen_load_gpr(t1
, rs
);
6432 TCGv_i64 t2
= tcg_temp_new_i64();
6433 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6434 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6435 gen_move_low32(cpu_gpr
[rd
], t2
);
6436 tcg_temp_free_i64(t2
);
6439 #if defined(TARGET_MIPS64)
6441 tcg_gen_shli_tl(t0
, t0
, bits
);
6442 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6443 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6453 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6456 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6459 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6462 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6465 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6472 t0
= tcg_temp_new();
6473 gen_load_gpr(t0
, rt
);
6476 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6478 #if defined(TARGET_MIPS64)
6480 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6487 #ifndef CONFIG_USER_ONLY
6488 /* CP0 (MMU and control) */
6489 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6491 TCGv_i64 t0
= tcg_temp_new_i64();
6492 TCGv_i64 t1
= tcg_temp_new_i64();
6494 tcg_gen_ext_tl_i64(t0
, arg
);
6495 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6496 #if defined(TARGET_MIPS64)
6497 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6499 tcg_gen_concat32_i64(t1
, t1
, t0
);
6501 tcg_gen_st_i64(t1
, cpu_env
, off
);
6502 tcg_temp_free_i64(t1
);
6503 tcg_temp_free_i64(t0
);
6506 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6508 TCGv_i64 t0
= tcg_temp_new_i64();
6509 TCGv_i64 t1
= tcg_temp_new_i64();
6511 tcg_gen_ext_tl_i64(t0
, arg
);
6512 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6513 tcg_gen_concat32_i64(t1
, t1
, t0
);
6514 tcg_gen_st_i64(t1
, cpu_env
, off
);
6515 tcg_temp_free_i64(t1
);
6516 tcg_temp_free_i64(t0
);
6519 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6521 TCGv_i64 t0
= tcg_temp_new_i64();
6523 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6524 #if defined(TARGET_MIPS64)
6525 tcg_gen_shri_i64(t0
, t0
, 30);
6527 tcg_gen_shri_i64(t0
, t0
, 32);
6529 gen_move_low32(arg
, t0
);
6530 tcg_temp_free_i64(t0
);
6533 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6535 TCGv_i64 t0
= tcg_temp_new_i64();
6537 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6538 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6539 gen_move_low32(arg
, t0
);
6540 tcg_temp_free_i64(t0
);
6543 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6545 TCGv_i32 t0
= tcg_temp_new_i32();
6547 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6548 tcg_gen_ext_i32_tl(arg
, t0
);
6549 tcg_temp_free_i32(t0
);
6552 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6554 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6555 tcg_gen_ext32s_tl(arg
, arg
);
6558 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6560 TCGv_i32 t0
= tcg_temp_new_i32();
6562 tcg_gen_trunc_tl_i32(t0
, arg
);
6563 tcg_gen_st_i32(t0
, cpu_env
, off
);
6564 tcg_temp_free_i32(t0
);
/*
 * Guard used inside gen_mfhc0/gen_mthc0 (and peers): when the feature
 * condition 'c' is false, jump to the function's local
 * 'cp0_unimplemented' label.
 *
 * NOTE(review): macro body rebuilt from a line-mangled paste; only the
 * '#define' line and the 'goto' survived the mangling.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
/*
 * NOTE(review): line-mangled extraction — statements split across lines,
 * leftover "NNNN" prefixes, and the switch scaffolding plus the 'sel'
 * case labels are missing.  Comments only added; all original tokens
 * untouched.  Restore from the upstream file before building.
 */
/*
 * MFHC0: read the upper word of a 64-bit CP0 register into arg,
 * dispatching on (reg, sel).  Feature gates use CP0_CHECK; anything
 * unhandled falls through to cp0_unimplemented, which logs and returns 0.
 */
6574 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6576 const char *register_name
= "invalid";
6579 case CP0_REGISTER_02
:
6582 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6583 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6584 register_name
= "EntryLo0";
6587 goto cp0_unimplemented
;
6590 case CP0_REGISTER_03
:
6593 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6594 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6595 register_name
= "EntryLo1";
6598 goto cp0_unimplemented
;
6601 case CP0_REGISTER_09
:
6604 CP0_CHECK(ctx
->saar
);
6605 gen_helper_mfhc0_saar(arg
, cpu_env
);
6606 register_name
= "SAAR";
6609 goto cp0_unimplemented
;
6612 case CP0_REGISTER_17
:
6615 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6616 ctx
->CP0_LLAddr_shift
);
6617 register_name
= "LLAddr";
6620 CP0_CHECK(ctx
->mrp
);
6621 gen_helper_mfhc0_maar(arg
, cpu_env
);
6622 register_name
= "MAAR";
6625 goto cp0_unimplemented
;
6628 case CP0_REGISTER_28
:
6634 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6635 register_name
= "TagLo";
6638 goto cp0_unimplemented
;
6642 goto cp0_unimplemented
;
6644 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
/* cp0_unimplemented: log the unknown register and read as zero. */
6648 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
6649 register_name
, reg
, sel
);
6650 tcg_gen_movi_tl(arg
, 0);
/*
 * NOTE(review): line-mangled extraction — statements split across lines,
 * leftover "NNNN" prefixes, and the switch scaffolding plus the 'sel'
 * case labels are missing.  Comments only added; all original tokens
 * untouched.  Restore from the upstream file before building.
 */
/*
 * MTHC0: write arg to the upper word of a 64-bit CP0 register,
 * dispatching on (reg, sel).  'mask' limits EntryLo/TagLo writes to the
 * bits implied by the configured physical address size (PAMask >> 36).
 * Unhandled registers fall through to cp0_unimplemented (log only).
 */
6653 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6655 const char *register_name
= "invalid";
6656 uint64_t mask
= ctx
->PAMask
>> 36;
6659 case CP0_REGISTER_02
:
6662 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6663 tcg_gen_andi_tl(arg
, arg
, mask
);
6664 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6665 register_name
= "EntryLo0";
6668 goto cp0_unimplemented
;
6671 case CP0_REGISTER_03
:
6674 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6675 tcg_gen_andi_tl(arg
, arg
, mask
);
6676 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6677 register_name
= "EntryLo1";
6680 goto cp0_unimplemented
;
6683 case CP0_REGISTER_09
:
6686 CP0_CHECK(ctx
->saar
);
6687 gen_helper_mthc0_saar(cpu_env
, arg
);
6688 register_name
= "SAAR";
6691 goto cp0_unimplemented
;
6693 case CP0_REGISTER_17
:
6696 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6697 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6698 relevant for modern MIPS cores supporting MTHC0, therefore
6699 treating MTHC0 to LLAddr as NOP. */
6700 register_name
= "LLAddr";
6703 CP0_CHECK(ctx
->mrp
);
6704 gen_helper_mthc0_maar(cpu_env
, arg
);
6705 register_name
= "MAAR";
6708 goto cp0_unimplemented
;
6711 case CP0_REGISTER_28
:
6717 tcg_gen_andi_tl(arg
, arg
, mask
);
6718 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6719 register_name
= "TagLo";
6722 goto cp0_unimplemented
;
6726 goto cp0_unimplemented
;
6728 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
/* cp0_unimplemented: write is dropped, only logged. */
6731 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6732 register_name
, reg
, sel
);
6735 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6737 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6738 tcg_gen_movi_tl(arg
, 0);
6740 tcg_gen_movi_tl(arg
, ~0);
6744 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6746 const char *register_name
= "invalid";
6749 check_insn(ctx
, ISA_MIPS32
);
6752 case CP0_REGISTER_00
:
6755 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6756 register_name
= "Index";
6759 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6760 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6761 register_name
= "MVPControl";
6764 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6765 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6766 register_name
= "MVPConf0";
6769 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6770 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6771 register_name
= "MVPConf1";
6775 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6776 register_name
= "VPControl";
6779 goto cp0_unimplemented
;
6782 case CP0_REGISTER_01
:
6785 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6786 gen_helper_mfc0_random(arg
, cpu_env
);
6787 register_name
= "Random";
6790 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6791 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6792 register_name
= "VPEControl";
6795 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6796 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6797 register_name
= "VPEConf0";
6800 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6801 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6802 register_name
= "VPEConf1";
6805 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6806 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6807 register_name
= "YQMask";
6810 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6811 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6812 register_name
= "VPESchedule";
6815 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6816 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6817 register_name
= "VPEScheFBack";
6820 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6821 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6822 register_name
= "VPEOpt";
6825 goto cp0_unimplemented
;
6828 case CP0_REGISTER_02
:
6832 TCGv_i64 tmp
= tcg_temp_new_i64();
6833 tcg_gen_ld_i64(tmp
, cpu_env
,
6834 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6835 #if defined(TARGET_MIPS64)
6837 /* Move RI/XI fields to bits 31:30 */
6838 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6839 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6842 gen_move_low32(arg
, tmp
);
6843 tcg_temp_free_i64(tmp
);
6845 register_name
= "EntryLo0";
6848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6849 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6850 register_name
= "TCStatus";
6853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6854 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6855 register_name
= "TCBind";
6858 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6860 register_name
= "TCRestart";
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6865 register_name
= "TCHalt";
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6870 register_name
= "TCContext";
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6874 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6875 register_name
= "TCSchedule";
6878 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6879 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6880 register_name
= "TCScheFBack";
6883 goto cp0_unimplemented
;
6886 case CP0_REGISTER_03
:
6890 TCGv_i64 tmp
= tcg_temp_new_i64();
6891 tcg_gen_ld_i64(tmp
, cpu_env
,
6892 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6893 #if defined(TARGET_MIPS64)
6895 /* Move RI/XI fields to bits 31:30 */
6896 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6897 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6900 gen_move_low32(arg
, tmp
);
6901 tcg_temp_free_i64(tmp
);
6903 register_name
= "EntryLo1";
6907 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6908 register_name
= "GlobalNumber";
6911 goto cp0_unimplemented
;
6914 case CP0_REGISTER_04
:
6917 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6918 tcg_gen_ext32s_tl(arg
, arg
);
6919 register_name
= "Context";
6922 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6923 register_name
= "ContextConfig";
6924 goto cp0_unimplemented
;
6926 CP0_CHECK(ctx
->ulri
);
6927 tcg_gen_ld_tl(arg
, cpu_env
,
6928 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6929 tcg_gen_ext32s_tl(arg
, arg
);
6930 register_name
= "UserLocal";
6933 goto cp0_unimplemented
;
6936 case CP0_REGISTER_05
:
6939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6940 register_name
= "PageMask";
6943 check_insn(ctx
, ISA_MIPS32R2
);
6944 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6945 register_name
= "PageGrain";
6949 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6950 tcg_gen_ext32s_tl(arg
, arg
);
6951 register_name
= "SegCtl0";
6955 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6956 tcg_gen_ext32s_tl(arg
, arg
);
6957 register_name
= "SegCtl1";
6961 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6962 tcg_gen_ext32s_tl(arg
, arg
);
6963 register_name
= "SegCtl2";
6967 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6968 register_name
= "PWBase";
6972 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6973 register_name
= "PWField";
6977 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6978 register_name
= "PWSize";
6981 goto cp0_unimplemented
;
6984 case CP0_REGISTER_06
:
6987 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6988 register_name
= "Wired";
6991 check_insn(ctx
, ISA_MIPS32R2
);
6992 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6993 register_name
= "SRSConf0";
6996 check_insn(ctx
, ISA_MIPS32R2
);
6997 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6998 register_name
= "SRSConf1";
7001 check_insn(ctx
, ISA_MIPS32R2
);
7002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7003 register_name
= "SRSConf2";
7006 check_insn(ctx
, ISA_MIPS32R2
);
7007 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7008 register_name
= "SRSConf3";
7011 check_insn(ctx
, ISA_MIPS32R2
);
7012 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7013 register_name
= "SRSConf4";
7017 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7018 register_name
= "PWCtl";
7021 goto cp0_unimplemented
;
7024 case CP0_REGISTER_07
:
7027 check_insn(ctx
, ISA_MIPS32R2
);
7028 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7029 register_name
= "HWREna";
7032 goto cp0_unimplemented
;
7035 case CP0_REGISTER_08
:
7038 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7039 tcg_gen_ext32s_tl(arg
, arg
);
7040 register_name
= "BadVAddr";
7044 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7045 register_name
= "BadInstr";
7049 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7050 register_name
= "BadInstrP";
7054 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7055 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7056 register_name
= "BadInstrX";
7059 goto cp0_unimplemented
;
7062 case CP0_REGISTER_09
:
7065 /* Mark as an IO operation because we read the time. */
7066 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7069 gen_helper_mfc0_count(arg
, cpu_env
);
7070 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7073 /* Break the TB to be able to take timer interrupts immediately
7074 after reading count. DISAS_STOP isn't sufficient, we need to
7075 ensure we break completely out of translated code. */
7076 gen_save_pc(ctx
->base
.pc_next
+ 4);
7077 ctx
->base
.is_jmp
= DISAS_EXIT
;
7078 register_name
= "Count";
7081 CP0_CHECK(ctx
->saar
);
7082 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7083 register_name
= "SAARI";
7086 CP0_CHECK(ctx
->saar
);
7087 gen_helper_mfc0_saar(arg
, cpu_env
);
7088 register_name
= "SAAR";
7091 goto cp0_unimplemented
;
7094 case CP0_REGISTER_10
:
7097 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7098 tcg_gen_ext32s_tl(arg
, arg
);
7099 register_name
= "EntryHi";
7102 goto cp0_unimplemented
;
7105 case CP0_REGISTER_11
:
7108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7109 register_name
= "Compare";
7111 /* 6,7 are implementation dependent */
7113 goto cp0_unimplemented
;
7116 case CP0_REGISTER_12
:
7119 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7120 register_name
= "Status";
7123 check_insn(ctx
, ISA_MIPS32R2
);
7124 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7125 register_name
= "IntCtl";
7128 check_insn(ctx
, ISA_MIPS32R2
);
7129 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7130 register_name
= "SRSCtl";
7133 check_insn(ctx
, ISA_MIPS32R2
);
7134 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7135 register_name
= "SRSMap";
7138 goto cp0_unimplemented
;
7141 case CP0_REGISTER_13
:
7144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7145 register_name
= "Cause";
7148 goto cp0_unimplemented
;
7151 case CP0_REGISTER_14
:
7154 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7155 tcg_gen_ext32s_tl(arg
, arg
);
7156 register_name
= "EPC";
7159 goto cp0_unimplemented
;
7162 case CP0_REGISTER_15
:
7165 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7166 register_name
= "PRid";
7169 check_insn(ctx
, ISA_MIPS32R2
);
7170 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7171 tcg_gen_ext32s_tl(arg
, arg
);
7172 register_name
= "EBase";
7175 check_insn(ctx
, ISA_MIPS32R2
);
7176 CP0_CHECK(ctx
->cmgcr
);
7177 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7178 tcg_gen_ext32s_tl(arg
, arg
);
7179 register_name
= "CMGCRBase";
7182 goto cp0_unimplemented
;
7185 case CP0_REGISTER_16
:
7188 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7189 register_name
= "Config";
7192 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7193 register_name
= "Config1";
7196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7197 register_name
= "Config2";
7200 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7201 register_name
= "Config3";
7204 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7205 register_name
= "Config4";
7208 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7209 register_name
= "Config5";
7211 /* 6,7 are implementation dependent */
7213 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7214 register_name
= "Config6";
7217 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7218 register_name
= "Config7";
7221 goto cp0_unimplemented
;
7224 case CP0_REGISTER_17
:
7227 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7228 register_name
= "LLAddr";
7231 CP0_CHECK(ctx
->mrp
);
7232 gen_helper_mfc0_maar(arg
, cpu_env
);
7233 register_name
= "MAAR";
7236 CP0_CHECK(ctx
->mrp
);
7237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7238 register_name
= "MAARI";
7241 goto cp0_unimplemented
;
7244 case CP0_REGISTER_18
:
7254 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7255 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7256 register_name
= "WatchLo";
7259 goto cp0_unimplemented
;
7262 case CP0_REGISTER_19
:
7272 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7273 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7274 register_name
= "WatchHi";
7277 goto cp0_unimplemented
;
7280 case CP0_REGISTER_20
:
7283 #if defined(TARGET_MIPS64)
7284 check_insn(ctx
, ISA_MIPS3
);
7285 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7286 tcg_gen_ext32s_tl(arg
, arg
);
7287 register_name
= "XContext";
7291 goto cp0_unimplemented
;
7294 case CP0_REGISTER_21
:
7295 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7296 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7300 register_name
= "Framemask";
7303 goto cp0_unimplemented
;
7306 case CP0_REGISTER_22
:
7307 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7308 register_name
= "'Diagnostic"; /* implementation dependent */
7310 case CP0_REGISTER_23
:
7313 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7314 register_name
= "Debug";
7317 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7318 register_name
= "TraceControl";
7319 goto cp0_unimplemented
;
7321 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7322 register_name
= "TraceControl2";
7323 goto cp0_unimplemented
;
7325 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7326 register_name
= "UserTraceData";
7327 goto cp0_unimplemented
;
7329 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7330 register_name
= "TraceBPC";
7331 goto cp0_unimplemented
;
7333 goto cp0_unimplemented
;
7336 case CP0_REGISTER_24
:
7340 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7341 tcg_gen_ext32s_tl(arg
, arg
);
7342 register_name
= "DEPC";
7345 goto cp0_unimplemented
;
7348 case CP0_REGISTER_25
:
7351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7352 register_name
= "Performance0";
7355 // gen_helper_mfc0_performance1(arg);
7356 register_name
= "Performance1";
7357 goto cp0_unimplemented
;
7359 // gen_helper_mfc0_performance2(arg);
7360 register_name
= "Performance2";
7361 goto cp0_unimplemented
;
7363 // gen_helper_mfc0_performance3(arg);
7364 register_name
= "Performance3";
7365 goto cp0_unimplemented
;
7367 // gen_helper_mfc0_performance4(arg);
7368 register_name
= "Performance4";
7369 goto cp0_unimplemented
;
7371 // gen_helper_mfc0_performance5(arg);
7372 register_name
= "Performance5";
7373 goto cp0_unimplemented
;
7375 // gen_helper_mfc0_performance6(arg);
7376 register_name
= "Performance6";
7377 goto cp0_unimplemented
;
7379 // gen_helper_mfc0_performance7(arg);
7380 register_name
= "Performance7";
7381 goto cp0_unimplemented
;
7383 goto cp0_unimplemented
;
7386 case CP0_REGISTER_26
:
7389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7390 register_name
= "ErrCtl";
7393 goto cp0_unimplemented
;
7396 case CP0_REGISTER_27
:
7402 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7403 register_name
= "CacheErr";
7406 goto cp0_unimplemented
;
7409 case CP0_REGISTER_28
:
7416 TCGv_i64 tmp
= tcg_temp_new_i64();
7417 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7418 gen_move_low32(arg
, tmp
);
7419 tcg_temp_free_i64(tmp
);
7421 register_name
= "TagLo";
7427 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7428 register_name
= "DataLo";
7431 goto cp0_unimplemented
;
7434 case CP0_REGISTER_29
:
7440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7441 register_name
= "TagHi";
7447 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7448 register_name
= "DataHi";
7451 goto cp0_unimplemented
;
7454 case CP0_REGISTER_30
:
7457 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7458 tcg_gen_ext32s_tl(arg
, arg
);
7459 register_name
= "ErrorEPC";
7462 goto cp0_unimplemented
;
7465 case CP0_REGISTER_31
:
7469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7470 register_name
= "DESAVE";
7478 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7479 tcg_gen_ld_tl(arg
, cpu_env
,
7480 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7481 tcg_gen_ext32s_tl(arg
, arg
);
7482 register_name
= "KScratch";
7485 goto cp0_unimplemented
;
7489 goto cp0_unimplemented
;
7491 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
7495 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
7496 register_name
, reg
, sel
);
7497 gen_mfc0_unimplemented(ctx
, arg
);
/*
 * gen_mtc0(): translate a guest MTC0 instruction — a 32-bit write of the
 * GPR value in `arg` into coprocessor-0 register (reg, sel).  Dispatches on
 * the register/selector pair to the matching mtc0 helper (or a direct TCG
 * store into CPUMIPSState), records the register name for tracing/logging,
 * and ends the translation block (DISAS_STOP / DISAS_EXIT) wherever the
 * write can change hflags, switch the execution mode, or pend an interrupt.
 *
 * NOTE(review): this chunk is a lossily-extracted listing — each statement
 * is split across lines, the original file's line numbers are fused into
 * the text, and structural lines (braces, switch/sel dispatch, break
 * statements, gen_io_start()) are absent from the extraction.  Only
 * comments have been added here; every original token is untouched.
 * Restore the structure against the upstream file before compiling.
 */
7500 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
/* Name used for trace/log output; stays "invalid" until a case matches. */
7502 const char *register_name
= "invalid";
/* Non-zero selectors require at least MIPS32 (presumably guarded by
 * `sel != 0` in the full source — TODO confirm upstream). */
7505 check_insn(ctx
, ISA_MIPS32
)
;
/* icount mode: CP0 writes are treated as I/O-like (timer side effects);
 * presumably paired with gen_io_start(), missing from this extraction. */
7507 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
/* reg 0: Index, plus MT-ASE MVPControl/MVPConf0/MVPConf1 and VPControl
 * (MVPConf0/1 and VPControl writes emit no store — effectively ignored). */
7512 case CP0_REGISTER_00
:
7515 gen_helper_mtc0_index(cpu_env
, arg
);
7516 register_name
= "Index";
7519 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7520 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7521 register_name
= "MVPControl";
7524 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7526 register_name
= "MVPConf0";
7529 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7531 register_name
= "MVPConf1";
7536 register_name
= "VPControl";
7539 goto cp0_unimplemented
;
/* reg 1: Random (write ignored — no store emitted) and the MT-ASE VPE
 * register file (helpers or direct stores, each gated on ASE_MT). */
7542 case CP0_REGISTER_01
:
7546 register_name
= "Random";
7549 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7550 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7551 register_name
= "VPEControl";
7554 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7555 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7556 register_name
= "VPEConf0";
7559 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7560 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7561 register_name
= "VPEConf1";
7564 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7565 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7566 register_name
= "YQMask";
7569 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
/* VPESchedule/VPEScheFBack have no side effects: plain TCG stores. */
7570 tcg_gen_st_tl(arg
, cpu_env
,
7571 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7572 register_name
= "VPESchedule";
7575 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7576 tcg_gen_st_tl(arg
, cpu_env
,
7577 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7578 register_name
= "VPEScheFBack";
7581 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7582 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7583 register_name
= "VPEOpt";
7586 goto cp0_unimplemented
;
/* reg 2: EntryLo0 plus the MT-ASE per-TC (thread context) registers. */
7589 case CP0_REGISTER_02
:
7592 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7593 register_name
= "EntryLo0";
7596 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7597 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7598 register_name
= "TCStatus";
7601 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7602 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7603 register_name
= "TCBind";
7606 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7607 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7608 register_name
= "TCRestart";
7611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7612 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7613 register_name
= "TCHalt";
7616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7617 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7618 register_name
= "TCContext";
7621 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7622 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7623 register_name
= "TCSchedule";
7626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7627 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7628 register_name
= "TCScheFBack";
7631 goto cp0_unimplemented
;
/* reg 3: EntryLo1; GlobalNumber write is ignored (name only). */
7634 case CP0_REGISTER_03
:
7637 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7638 register_name
= "EntryLo1";
7643 register_name
= "GlobalNumber";
7646 goto cp0_unimplemented
;
/* reg 4: Context; ContextConfig is unimplemented (helper commented out);
 * UserLocal is a direct store into the active thread context. */
7649 case CP0_REGISTER_04
:
7652 gen_helper_mtc0_context(cpu_env
, arg
);
7653 register_name
= "Context";
7656 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7657 register_name
= "ContextConfig";
7658 goto cp0_unimplemented
;
7660 CP0_CHECK(ctx
->ulri
);
7661 tcg_gen_st_tl(arg
, cpu_env
,
7662 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7663 register_name
= "UserLocal";
7666 goto cp0_unimplemented
;
/* reg 5: PageMask, PageGrain (stops translation), SegCtl0-2, and the
 * hardware page-walker registers PWBase/PWField/PWSize. */
7669 case CP0_REGISTER_05
:
7672 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7673 register_name
= "PageMask";
7676 check_insn(ctx
, ISA_MIPS32R2
);
7677 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7678 register_name
= "PageGrain";
7679 ctx
->base
.is_jmp
= DISAS_STOP
;
7683 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7684 register_name
= "SegCtl0";
7688 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7689 register_name
= "SegCtl1";
7693 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7694 register_name
= "SegCtl2";
7698 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7699 register_name
= "PWBase";
7703 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7704 register_name
= "PWField";
7708 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7709 register_name
= "PWSize";
7712 goto cp0_unimplemented
;
/* reg 6: Wired, the R2 shadow-register SRSConf0-4 bank, and PWCtl. */
7715 case CP0_REGISTER_06
:
7718 gen_helper_mtc0_wired(cpu_env
, arg
);
7719 register_name
= "Wired";
7722 check_insn(ctx
, ISA_MIPS32R2
);
7723 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7724 register_name
= "SRSConf0";
7727 check_insn(ctx
, ISA_MIPS32R2
);
7728 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7729 register_name
= "SRSConf1";
7732 check_insn(ctx
, ISA_MIPS32R2
);
7733 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7734 register_name
= "SRSConf2";
7737 check_insn(ctx
, ISA_MIPS32R2
);
7738 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7739 register_name
= "SRSConf3";
7742 check_insn(ctx
, ISA_MIPS32R2
);
7743 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7744 register_name
= "SRSConf4";
7748 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7749 register_name
= "PWCtl";
7752 goto cp0_unimplemented
;
/* reg 7: HWREna (R2); write stops translation. */
7755 case CP0_REGISTER_07
:
7758 check_insn(ctx
, ISA_MIPS32R2
);
7759 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7760 ctx
->base
.is_jmp
= DISAS_STOP
;
7761 register_name
= "HWREna";
7764 goto cp0_unimplemented
;
/* reg 8: BadVAddr / BadInstr / BadInstrP / BadInstrX — read-only; no
 * store is emitted, only the trace name is recorded. */
7767 case CP0_REGISTER_08
:
7771 register_name
= "BadVAddr";
7775 register_name
= "BadInstr";
7779 register_name
= "BadInstrP";
7783 register_name
= "BadInstrX";
7786 goto cp0_unimplemented
;
/* reg 9: Count, plus SAARI/SAAR (gated on ctx->saar). */
7789 case CP0_REGISTER_09
:
7792 gen_helper_mtc0_count(cpu_env
, arg
);
7793 register_name
= "Count";
7796 CP0_CHECK(ctx
->saar
);
7797 gen_helper_mtc0_saari(cpu_env
, arg
);
7798 register_name
= "SAARI";
7801 CP0_CHECK(ctx
->saar
);
7802 gen_helper_mtc0_saar(cpu_env
, arg
);
7803 register_name
= "SAAR";
7806 goto cp0_unimplemented
;
/* reg 10: EntryHi. */
7809 case CP0_REGISTER_10
:
7812 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7813 register_name
= "EntryHi";
7816 goto cp0_unimplemented
;
/* reg 11: Compare (timer compare). */
7819 case CP0_REGISTER_11
:
7822 gen_helper_mtc0_compare(cpu_env
, arg
);
7823 register_name
= "Compare";
7825 /* 6,7 are implementation dependent */
7827 goto cp0_unimplemented
;
/* reg 12: Status requires a full TB exit (hflags may change); the R2
 * IntCtl/SRSCtl/SRSMap writes stop translation. */
7830 case CP0_REGISTER_12
:
7833 save_cpu_state(ctx
, 1);
7834 gen_helper_mtc0_status(cpu_env
, arg
);
7835 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7836 gen_save_pc(ctx
->base
.pc_next
+ 4);
7837 ctx
->base
.is_jmp
= DISAS_EXIT
;
7838 register_name
= "Status";
7841 check_insn(ctx
, ISA_MIPS32R2
);
7842 gen_helper_mtc0_intctl(cpu_env
, arg
);
7843 /* Stop translation as we may have switched the execution mode */
7844 ctx
->base
.is_jmp
= DISAS_STOP
;
7845 register_name
= "IntCtl";
7848 check_insn(ctx
, ISA_MIPS32R2
);
7849 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7850 /* Stop translation as we may have switched the execution mode */
7851 ctx
->base
.is_jmp
= DISAS_STOP
;
7852 register_name
= "SRSCtl";
7855 check_insn(ctx
, ISA_MIPS32R2
);
7856 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7857 /* Stop translation as we may have switched the execution mode */
7858 ctx
->base
.is_jmp
= DISAS_STOP
;
7859 register_name
= "SRSMap";
7862 goto cp0_unimplemented
;
/* reg 13: Cause — full TB exit so pending interrupts get checked. */
7865 case CP0_REGISTER_13
:
7868 save_cpu_state(ctx
, 1);
7869 gen_helper_mtc0_cause(cpu_env
, arg
);
7870 /* Stop translation as we may have triggered an interrupt.
7871 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7872 * translated code to check for pending interrupts. */
7873 gen_save_pc(ctx
->base
.pc_next
+ 4);
7874 ctx
->base
.is_jmp
= DISAS_EXIT
;
7875 register_name
= "Cause";
7878 goto cp0_unimplemented
;
/* reg 14: EPC — plain store, no side effects. */
7881 case CP0_REGISTER_14
:
7884 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7885 register_name
= "EPC";
7888 goto cp0_unimplemented
;
/* reg 15: PRid write ignored (read-only, name only); EBase (R2). */
7891 case CP0_REGISTER_15
:
7895 register_name
= "PRid";
7898 check_insn(ctx
, ISA_MIPS32R2
);
7899 gen_helper_mtc0_ebase(cpu_env
, arg
);
7900 register_name
= "EBase";
7903 goto cp0_unimplemented
;
/* reg 16: Config0-5 — most writes stop translation because a config
 * change can switch the execution mode; Config1 is read-only here. */
7906 case CP0_REGISTER_16
:
7909 gen_helper_mtc0_config0(cpu_env
, arg
);
7910 register_name
= "Config";
7911 /* Stop translation as we may have switched the execution mode */
7912 ctx
->base
.is_jmp
= DISAS_STOP
;
7915 /* ignored, read only */
7916 register_name
= "Config1";
7919 gen_helper_mtc0_config2(cpu_env
, arg
);
7920 register_name
= "Config2";
7921 /* Stop translation as we may have switched the execution mode */
7922 ctx
->base
.is_jmp
= DISAS_STOP
;
7925 gen_helper_mtc0_config3(cpu_env
, arg
);
7926 register_name
= "Config3";
7927 /* Stop translation as we may have switched the execution mode */
7928 ctx
->base
.is_jmp
= DISAS_STOP
;
7931 gen_helper_mtc0_config4(cpu_env
, arg
);
7932 register_name
= "Config4";
7933 ctx
->base
.is_jmp
= DISAS_STOP
;
7936 gen_helper_mtc0_config5(cpu_env
, arg
);
7937 register_name
= "Config5";
7938 /* Stop translation as we may have switched the execution mode */
7939 ctx
->base
.is_jmp
= DISAS_STOP
;
7941 /* 6,7 are implementation dependent */
7944 register_name
= "Config6";
7948 register_name
= "Config7";
7951 register_name
= "Invalid config selector";
7952 goto cp0_unimplemented
;
/* reg 17: LLAddr, plus MAAR/MAARI (gated on ctx->mrp). */
7955 case CP0_REGISTER_17
:
7958 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7959 register_name
= "LLAddr";
7962 CP0_CHECK(ctx
->mrp
);
7963 gen_helper_mtc0_maar(cpu_env
, arg
);
7964 register_name
= "MAAR";
7967 CP0_CHECK(ctx
->mrp
);
7968 gen_helper_mtc0_maari(cpu_env
, arg
);
7969 register_name
= "MAARI";
7972 goto cp0_unimplemented
;
/* reg 18: WatchLo[sel] — requires watch registers (Config1.WR). */
7975 case CP0_REGISTER_18
:
7985 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7986 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7987 register_name
= "WatchLo";
7990 goto cp0_unimplemented
;
/* reg 19: WatchHi[sel] — same Config1.WR gate. */
7993 case CP0_REGISTER_19
:
8003 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8004 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8005 register_name
= "WatchHi";
8008 goto cp0_unimplemented
;
/* reg 20: XContext — 64-bit targets only (MIPS III+). */
8011 case CP0_REGISTER_20
:
8014 #if defined(TARGET_MIPS64)
8015 check_insn(ctx
, ISA_MIPS3
);
8016 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8017 register_name
= "XContext";
8021 goto cp0_unimplemented
;
/* reg 21: officially reserved; pre-R6 uses sel 0 as R1x000 Framemask. */
8024 case CP0_REGISTER_21
:
8025 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8026 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8029 gen_helper_mtc0_framemask(cpu_env
, arg
);
8030 register_name
= "Framemask";
8033 goto cp0_unimplemented
;
/* reg 22: Diagnostic — implementation dependent, write ignored. */
8036 case CP0_REGISTER_22
:
8038 register_name
= "Diagnostic"; /* implementation dependent */
/* reg 23: Debug (EJTAG, full TB exit) plus PDtrace registers that are
 * unimplemented (helpers commented out, fall through to the log path). */
8040 case CP0_REGISTER_23
:
8043 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8044 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8045 gen_save_pc(ctx
->base
.pc_next
+ 4);
8046 ctx
->base
.is_jmp
= DISAS_EXIT
;
8047 register_name
= "Debug";
8050 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8051 register_name
= "TraceControl";
8052 /* Stop translation as we may have switched the execution mode */
8053 ctx
->base
.is_jmp
= DISAS_STOP
;
8054 goto cp0_unimplemented
;
8056 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8057 register_name
= "TraceControl2";
8058 /* Stop translation as we may have switched the execution mode */
8059 ctx
->base
.is_jmp
= DISAS_STOP
;
8060 goto cp0_unimplemented
;
8062 /* Stop translation as we may have switched the execution mode */
8063 ctx
->base
.is_jmp
= DISAS_STOP
;
8064 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8065 register_name
= "UserTraceData";
8066 /* Stop translation as we may have switched the execution mode */
8067 ctx
->base
.is_jmp
= DISAS_STOP
;
8068 goto cp0_unimplemented
;
8070 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8071 /* Stop translation as we may have switched the execution mode */
8072 ctx
->base
.is_jmp
= DISAS_STOP
;
8073 register_name
= "TraceBPC";
8074 goto cp0_unimplemented
;
8076 goto cp0_unimplemented
;
/* reg 24: DEPC — plain store. */
8079 case CP0_REGISTER_24
:
8083 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8084 register_name
= "DEPC";
8087 goto cp0_unimplemented
;
/* reg 25: Performance0; counters 1-7 are unimplemented (helpers
 * commented out, unimplemented path taken). */
8090 case CP0_REGISTER_25
:
8093 gen_helper_mtc0_performance0(cpu_env
, arg
);
8094 register_name
= "Performance0";
8097 // gen_helper_mtc0_performance1(arg);
8098 register_name
= "Performance1";
8099 goto cp0_unimplemented
;
8101 // gen_helper_mtc0_performance2(arg);
8102 register_name
= "Performance2";
8103 goto cp0_unimplemented
;
8105 // gen_helper_mtc0_performance3(arg);
8106 register_name
= "Performance3";
8107 goto cp0_unimplemented
;
8109 // gen_helper_mtc0_performance4(arg);
8110 register_name
= "Performance4";
8111 goto cp0_unimplemented
;
8113 // gen_helper_mtc0_performance5(arg);
8114 register_name
= "Performance5";
8115 goto cp0_unimplemented
;
8117 // gen_helper_mtc0_performance6(arg);
8118 register_name
= "Performance6";
8119 goto cp0_unimplemented
;
8121 // gen_helper_mtc0_performance7(arg);
8122 register_name
= "Performance7";
8123 goto cp0_unimplemented
;
8125 goto cp0_unimplemented
;
/* reg 26: ErrCtl — write stops translation. */
8128 case CP0_REGISTER_26
:
8131 gen_helper_mtc0_errctl(cpu_env
, arg
);
8132 ctx
->base
.is_jmp
= DISAS_STOP
;
8133 register_name
= "ErrCtl";
8136 goto cp0_unimplemented
;
/* reg 27: CacheErr — write ignored (name only). */
8139 case CP0_REGISTER_27
:
8146 register_name
= "CacheErr";
8149 goto cp0_unimplemented
;
/* reg 28: TagLo / DataLo cache tag registers. */
8152 case CP0_REGISTER_28
:
8158 gen_helper_mtc0_taglo(cpu_env
, arg
);
8159 register_name
= "TagLo";
8165 gen_helper_mtc0_datalo(cpu_env
, arg
);
8166 register_name
= "DataLo";
8169 goto cp0_unimplemented
;
/* reg 29: TagHi / DataHi cache tag registers. */
8172 case CP0_REGISTER_29
:
8178 gen_helper_mtc0_taghi(cpu_env
, arg
);
8179 register_name
= "TagHi";
8185 gen_helper_mtc0_datahi(cpu_env
, arg
);
8186 register_name
= "DataHi";
8189 register_name
= "invalid sel";
8190 goto cp0_unimplemented
;
/* reg 30: ErrorEPC — plain store. */
8193 case CP0_REGISTER_30
:
8196 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8197 register_name
= "ErrorEPC";
8200 goto cp0_unimplemented
;
/* reg 31: DESAVE, plus KScratch[sel-2] gated on the kscrexist mask. */
8203 case CP0_REGISTER_31
:
8207 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8208 register_name
= "DESAVE";
8216 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8217 tcg_gen_st_tl(arg
, cpu_env
,
8218 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8219 register_name
= "KScratch";
8222 goto cp0_unimplemented
;
8226 goto cp0_unimplemented
;
/* Common exit: trace the access; in icount mode force a full TB exit
 * since any CP0 write may make an interrupt pending. */
8228 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8230 /* For simplicity assume that all writes can cause interrupts. */
8231 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8233 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8234 * translated code to check for pending interrupts. */
8235 gen_save_pc(ctx
->base
.pc_next
+ 4);
8236 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* cp0_unimplemented path: log the unhandled (reg, sel) pair. */
8241 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8242 register_name
, reg
, sel
);
8245 #if defined(TARGET_MIPS64)
8246 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8248 const char *register_name
= "invalid";
8251 check_insn(ctx
, ISA_MIPS64
);
8254 case CP0_REGISTER_00
:
8257 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8258 register_name
= "Index";
8261 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8262 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8263 register_name
= "MVPControl";
8266 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8267 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8268 register_name
= "MVPConf0";
8271 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8272 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8273 register_name
= "MVPConf1";
8277 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8278 register_name
= "VPControl";
8281 goto cp0_unimplemented
;
8284 case CP0_REGISTER_01
:
8287 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8288 gen_helper_mfc0_random(arg
, cpu_env
);
8289 register_name
= "Random";
8292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8293 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8294 register_name
= "VPEControl";
8297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8299 register_name
= "VPEConf0";
8302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8304 register_name
= "VPEConf1";
8307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8308 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8309 register_name
= "YQMask";
8312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8313 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8314 register_name
= "VPESchedule";
8317 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8318 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8319 register_name
= "VPEScheFBack";
8322 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8324 register_name
= "VPEOpt";
8327 goto cp0_unimplemented
;
8330 case CP0_REGISTER_02
:
8333 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8334 register_name
= "EntryLo0";
8337 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8338 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8339 register_name
= "TCStatus";
8342 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8343 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8344 register_name
= "TCBind";
8347 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8348 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8349 register_name
= "TCRestart";
8352 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8353 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8354 register_name
= "TCHalt";
8357 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8358 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8359 register_name
= "TCContext";
8362 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8363 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8364 register_name
= "TCSchedule";
8367 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8368 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8369 register_name
= "TCScheFBack";
8372 goto cp0_unimplemented
;
8375 case CP0_REGISTER_03
:
8378 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8379 register_name
= "EntryLo1";
8383 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8384 register_name
= "GlobalNumber";
8387 goto cp0_unimplemented
;
8390 case CP0_REGISTER_04
:
8393 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8394 register_name
= "Context";
8397 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8398 register_name
= "ContextConfig";
8399 goto cp0_unimplemented
;
8401 CP0_CHECK(ctx
->ulri
);
8402 tcg_gen_ld_tl(arg
, cpu_env
,
8403 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8404 register_name
= "UserLocal";
8407 goto cp0_unimplemented
;
8410 case CP0_REGISTER_05
:
8413 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8414 register_name
= "PageMask";
8417 check_insn(ctx
, ISA_MIPS32R2
);
8418 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8419 register_name
= "PageGrain";
8423 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8424 register_name
= "SegCtl0";
8428 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8429 register_name
= "SegCtl1";
8433 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8434 register_name
= "SegCtl2";
8438 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8439 register_name
= "PWBase";
8443 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8444 register_name
= "PWField";
8448 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8449 register_name
= "PWSize";
8452 goto cp0_unimplemented
;
8455 case CP0_REGISTER_06
:
8458 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8459 register_name
= "Wired";
8462 check_insn(ctx
, ISA_MIPS32R2
);
8463 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8464 register_name
= "SRSConf0";
8467 check_insn(ctx
, ISA_MIPS32R2
);
8468 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8469 register_name
= "SRSConf1";
8472 check_insn(ctx
, ISA_MIPS32R2
);
8473 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8474 register_name
= "SRSConf2";
8477 check_insn(ctx
, ISA_MIPS32R2
);
8478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8479 register_name
= "SRSConf3";
8482 check_insn(ctx
, ISA_MIPS32R2
);
8483 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8484 register_name
= "SRSConf4";
8488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8489 register_name
= "PWCtl";
8492 goto cp0_unimplemented
;
8495 case CP0_REGISTER_07
:
8498 check_insn(ctx
, ISA_MIPS32R2
);
8499 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8500 register_name
= "HWREna";
8503 goto cp0_unimplemented
;
8506 case CP0_REGISTER_08
:
8509 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8510 register_name
= "BadVAddr";
8514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8515 register_name
= "BadInstr";
8519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8520 register_name
= "BadInstrP";
8524 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8525 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8526 register_name
= "BadInstrX";
8529 goto cp0_unimplemented
;
8532 case CP0_REGISTER_09
:
8535 /* Mark as an IO operation because we read the time. */
8536 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8539 gen_helper_mfc0_count(arg
, cpu_env
);
8540 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8543 /* Break the TB to be able to take timer interrupts immediately
8544 after reading count. DISAS_STOP isn't sufficient, we need to
8545 ensure we break completely out of translated code. */
8546 gen_save_pc(ctx
->base
.pc_next
+ 4);
8547 ctx
->base
.is_jmp
= DISAS_EXIT
;
8548 register_name
= "Count";
8551 CP0_CHECK(ctx
->saar
);
8552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8553 register_name
= "SAARI";
8556 CP0_CHECK(ctx
->saar
);
8557 gen_helper_dmfc0_saar(arg
, cpu_env
);
8558 register_name
= "SAAR";
8561 goto cp0_unimplemented
;
8564 case CP0_REGISTER_10
:
8567 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8568 register_name
= "EntryHi";
8571 goto cp0_unimplemented
;
8574 case CP0_REGISTER_11
:
8577 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8578 register_name
= "Compare";
8580 /* 6,7 are implementation dependent */
8582 goto cp0_unimplemented
;
8585 case CP0_REGISTER_12
:
8588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8589 register_name
= "Status";
8592 check_insn(ctx
, ISA_MIPS32R2
);
8593 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8594 register_name
= "IntCtl";
8597 check_insn(ctx
, ISA_MIPS32R2
);
8598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8599 register_name
= "SRSCtl";
8602 check_insn(ctx
, ISA_MIPS32R2
);
8603 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8604 register_name
= "SRSMap";
8607 goto cp0_unimplemented
;
8610 case CP0_REGISTER_13
:
8613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8614 register_name
= "Cause";
8617 goto cp0_unimplemented
;
8620 case CP0_REGISTER_14
:
8623 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8624 register_name
= "EPC";
8627 goto cp0_unimplemented
;
8630 case CP0_REGISTER_15
:
8633 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8634 register_name
= "PRid";
8637 check_insn(ctx
, ISA_MIPS32R2
);
8638 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8639 register_name
= "EBase";
8642 check_insn(ctx
, ISA_MIPS32R2
);
8643 CP0_CHECK(ctx
->cmgcr
);
8644 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8645 register_name
= "CMGCRBase";
8648 goto cp0_unimplemented
;
8651 case CP0_REGISTER_16
:
8654 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8655 register_name
= "Config";
8658 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8659 register_name
= "Config1";
8662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8663 register_name
= "Config2";
8666 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8667 register_name
= "Config3";
8670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8671 register_name
= "Config4";
8674 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8675 register_name
= "Config5";
8677 /* 6,7 are implementation dependent */
8679 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8680 register_name
= "Config6";
8683 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8684 register_name
= "Config7";
8687 goto cp0_unimplemented
;
8690 case CP0_REGISTER_17
:
8693 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8694 register_name
= "LLAddr";
8697 CP0_CHECK(ctx
->mrp
);
8698 gen_helper_dmfc0_maar(arg
, cpu_env
);
8699 register_name
= "MAAR";
8702 CP0_CHECK(ctx
->mrp
);
8703 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8704 register_name
= "MAARI";
8707 goto cp0_unimplemented
;
8710 case CP0_REGISTER_18
:
8720 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8721 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8722 register_name
= "WatchLo";
8725 goto cp0_unimplemented
;
8728 case CP0_REGISTER_19
:
8738 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8739 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8740 register_name
= "WatchHi";
8743 goto cp0_unimplemented
;
8746 case CP0_REGISTER_20
:
8749 check_insn(ctx
, ISA_MIPS3
);
8750 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8751 register_name
= "XContext";
8754 goto cp0_unimplemented
;
8757 case CP0_REGISTER_21
:
8758 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8759 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8762 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8763 register_name
= "Framemask";
8766 goto cp0_unimplemented
;
8769 case CP0_REGISTER_22
:
8770 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8771 register_name
= "'Diagnostic"; /* implementation dependent */
8773 case CP0_REGISTER_23
:
8776 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8777 register_name
= "Debug";
8780 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8781 register_name
= "TraceControl";
8782 goto cp0_unimplemented
;
8784 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8785 register_name
= "TraceControl2";
8786 goto cp0_unimplemented
;
8788 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8789 register_name
= "UserTraceData";
8790 goto cp0_unimplemented
;
8792 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8793 register_name
= "TraceBPC";
8794 goto cp0_unimplemented
;
8796 goto cp0_unimplemented
;
8799 case CP0_REGISTER_24
:
8803 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8804 register_name
= "DEPC";
8807 goto cp0_unimplemented
;
8810 case CP0_REGISTER_25
:
8813 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8814 register_name
= "Performance0";
8817 // gen_helper_dmfc0_performance1(arg);
8818 register_name
= "Performance1";
8819 goto cp0_unimplemented
;
8821 // gen_helper_dmfc0_performance2(arg);
8822 register_name
= "Performance2";
8823 goto cp0_unimplemented
;
8825 // gen_helper_dmfc0_performance3(arg);
8826 register_name
= "Performance3";
8827 goto cp0_unimplemented
;
8829 // gen_helper_dmfc0_performance4(arg);
8830 register_name
= "Performance4";
8831 goto cp0_unimplemented
;
8833 // gen_helper_dmfc0_performance5(arg);
8834 register_name
= "Performance5";
8835 goto cp0_unimplemented
;
8837 // gen_helper_dmfc0_performance6(arg);
8838 register_name
= "Performance6";
8839 goto cp0_unimplemented
;
8841 // gen_helper_dmfc0_performance7(arg);
8842 register_name
= "Performance7";
8843 goto cp0_unimplemented
;
8845 goto cp0_unimplemented
;
8848 case CP0_REGISTER_26
:
8851 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8852 register_name
= "ErrCtl";
8855 goto cp0_unimplemented
;
8858 case CP0_REGISTER_27
:
8865 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8866 register_name
= "CacheErr";
8869 goto cp0_unimplemented
;
8872 case CP0_REGISTER_28
:
8878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8879 register_name
= "TagLo";
8885 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8886 register_name
= "DataLo";
8889 goto cp0_unimplemented
;
8892 case CP0_REGISTER_29
:
8898 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8899 register_name
= "TagHi";
8905 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8906 register_name
= "DataHi";
8909 goto cp0_unimplemented
;
8912 case CP0_REGISTER_30
:
8915 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8916 register_name
= "ErrorEPC";
8919 goto cp0_unimplemented
;
8922 case CP0_REGISTER_31
:
8926 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8927 register_name
= "DESAVE";
8935 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8936 tcg_gen_ld_tl(arg
, cpu_env
,
8937 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8938 register_name
= "KScratch";
8941 goto cp0_unimplemented
;
8945 goto cp0_unimplemented
;
8947 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
8951 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
8952 register_name
, reg
, sel
);
8953 gen_mfc0_unimplemented(ctx
, arg
);
8956 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8958 const char *register_name
= "invalid";
8961 check_insn(ctx
, ISA_MIPS64
);
8963 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8968 case CP0_REGISTER_00
:
8971 gen_helper_mtc0_index(cpu_env
, arg
);
8972 register_name
= "Index";
8975 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8976 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8977 register_name
= "MVPControl";
8980 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8982 register_name
= "MVPConf0";
8985 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8987 register_name
= "MVPConf1";
8992 register_name
= "VPControl";
8995 goto cp0_unimplemented
;
8998 case CP0_REGISTER_01
:
9002 register_name
= "Random";
9005 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9006 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9007 register_name
= "VPEControl";
9010 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9011 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9012 register_name
= "VPEConf0";
9015 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9016 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9017 register_name
= "VPEConf1";
9020 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9021 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9022 register_name
= "YQMask";
9025 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9026 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
9027 register_name
= "VPESchedule";
9030 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9031 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9032 register_name
= "VPEScheFBack";
9035 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9036 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9037 register_name
= "VPEOpt";
9040 goto cp0_unimplemented
;
9043 case CP0_REGISTER_02
:
9046 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9047 register_name
= "EntryLo0";
9050 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9051 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9052 register_name
= "TCStatus";
9055 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9056 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9057 register_name
= "TCBind";
9060 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9061 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9062 register_name
= "TCRestart";
9065 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9066 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9067 register_name
= "TCHalt";
9070 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9071 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9072 register_name
= "TCContext";
9075 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9076 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9077 register_name
= "TCSchedule";
9080 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9081 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9082 register_name
= "TCScheFBack";
9085 goto cp0_unimplemented
;
9088 case CP0_REGISTER_03
:
9091 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9092 register_name
= "EntryLo1";
9097 register_name
= "GlobalNumber";
9100 goto cp0_unimplemented
;
9103 case CP0_REGISTER_04
:
9106 gen_helper_mtc0_context(cpu_env
, arg
);
9107 register_name
= "Context";
9110 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9111 register_name
= "ContextConfig";
9112 goto cp0_unimplemented
;
9114 CP0_CHECK(ctx
->ulri
);
9115 tcg_gen_st_tl(arg
, cpu_env
,
9116 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9117 register_name
= "UserLocal";
9120 goto cp0_unimplemented
;
9123 case CP0_REGISTER_05
:
9126 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9127 register_name
= "PageMask";
9130 check_insn(ctx
, ISA_MIPS32R2
);
9131 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9132 register_name
= "PageGrain";
9136 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9137 register_name
= "SegCtl0";
9141 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9142 register_name
= "SegCtl1";
9146 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9147 register_name
= "SegCtl2";
9151 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9152 register_name
= "PWBase";
9156 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9157 register_name
= "PWField";
9161 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9162 register_name
= "PWSize";
9165 goto cp0_unimplemented
;
9168 case CP0_REGISTER_06
:
9171 gen_helper_mtc0_wired(cpu_env
, arg
);
9172 register_name
= "Wired";
9175 check_insn(ctx
, ISA_MIPS32R2
);
9176 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9177 register_name
= "SRSConf0";
9180 check_insn(ctx
, ISA_MIPS32R2
);
9181 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9182 register_name
= "SRSConf1";
9185 check_insn(ctx
, ISA_MIPS32R2
);
9186 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9187 register_name
= "SRSConf2";
9190 check_insn(ctx
, ISA_MIPS32R2
);
9191 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9192 register_name
= "SRSConf3";
9195 check_insn(ctx
, ISA_MIPS32R2
);
9196 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9197 register_name
= "SRSConf4";
9201 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9202 register_name
= "PWCtl";
9205 goto cp0_unimplemented
;
9208 case CP0_REGISTER_07
:
9211 check_insn(ctx
, ISA_MIPS32R2
);
9212 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9213 ctx
->base
.is_jmp
= DISAS_STOP
;
9214 register_name
= "HWREna";
9217 goto cp0_unimplemented
;
9220 case CP0_REGISTER_08
:
9224 register_name
= "BadVAddr";
9228 register_name
= "BadInstr";
9232 register_name
= "BadInstrP";
9236 register_name
= "BadInstrX";
9239 goto cp0_unimplemented
;
9242 case CP0_REGISTER_09
:
9245 gen_helper_mtc0_count(cpu_env
, arg
);
9246 register_name
= "Count";
9249 CP0_CHECK(ctx
->saar
);
9250 gen_helper_mtc0_saari(cpu_env
, arg
);
9251 register_name
= "SAARI";
9254 CP0_CHECK(ctx
->saar
);
9255 gen_helper_mtc0_saar(cpu_env
, arg
);
9256 register_name
= "SAAR";
9259 goto cp0_unimplemented
;
9261 /* Stop translation as we may have switched the execution mode */
9262 ctx
->base
.is_jmp
= DISAS_STOP
;
9264 case CP0_REGISTER_10
:
9267 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9268 register_name
= "EntryHi";
9271 goto cp0_unimplemented
;
9274 case CP0_REGISTER_11
:
9277 gen_helper_mtc0_compare(cpu_env
, arg
);
9278 register_name
= "Compare";
9280 /* 6,7 are implementation dependent */
9282 goto cp0_unimplemented
;
9284 /* Stop translation as we may have switched the execution mode */
9285 ctx
->base
.is_jmp
= DISAS_STOP
;
9287 case CP0_REGISTER_12
:
9290 save_cpu_state(ctx
, 1);
9291 gen_helper_mtc0_status(cpu_env
, arg
);
9292 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9293 gen_save_pc(ctx
->base
.pc_next
+ 4);
9294 ctx
->base
.is_jmp
= DISAS_EXIT
;
9295 register_name
= "Status";
9298 check_insn(ctx
, ISA_MIPS32R2
);
9299 gen_helper_mtc0_intctl(cpu_env
, arg
);
9300 /* Stop translation as we may have switched the execution mode */
9301 ctx
->base
.is_jmp
= DISAS_STOP
;
9302 register_name
= "IntCtl";
9305 check_insn(ctx
, ISA_MIPS32R2
);
9306 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9307 /* Stop translation as we may have switched the execution mode */
9308 ctx
->base
.is_jmp
= DISAS_STOP
;
9309 register_name
= "SRSCtl";
9312 check_insn(ctx
, ISA_MIPS32R2
);
9313 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9314 /* Stop translation as we may have switched the execution mode */
9315 ctx
->base
.is_jmp
= DISAS_STOP
;
9316 register_name
= "SRSMap";
9319 goto cp0_unimplemented
;
9322 case CP0_REGISTER_13
:
9325 save_cpu_state(ctx
, 1);
9326 gen_helper_mtc0_cause(cpu_env
, arg
);
9327 /* Stop translation as we may have triggered an interrupt.
9328 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9329 * translated code to check for pending interrupts. */
9330 gen_save_pc(ctx
->base
.pc_next
+ 4);
9331 ctx
->base
.is_jmp
= DISAS_EXIT
;
9332 register_name
= "Cause";
9335 goto cp0_unimplemented
;
9338 case CP0_REGISTER_14
:
9341 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9342 register_name
= "EPC";
9345 goto cp0_unimplemented
;
9348 case CP0_REGISTER_15
:
9352 register_name
= "PRid";
9355 check_insn(ctx
, ISA_MIPS32R2
);
9356 gen_helper_mtc0_ebase(cpu_env
, arg
);
9357 register_name
= "EBase";
9360 goto cp0_unimplemented
;
9363 case CP0_REGISTER_16
:
9366 gen_helper_mtc0_config0(cpu_env
, arg
);
9367 register_name
= "Config";
9368 /* Stop translation as we may have switched the execution mode */
9369 ctx
->base
.is_jmp
= DISAS_STOP
;
9372 /* ignored, read only */
9373 register_name
= "Config1";
9376 gen_helper_mtc0_config2(cpu_env
, arg
);
9377 register_name
= "Config2";
9378 /* Stop translation as we may have switched the execution mode */
9379 ctx
->base
.is_jmp
= DISAS_STOP
;
9382 gen_helper_mtc0_config3(cpu_env
, arg
);
9383 register_name
= "Config3";
9384 /* Stop translation as we may have switched the execution mode */
9385 ctx
->base
.is_jmp
= DISAS_STOP
;
9388 /* currently ignored */
9389 register_name
= "Config4";
9392 gen_helper_mtc0_config5(cpu_env
, arg
);
9393 register_name
= "Config5";
9394 /* Stop translation as we may have switched the execution mode */
9395 ctx
->base
.is_jmp
= DISAS_STOP
;
9397 /* 6,7 are implementation dependent */
9399 register_name
= "Invalid config selector";
9400 goto cp0_unimplemented
;
9403 case CP0_REGISTER_17
:
9406 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9407 register_name
= "LLAddr";
9410 CP0_CHECK(ctx
->mrp
);
9411 gen_helper_mtc0_maar(cpu_env
, arg
);
9412 register_name
= "MAAR";
9415 CP0_CHECK(ctx
->mrp
);
9416 gen_helper_mtc0_maari(cpu_env
, arg
);
9417 register_name
= "MAARI";
9420 goto cp0_unimplemented
;
9423 case CP0_REGISTER_18
:
9433 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9434 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9435 register_name
= "WatchLo";
9438 goto cp0_unimplemented
;
9441 case CP0_REGISTER_19
:
9451 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9452 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9453 register_name
= "WatchHi";
9456 goto cp0_unimplemented
;
9459 case CP0_REGISTER_20
:
9462 check_insn(ctx
, ISA_MIPS3
);
9463 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9464 register_name
= "XContext";
9467 goto cp0_unimplemented
;
9470 case CP0_REGISTER_21
:
9471 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9472 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9475 gen_helper_mtc0_framemask(cpu_env
, arg
);
9476 register_name
= "Framemask";
9479 goto cp0_unimplemented
;
9482 case CP0_REGISTER_22
:
9484 register_name
= "Diagnostic"; /* implementation dependent */
9486 case CP0_REGISTER_23
:
9489 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9490 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9491 gen_save_pc(ctx
->base
.pc_next
+ 4);
9492 ctx
->base
.is_jmp
= DISAS_EXIT
;
9493 register_name
= "Debug";
9496 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9497 /* Stop translation as we may have switched the execution mode */
9498 ctx
->base
.is_jmp
= DISAS_STOP
;
9499 register_name
= "TraceControl";
9500 goto cp0_unimplemented
;
9502 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9503 /* Stop translation as we may have switched the execution mode */
9504 ctx
->base
.is_jmp
= DISAS_STOP
;
9505 register_name
= "TraceControl2";
9506 goto cp0_unimplemented
;
9508 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9509 /* Stop translation as we may have switched the execution mode */
9510 ctx
->base
.is_jmp
= DISAS_STOP
;
9511 register_name
= "UserTraceData";
9512 goto cp0_unimplemented
;
9514 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9515 /* Stop translation as we may have switched the execution mode */
9516 ctx
->base
.is_jmp
= DISAS_STOP
;
9517 register_name
= "TraceBPC";
9518 goto cp0_unimplemented
;
9520 goto cp0_unimplemented
;
9523 case CP0_REGISTER_24
:
9527 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9528 register_name
= "DEPC";
9531 goto cp0_unimplemented
;
9534 case CP0_REGISTER_25
:
9537 gen_helper_mtc0_performance0(cpu_env
, arg
);
9538 register_name
= "Performance0";
9541 // gen_helper_mtc0_performance1(cpu_env, arg);
9542 register_name
= "Performance1";
9543 goto cp0_unimplemented
;
9545 // gen_helper_mtc0_performance2(cpu_env, arg);
9546 register_name
= "Performance2";
9547 goto cp0_unimplemented
;
9549 // gen_helper_mtc0_performance3(cpu_env, arg);
9550 register_name
= "Performance3";
9551 goto cp0_unimplemented
;
9553 // gen_helper_mtc0_performance4(cpu_env, arg);
9554 register_name
= "Performance4";
9555 goto cp0_unimplemented
;
9557 // gen_helper_mtc0_performance5(cpu_env, arg);
9558 register_name
= "Performance5";
9559 goto cp0_unimplemented
;
9561 // gen_helper_mtc0_performance6(cpu_env, arg);
9562 register_name
= "Performance6";
9563 goto cp0_unimplemented
;
9565 // gen_helper_mtc0_performance7(cpu_env, arg);
9566 register_name
= "Performance7";
9567 goto cp0_unimplemented
;
9569 goto cp0_unimplemented
;
9572 case CP0_REGISTER_26
:
9575 gen_helper_mtc0_errctl(cpu_env
, arg
);
9576 ctx
->base
.is_jmp
= DISAS_STOP
;
9577 register_name
= "ErrCtl";
9580 goto cp0_unimplemented
;
9583 case CP0_REGISTER_27
:
9590 register_name
= "CacheErr";
9593 goto cp0_unimplemented
;
9596 case CP0_REGISTER_28
:
9602 gen_helper_mtc0_taglo(cpu_env
, arg
);
9603 register_name
= "TagLo";
9609 gen_helper_mtc0_datalo(cpu_env
, arg
);
9610 register_name
= "DataLo";
9613 goto cp0_unimplemented
;
9616 case CP0_REGISTER_29
:
9622 gen_helper_mtc0_taghi(cpu_env
, arg
);
9623 register_name
= "TagHi";
9629 gen_helper_mtc0_datahi(cpu_env
, arg
);
9630 register_name
= "DataHi";
9633 register_name
= "invalid sel";
9634 goto cp0_unimplemented
;
9637 case CP0_REGISTER_30
:
9640 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9641 register_name
= "ErrorEPC";
9644 goto cp0_unimplemented
;
9647 case CP0_REGISTER_31
:
9651 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9652 register_name
= "DESAVE";
9660 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9661 tcg_gen_st_tl(arg
, cpu_env
,
9662 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9663 register_name
= "KScratch";
9666 goto cp0_unimplemented
;
9670 goto cp0_unimplemented
;
9672 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9674 /* For simplicity assume that all writes can cause interrupts. */
9675 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9677 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9678 * translated code to check for pending interrupts. */
9679 gen_save_pc(ctx
->base
.pc_next
+ 4);
9680 ctx
->base
.is_jmp
= DISAS_EXIT
;
9685 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9686 register_name
, reg
, sel
);
9688 #endif /* TARGET_MIPS64 */
9690 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9691 int u
, int sel
, int h
)
9693 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9694 TCGv t0
= tcg_temp_local_new();
9696 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9697 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9698 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9699 tcg_gen_movi_tl(t0
, -1);
9700 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9701 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9702 tcg_gen_movi_tl(t0
, -1);
9708 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9711 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9721 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9724 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9727 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9730 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9733 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9736 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9739 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9742 gen_mfc0(ctx
, t0
, rt
, sel
);
9749 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9752 gen_mfc0(ctx
, t0
, rt
, sel
);
9758 gen_helper_mftc0_status(t0
, cpu_env
);
9761 gen_mfc0(ctx
, t0
, rt
, sel
);
9767 gen_helper_mftc0_cause(t0
, cpu_env
);
9777 gen_helper_mftc0_epc(t0
, cpu_env
);
9787 gen_helper_mftc0_ebase(t0
, cpu_env
);
9804 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9814 gen_helper_mftc0_debug(t0
, cpu_env
);
9817 gen_mfc0(ctx
, t0
, rt
, sel
);
9822 gen_mfc0(ctx
, t0
, rt
, sel
);
9824 } else switch (sel
) {
9825 /* GPR registers. */
9827 gen_helper_1e0i(mftgpr
, t0
, rt
);
9829 /* Auxiliary CPU registers */
9833 gen_helper_1e0i(mftlo
, t0
, 0);
9836 gen_helper_1e0i(mfthi
, t0
, 0);
9839 gen_helper_1e0i(mftacx
, t0
, 0);
9842 gen_helper_1e0i(mftlo
, t0
, 1);
9845 gen_helper_1e0i(mfthi
, t0
, 1);
9848 gen_helper_1e0i(mftacx
, t0
, 1);
9851 gen_helper_1e0i(mftlo
, t0
, 2);
9854 gen_helper_1e0i(mfthi
, t0
, 2);
9857 gen_helper_1e0i(mftacx
, t0
, 2);
9860 gen_helper_1e0i(mftlo
, t0
, 3);
9863 gen_helper_1e0i(mfthi
, t0
, 3);
9866 gen_helper_1e0i(mftacx
, t0
, 3);
9869 gen_helper_mftdsp(t0
, cpu_env
);
9875 /* Floating point (COP1). */
9877 /* XXX: For now we support only a single FPU context. */
9879 TCGv_i32 fp0
= tcg_temp_new_i32();
9881 gen_load_fpr32(ctx
, fp0
, rt
);
9882 tcg_gen_ext_i32_tl(t0
, fp0
);
9883 tcg_temp_free_i32(fp0
);
9885 TCGv_i32 fp0
= tcg_temp_new_i32();
9887 gen_load_fpr32h(ctx
, fp0
, rt
);
9888 tcg_gen_ext_i32_tl(t0
, fp0
);
9889 tcg_temp_free_i32(fp0
);
9893 /* XXX: For now we support only a single FPU context. */
9894 gen_helper_1e0i(cfc1
, t0
, rt
);
9896 /* COP2: Not implemented. */
9903 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9904 gen_store_gpr(t0
, rd
);
9910 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9911 generate_exception_end(ctx
, EXCP_RI
);
9914 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9915 int u
, int sel
, int h
)
9917 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9918 TCGv t0
= tcg_temp_local_new();
9920 gen_load_gpr(t0
, rt
);
9921 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9922 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9923 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9925 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9926 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9933 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9936 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9946 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9949 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9952 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9955 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9958 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9961 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9964 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9967 gen_mtc0(ctx
, t0
, rd
, sel
);
9974 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9977 gen_mtc0(ctx
, t0
, rd
, sel
);
9983 gen_helper_mttc0_status(cpu_env
, t0
);
9986 gen_mtc0(ctx
, t0
, rd
, sel
);
9992 gen_helper_mttc0_cause(cpu_env
, t0
);
10002 gen_helper_mttc0_ebase(cpu_env
, t0
);
10012 gen_helper_mttc0_debug(cpu_env
, t0
);
10015 gen_mtc0(ctx
, t0
, rd
, sel
);
10020 gen_mtc0(ctx
, t0
, rd
, sel
);
10022 } else switch (sel
) {
10023 /* GPR registers. */
10025 gen_helper_0e1i(mttgpr
, t0
, rd
);
10027 /* Auxiliary CPU registers */
10031 gen_helper_0e1i(mttlo
, t0
, 0);
10034 gen_helper_0e1i(mtthi
, t0
, 0);
10037 gen_helper_0e1i(mttacx
, t0
, 0);
10040 gen_helper_0e1i(mttlo
, t0
, 1);
10043 gen_helper_0e1i(mtthi
, t0
, 1);
10046 gen_helper_0e1i(mttacx
, t0
, 1);
10049 gen_helper_0e1i(mttlo
, t0
, 2);
10052 gen_helper_0e1i(mtthi
, t0
, 2);
10055 gen_helper_0e1i(mttacx
, t0
, 2);
10058 gen_helper_0e1i(mttlo
, t0
, 3);
10061 gen_helper_0e1i(mtthi
, t0
, 3);
10064 gen_helper_0e1i(mttacx
, t0
, 3);
10067 gen_helper_mttdsp(cpu_env
, t0
);
10073 /* Floating point (COP1). */
10075 /* XXX: For now we support only a single FPU context. */
10077 TCGv_i32 fp0
= tcg_temp_new_i32();
10079 tcg_gen_trunc_tl_i32(fp0
, t0
);
10080 gen_store_fpr32(ctx
, fp0
, rd
);
10081 tcg_temp_free_i32(fp0
);
10083 TCGv_i32 fp0
= tcg_temp_new_i32();
10085 tcg_gen_trunc_tl_i32(fp0
, t0
);
10086 gen_store_fpr32h(ctx
, fp0
, rd
);
10087 tcg_temp_free_i32(fp0
);
10091 /* XXX: For now we support only a single FPU context. */
10093 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10095 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10096 tcg_temp_free_i32(fs_tmp
);
10098 /* Stop translation as we may have changed hflags */
10099 ctx
->base
.is_jmp
= DISAS_STOP
;
10101 /* COP2: Not implemented. */
10108 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10114 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10115 generate_exception_end(ctx
, EXCP_RI
);
10118 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
10120 const char *opn
= "ldst";
10122 check_cp0_enabled(ctx
);
10126 /* Treat as NOP. */
10129 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10134 TCGv t0
= tcg_temp_new();
10136 gen_load_gpr(t0
, rt
);
10137 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10142 #if defined(TARGET_MIPS64)
10144 check_insn(ctx
, ISA_MIPS3
);
10146 /* Treat as NOP. */
10149 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10153 check_insn(ctx
, ISA_MIPS3
);
10155 TCGv t0
= tcg_temp_new();
10157 gen_load_gpr(t0
, rt
);
10158 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10167 /* Treat as NOP. */
10170 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10176 TCGv t0
= tcg_temp_new();
10177 gen_load_gpr(t0
, rt
);
10178 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10184 check_cp0_enabled(ctx
);
10186 /* Treat as NOP. */
10189 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10190 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10194 check_cp0_enabled(ctx
);
10195 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10196 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10201 if (!env
->tlb
->helper_tlbwi
)
10203 gen_helper_tlbwi(cpu_env
);
10207 if (ctx
->ie
>= 2) {
10208 if (!env
->tlb
->helper_tlbinv
) {
10211 gen_helper_tlbinv(cpu_env
);
10212 } /* treat as nop if TLBINV not supported */
10216 if (ctx
->ie
>= 2) {
10217 if (!env
->tlb
->helper_tlbinvf
) {
10220 gen_helper_tlbinvf(cpu_env
);
10221 } /* treat as nop if TLBINV not supported */
10225 if (!env
->tlb
->helper_tlbwr
)
10227 gen_helper_tlbwr(cpu_env
);
10231 if (!env
->tlb
->helper_tlbp
)
10233 gen_helper_tlbp(cpu_env
);
10237 if (!env
->tlb
->helper_tlbr
)
10239 gen_helper_tlbr(cpu_env
);
10241 case OPC_ERET
: /* OPC_ERETNC */
10242 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10243 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10246 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10247 if (ctx
->opcode
& (1 << bit_shift
)) {
10250 check_insn(ctx
, ISA_MIPS32R5
);
10251 gen_helper_eretnc(cpu_env
);
10255 check_insn(ctx
, ISA_MIPS2
);
10256 gen_helper_eret(cpu_env
);
10258 ctx
->base
.is_jmp
= DISAS_EXIT
;
10263 check_insn(ctx
, ISA_MIPS32
);
10264 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10265 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10268 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10270 generate_exception_end(ctx
, EXCP_RI
);
10272 gen_helper_deret(cpu_env
);
10273 ctx
->base
.is_jmp
= DISAS_EXIT
;
10278 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10279 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10280 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10283 /* If we get an exception, we want to restart at next instruction */
10284 ctx
->base
.pc_next
+= 4;
10285 save_cpu_state(ctx
, 1);
10286 ctx
->base
.pc_next
-= 4;
10287 gen_helper_wait(cpu_env
);
10288 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10293 generate_exception_end(ctx
, EXCP_RI
);
10296 (void)opn
; /* avoid a compiler warning */
10298 #endif /* !CONFIG_USER_ONLY */
10300 /* CP1 Branches (before delay slot) */
10301 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10302 int32_t cc
, int32_t offset
)
10304 target_ulong btarget
;
10305 TCGv_i32 t0
= tcg_temp_new_i32();
10307 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10308 generate_exception_end(ctx
, EXCP_RI
);
10313 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
10315 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
10319 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10320 tcg_gen_not_i32(t0
, t0
);
10321 tcg_gen_andi_i32(t0
, t0
, 1);
10322 tcg_gen_extu_i32_tl(bcond
, t0
);
10325 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10326 tcg_gen_not_i32(t0
, t0
);
10327 tcg_gen_andi_i32(t0
, t0
, 1);
10328 tcg_gen_extu_i32_tl(bcond
, t0
);
10331 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10332 tcg_gen_andi_i32(t0
, t0
, 1);
10333 tcg_gen_extu_i32_tl(bcond
, t0
);
10336 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10337 tcg_gen_andi_i32(t0
, t0
, 1);
10338 tcg_gen_extu_i32_tl(bcond
, t0
);
10340 ctx
->hflags
|= MIPS_HFLAG_BL
;
10344 TCGv_i32 t1
= tcg_temp_new_i32();
10345 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10346 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10347 tcg_gen_nand_i32(t0
, t0
, t1
);
10348 tcg_temp_free_i32(t1
);
10349 tcg_gen_andi_i32(t0
, t0
, 1);
10350 tcg_gen_extu_i32_tl(bcond
, t0
);
10355 TCGv_i32 t1
= tcg_temp_new_i32();
10356 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10357 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10358 tcg_gen_or_i32(t0
, t0
, t1
);
10359 tcg_temp_free_i32(t1
);
10360 tcg_gen_andi_i32(t0
, t0
, 1);
10361 tcg_gen_extu_i32_tl(bcond
, t0
);
10366 TCGv_i32 t1
= tcg_temp_new_i32();
10367 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10368 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10369 tcg_gen_and_i32(t0
, t0
, t1
);
10370 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10371 tcg_gen_and_i32(t0
, t0
, t1
);
10372 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10373 tcg_gen_nand_i32(t0
, t0
, t1
);
10374 tcg_temp_free_i32(t1
);
10375 tcg_gen_andi_i32(t0
, t0
, 1);
10376 tcg_gen_extu_i32_tl(bcond
, t0
);
10381 TCGv_i32 t1
= tcg_temp_new_i32();
10382 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10383 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10384 tcg_gen_or_i32(t0
, t0
, t1
);
10385 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10386 tcg_gen_or_i32(t0
, t0
, t1
);
10387 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10388 tcg_gen_or_i32(t0
, t0
, t1
);
10389 tcg_temp_free_i32(t1
);
10390 tcg_gen_andi_i32(t0
, t0
, 1);
10391 tcg_gen_extu_i32_tl(bcond
, t0
);
10394 ctx
->hflags
|= MIPS_HFLAG_BC
;
10397 MIPS_INVAL("cp1 cond branch");
10398 generate_exception_end(ctx
, EXCP_RI
);
10401 ctx
->btarget
= btarget
;
10402 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10404 tcg_temp_free_i32(t0
);
10407 /* R6 CP1 Branches */
10408 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10409 int32_t ft
, int32_t offset
,
10410 int delayslot_size
)
10412 target_ulong btarget
;
10413 TCGv_i64 t0
= tcg_temp_new_i64();
10415 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10416 #ifdef MIPS_DEBUG_DISAS
10417 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10418 "\n", ctx
->base
.pc_next
);
10420 generate_exception_end(ctx
, EXCP_RI
);
10424 gen_load_fpr64(ctx
, t0
, ft
);
10425 tcg_gen_andi_i64(t0
, t0
, 1);
10427 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10431 tcg_gen_xori_i64(t0
, t0
, 1);
10432 ctx
->hflags
|= MIPS_HFLAG_BC
;
10435 /* t0 already set */
10436 ctx
->hflags
|= MIPS_HFLAG_BC
;
10439 MIPS_INVAL("cp1 cond branch");
10440 generate_exception_end(ctx
, EXCP_RI
);
10444 tcg_gen_trunc_i64_tl(bcond
, t0
);
10446 ctx
->btarget
= btarget
;
10448 switch (delayslot_size
) {
10450 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10453 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10458 tcg_temp_free_i64(t0
);
10461 /* Coprocessor 1 (FPU) */
/*
 * Encode a COP1 arithmetic opcode: bits 25..21 select the operand
 * format (FMT_S/D/W/L/PS), bits 5..0 the function field.
 */
#define FOP(func, fmt) (((fmt) << 21) | (func))
enum fopcode {
    /* Single-precision (fmt = S) operations. */
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    /*
     * Function fields 28..31 are reused: pre-R6 RECIP2/RECIP1/
     * RSQRT1/RSQRT2 share encodings with R6 MIN/MINA/MAX/MAXA;
     * the decoder disambiguates on ctx->insn_flags.
     */
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    /* Pre-R6 C.cond.S comparisons: function fields 48..63. */
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),

    /* Double-precision (fmt = D) operations. */
    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    /* Same pre-R6 / R6 encoding overlap as the S-format variants. */
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    /* Pre-R6 C.cond.D comparisons. */
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),

    /* Fixed-point to floating-point conversions. */
    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    /* Paired-single (fmt = PS) operations. */
    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    /* Pre-R6 C.cond.PS comparisons. */
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),

    /*
     * R6 CMP.cond.S encodings: note these deliberately use the W/L
     * format fields with the condition in the function field.
     */
    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    /* R6 CMP.cond.D encodings (format field FMT_L). */
    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
};
/*
 * Generate code for a CP1 (FPU) move instruction: transfers between a
 * general purpose register (rt) and an FPU register or FPU control
 * register (fs).  Covers MFC1/MTC1, CFC1/CTC1, DMFC1/DMTC1 (MIPS64)
 * and MFHC1/MTHC1.  Raises a Reserved Instruction exception for any
 * other opcode.
 */
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();

    switch (opc) {
    case OPC_MFC1:
        /* GPR[rt] = sign-extended low 32 bits of FPR[fs]. */
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTC1:
        /* Low 32 bits of FPR[fs] = low 32 bits of GPR[rt]. */
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_CFC1:
        /* Read FPU control register fs via helper. */
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_CTC1:
        /* Write FPU control register fs via helper. */
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        {
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        }
        /* Stop translation as we may have changed hflags */
        ctx->base.is_jmp = DISAS_STOP;
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMFC1:
        /* Full 64-bit FPR -> GPR move. */
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_DMTC1:
        /* Full 64-bit GPR -> FPR move. */
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        break;
#endif
    case OPC_MFHC1:
        /* GPR[rt] = sign-extended high 32 bits of FPR[fs]. */
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTHC1:
        /* High 32 bits of FPR[fs] = low 32 bits of GPR[rt]. */
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    default:
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

 out:
    tcg_temp_free(t0);
}
10755 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10762 /* Treat as NOP. */
10767 cond
= TCG_COND_EQ
;
10769 cond
= TCG_COND_NE
;
10771 l1
= gen_new_label();
10772 t0
= tcg_temp_new_i32();
10773 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10774 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10775 tcg_temp_free_i32(t0
);
10777 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10784 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10788 TCGv_i32 t0
= tcg_temp_new_i32();
10789 TCGLabel
*l1
= gen_new_label();
10792 cond
= TCG_COND_EQ
;
10794 cond
= TCG_COND_NE
;
10796 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10797 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10798 gen_load_fpr32(ctx
, t0
, fs
);
10799 gen_store_fpr32(ctx
, t0
, fd
);
10801 tcg_temp_free_i32(t0
);
10804 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10807 TCGv_i32 t0
= tcg_temp_new_i32();
10809 TCGLabel
*l1
= gen_new_label();
10812 cond
= TCG_COND_EQ
;
10814 cond
= TCG_COND_NE
;
10816 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10817 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10818 tcg_temp_free_i32(t0
);
10819 fp0
= tcg_temp_new_i64();
10820 gen_load_fpr64(ctx
, fp0
, fs
);
10821 gen_store_fpr64(ctx
, fp0
, fd
);
10822 tcg_temp_free_i64(fp0
);
10826 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10830 TCGv_i32 t0
= tcg_temp_new_i32();
10831 TCGLabel
*l1
= gen_new_label();
10832 TCGLabel
*l2
= gen_new_label();
10835 cond
= TCG_COND_EQ
;
10837 cond
= TCG_COND_NE
;
10839 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10840 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10841 gen_load_fpr32(ctx
, t0
, fs
);
10842 gen_store_fpr32(ctx
, t0
, fd
);
10845 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10846 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10847 gen_load_fpr32h(ctx
, t0
, fs
);
10848 gen_store_fpr32h(ctx
, t0
, fd
);
10849 tcg_temp_free_i32(t0
);
10853 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10856 TCGv_i32 t1
= tcg_const_i32(0);
10857 TCGv_i32 fp0
= tcg_temp_new_i32();
10858 TCGv_i32 fp1
= tcg_temp_new_i32();
10859 TCGv_i32 fp2
= tcg_temp_new_i32();
10860 gen_load_fpr32(ctx
, fp0
, fd
);
10861 gen_load_fpr32(ctx
, fp1
, ft
);
10862 gen_load_fpr32(ctx
, fp2
, fs
);
10866 tcg_gen_andi_i32(fp0
, fp0
, 1);
10867 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10870 tcg_gen_andi_i32(fp1
, fp1
, 1);
10871 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10874 tcg_gen_andi_i32(fp1
, fp1
, 1);
10875 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10878 MIPS_INVAL("gen_sel_s");
10879 generate_exception_end(ctx
, EXCP_RI
);
10883 gen_store_fpr32(ctx
, fp0
, fd
);
10884 tcg_temp_free_i32(fp2
);
10885 tcg_temp_free_i32(fp1
);
10886 tcg_temp_free_i32(fp0
);
10887 tcg_temp_free_i32(t1
);
10890 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10893 TCGv_i64 t1
= tcg_const_i64(0);
10894 TCGv_i64 fp0
= tcg_temp_new_i64();
10895 TCGv_i64 fp1
= tcg_temp_new_i64();
10896 TCGv_i64 fp2
= tcg_temp_new_i64();
10897 gen_load_fpr64(ctx
, fp0
, fd
);
10898 gen_load_fpr64(ctx
, fp1
, ft
);
10899 gen_load_fpr64(ctx
, fp2
, fs
);
10903 tcg_gen_andi_i64(fp0
, fp0
, 1);
10904 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10907 tcg_gen_andi_i64(fp1
, fp1
, 1);
10908 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10911 tcg_gen_andi_i64(fp1
, fp1
, 1);
10912 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10915 MIPS_INVAL("gen_sel_d");
10916 generate_exception_end(ctx
, EXCP_RI
);
10920 gen_store_fpr64(ctx
, fp0
, fd
);
10921 tcg_temp_free_i64(fp2
);
10922 tcg_temp_free_i64(fp1
);
10923 tcg_temp_free_i64(fp0
);
10924 tcg_temp_free_i64(t1
);
10927 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10928 int ft
, int fs
, int fd
, int cc
)
10930 uint32_t func
= ctx
->opcode
& 0x3f;
10934 TCGv_i32 fp0
= tcg_temp_new_i32();
10935 TCGv_i32 fp1
= tcg_temp_new_i32();
10937 gen_load_fpr32(ctx
, fp0
, fs
);
10938 gen_load_fpr32(ctx
, fp1
, ft
);
10939 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10940 tcg_temp_free_i32(fp1
);
10941 gen_store_fpr32(ctx
, fp0
, fd
);
10942 tcg_temp_free_i32(fp0
);
10947 TCGv_i32 fp0
= tcg_temp_new_i32();
10948 TCGv_i32 fp1
= tcg_temp_new_i32();
10950 gen_load_fpr32(ctx
, fp0
, fs
);
10951 gen_load_fpr32(ctx
, fp1
, ft
);
10952 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10953 tcg_temp_free_i32(fp1
);
10954 gen_store_fpr32(ctx
, fp0
, fd
);
10955 tcg_temp_free_i32(fp0
);
10960 TCGv_i32 fp0
= tcg_temp_new_i32();
10961 TCGv_i32 fp1
= tcg_temp_new_i32();
10963 gen_load_fpr32(ctx
, fp0
, fs
);
10964 gen_load_fpr32(ctx
, fp1
, ft
);
10965 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10966 tcg_temp_free_i32(fp1
);
10967 gen_store_fpr32(ctx
, fp0
, fd
);
10968 tcg_temp_free_i32(fp0
);
10973 TCGv_i32 fp0
= tcg_temp_new_i32();
10974 TCGv_i32 fp1
= tcg_temp_new_i32();
10976 gen_load_fpr32(ctx
, fp0
, fs
);
10977 gen_load_fpr32(ctx
, fp1
, ft
);
10978 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10979 tcg_temp_free_i32(fp1
);
10980 gen_store_fpr32(ctx
, fp0
, fd
);
10981 tcg_temp_free_i32(fp0
);
10986 TCGv_i32 fp0
= tcg_temp_new_i32();
10988 gen_load_fpr32(ctx
, fp0
, fs
);
10989 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10990 gen_store_fpr32(ctx
, fp0
, fd
);
10991 tcg_temp_free_i32(fp0
);
10996 TCGv_i32 fp0
= tcg_temp_new_i32();
10998 gen_load_fpr32(ctx
, fp0
, fs
);
10999 if (ctx
->abs2008
) {
11000 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11002 gen_helper_float_abs_s(fp0
, fp0
);
11004 gen_store_fpr32(ctx
, fp0
, fd
);
11005 tcg_temp_free_i32(fp0
);
11010 TCGv_i32 fp0
= tcg_temp_new_i32();
11012 gen_load_fpr32(ctx
, fp0
, fs
);
11013 gen_store_fpr32(ctx
, fp0
, fd
);
11014 tcg_temp_free_i32(fp0
);
11019 TCGv_i32 fp0
= tcg_temp_new_i32();
11021 gen_load_fpr32(ctx
, fp0
, fs
);
11022 if (ctx
->abs2008
) {
11023 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11025 gen_helper_float_chs_s(fp0
, fp0
);
11027 gen_store_fpr32(ctx
, fp0
, fd
);
11028 tcg_temp_free_i32(fp0
);
11031 case OPC_ROUND_L_S
:
11032 check_cp1_64bitmode(ctx
);
11034 TCGv_i32 fp32
= tcg_temp_new_i32();
11035 TCGv_i64 fp64
= tcg_temp_new_i64();
11037 gen_load_fpr32(ctx
, fp32
, fs
);
11038 if (ctx
->nan2008
) {
11039 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11041 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11043 tcg_temp_free_i32(fp32
);
11044 gen_store_fpr64(ctx
, fp64
, fd
);
11045 tcg_temp_free_i64(fp64
);
11048 case OPC_TRUNC_L_S
:
11049 check_cp1_64bitmode(ctx
);
11051 TCGv_i32 fp32
= tcg_temp_new_i32();
11052 TCGv_i64 fp64
= tcg_temp_new_i64();
11054 gen_load_fpr32(ctx
, fp32
, fs
);
11055 if (ctx
->nan2008
) {
11056 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11058 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11060 tcg_temp_free_i32(fp32
);
11061 gen_store_fpr64(ctx
, fp64
, fd
);
11062 tcg_temp_free_i64(fp64
);
11066 check_cp1_64bitmode(ctx
);
11068 TCGv_i32 fp32
= tcg_temp_new_i32();
11069 TCGv_i64 fp64
= tcg_temp_new_i64();
11071 gen_load_fpr32(ctx
, fp32
, fs
);
11072 if (ctx
->nan2008
) {
11073 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11075 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11077 tcg_temp_free_i32(fp32
);
11078 gen_store_fpr64(ctx
, fp64
, fd
);
11079 tcg_temp_free_i64(fp64
);
11082 case OPC_FLOOR_L_S
:
11083 check_cp1_64bitmode(ctx
);
11085 TCGv_i32 fp32
= tcg_temp_new_i32();
11086 TCGv_i64 fp64
= tcg_temp_new_i64();
11088 gen_load_fpr32(ctx
, fp32
, fs
);
11089 if (ctx
->nan2008
) {
11090 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11092 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11094 tcg_temp_free_i32(fp32
);
11095 gen_store_fpr64(ctx
, fp64
, fd
);
11096 tcg_temp_free_i64(fp64
);
11099 case OPC_ROUND_W_S
:
11101 TCGv_i32 fp0
= tcg_temp_new_i32();
11103 gen_load_fpr32(ctx
, fp0
, fs
);
11104 if (ctx
->nan2008
) {
11105 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11107 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11109 gen_store_fpr32(ctx
, fp0
, fd
);
11110 tcg_temp_free_i32(fp0
);
11113 case OPC_TRUNC_W_S
:
11115 TCGv_i32 fp0
= tcg_temp_new_i32();
11117 gen_load_fpr32(ctx
, fp0
, fs
);
11118 if (ctx
->nan2008
) {
11119 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11121 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11123 gen_store_fpr32(ctx
, fp0
, fd
);
11124 tcg_temp_free_i32(fp0
);
11129 TCGv_i32 fp0
= tcg_temp_new_i32();
11131 gen_load_fpr32(ctx
, fp0
, fs
);
11132 if (ctx
->nan2008
) {
11133 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11135 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11137 gen_store_fpr32(ctx
, fp0
, fd
);
11138 tcg_temp_free_i32(fp0
);
11141 case OPC_FLOOR_W_S
:
11143 TCGv_i32 fp0
= tcg_temp_new_i32();
11145 gen_load_fpr32(ctx
, fp0
, fs
);
11146 if (ctx
->nan2008
) {
11147 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11149 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11151 gen_store_fpr32(ctx
, fp0
, fd
);
11152 tcg_temp_free_i32(fp0
);
11156 check_insn(ctx
, ISA_MIPS32R6
);
11157 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11160 check_insn(ctx
, ISA_MIPS32R6
);
11161 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11164 check_insn(ctx
, ISA_MIPS32R6
);
11165 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11168 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11169 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11172 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11174 TCGLabel
*l1
= gen_new_label();
11178 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11180 fp0
= tcg_temp_new_i32();
11181 gen_load_fpr32(ctx
, fp0
, fs
);
11182 gen_store_fpr32(ctx
, fp0
, fd
);
11183 tcg_temp_free_i32(fp0
);
11188 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11190 TCGLabel
*l1
= gen_new_label();
11194 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11195 fp0
= tcg_temp_new_i32();
11196 gen_load_fpr32(ctx
, fp0
, fs
);
11197 gen_store_fpr32(ctx
, fp0
, fd
);
11198 tcg_temp_free_i32(fp0
);
11205 TCGv_i32 fp0
= tcg_temp_new_i32();
11207 gen_load_fpr32(ctx
, fp0
, fs
);
11208 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11209 gen_store_fpr32(ctx
, fp0
, fd
);
11210 tcg_temp_free_i32(fp0
);
11215 TCGv_i32 fp0
= tcg_temp_new_i32();
11217 gen_load_fpr32(ctx
, fp0
, fs
);
11218 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11219 gen_store_fpr32(ctx
, fp0
, fd
);
11220 tcg_temp_free_i32(fp0
);
11224 check_insn(ctx
, ISA_MIPS32R6
);
11226 TCGv_i32 fp0
= tcg_temp_new_i32();
11227 TCGv_i32 fp1
= tcg_temp_new_i32();
11228 TCGv_i32 fp2
= tcg_temp_new_i32();
11229 gen_load_fpr32(ctx
, fp0
, fs
);
11230 gen_load_fpr32(ctx
, fp1
, ft
);
11231 gen_load_fpr32(ctx
, fp2
, fd
);
11232 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11233 gen_store_fpr32(ctx
, fp2
, fd
);
11234 tcg_temp_free_i32(fp2
);
11235 tcg_temp_free_i32(fp1
);
11236 tcg_temp_free_i32(fp0
);
11240 check_insn(ctx
, ISA_MIPS32R6
);
11242 TCGv_i32 fp0
= tcg_temp_new_i32();
11243 TCGv_i32 fp1
= tcg_temp_new_i32();
11244 TCGv_i32 fp2
= tcg_temp_new_i32();
11245 gen_load_fpr32(ctx
, fp0
, fs
);
11246 gen_load_fpr32(ctx
, fp1
, ft
);
11247 gen_load_fpr32(ctx
, fp2
, fd
);
11248 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11249 gen_store_fpr32(ctx
, fp2
, fd
);
11250 tcg_temp_free_i32(fp2
);
11251 tcg_temp_free_i32(fp1
);
11252 tcg_temp_free_i32(fp0
);
11256 check_insn(ctx
, ISA_MIPS32R6
);
11258 TCGv_i32 fp0
= tcg_temp_new_i32();
11259 gen_load_fpr32(ctx
, fp0
, fs
);
11260 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11261 gen_store_fpr32(ctx
, fp0
, fd
);
11262 tcg_temp_free_i32(fp0
);
11266 check_insn(ctx
, ISA_MIPS32R6
);
11268 TCGv_i32 fp0
= tcg_temp_new_i32();
11269 gen_load_fpr32(ctx
, fp0
, fs
);
11270 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11271 gen_store_fpr32(ctx
, fp0
, fd
);
11272 tcg_temp_free_i32(fp0
);
11275 case OPC_MIN_S
: /* OPC_RECIP2_S */
11276 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11278 TCGv_i32 fp0
= tcg_temp_new_i32();
11279 TCGv_i32 fp1
= tcg_temp_new_i32();
11280 TCGv_i32 fp2
= tcg_temp_new_i32();
11281 gen_load_fpr32(ctx
, fp0
, fs
);
11282 gen_load_fpr32(ctx
, fp1
, ft
);
11283 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11284 gen_store_fpr32(ctx
, fp2
, fd
);
11285 tcg_temp_free_i32(fp2
);
11286 tcg_temp_free_i32(fp1
);
11287 tcg_temp_free_i32(fp0
);
11290 check_cp1_64bitmode(ctx
);
11292 TCGv_i32 fp0
= tcg_temp_new_i32();
11293 TCGv_i32 fp1
= tcg_temp_new_i32();
11295 gen_load_fpr32(ctx
, fp0
, fs
);
11296 gen_load_fpr32(ctx
, fp1
, ft
);
11297 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11298 tcg_temp_free_i32(fp1
);
11299 gen_store_fpr32(ctx
, fp0
, fd
);
11300 tcg_temp_free_i32(fp0
);
11304 case OPC_MINA_S
: /* OPC_RECIP1_S */
11305 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11307 TCGv_i32 fp0
= tcg_temp_new_i32();
11308 TCGv_i32 fp1
= tcg_temp_new_i32();
11309 TCGv_i32 fp2
= tcg_temp_new_i32();
11310 gen_load_fpr32(ctx
, fp0
, fs
);
11311 gen_load_fpr32(ctx
, fp1
, ft
);
11312 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11313 gen_store_fpr32(ctx
, fp2
, fd
);
11314 tcg_temp_free_i32(fp2
);
11315 tcg_temp_free_i32(fp1
);
11316 tcg_temp_free_i32(fp0
);
11319 check_cp1_64bitmode(ctx
);
11321 TCGv_i32 fp0
= tcg_temp_new_i32();
11323 gen_load_fpr32(ctx
, fp0
, fs
);
11324 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11325 gen_store_fpr32(ctx
, fp0
, fd
);
11326 tcg_temp_free_i32(fp0
);
11330 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11331 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11333 TCGv_i32 fp0
= tcg_temp_new_i32();
11334 TCGv_i32 fp1
= tcg_temp_new_i32();
11335 gen_load_fpr32(ctx
, fp0
, fs
);
11336 gen_load_fpr32(ctx
, fp1
, ft
);
11337 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11338 gen_store_fpr32(ctx
, fp1
, fd
);
11339 tcg_temp_free_i32(fp1
);
11340 tcg_temp_free_i32(fp0
);
11343 check_cp1_64bitmode(ctx
);
11345 TCGv_i32 fp0
= tcg_temp_new_i32();
11347 gen_load_fpr32(ctx
, fp0
, fs
);
11348 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11349 gen_store_fpr32(ctx
, fp0
, fd
);
11350 tcg_temp_free_i32(fp0
);
11354 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11355 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11357 TCGv_i32 fp0
= tcg_temp_new_i32();
11358 TCGv_i32 fp1
= tcg_temp_new_i32();
11359 gen_load_fpr32(ctx
, fp0
, fs
);
11360 gen_load_fpr32(ctx
, fp1
, ft
);
11361 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11362 gen_store_fpr32(ctx
, fp1
, fd
);
11363 tcg_temp_free_i32(fp1
);
11364 tcg_temp_free_i32(fp0
);
11367 check_cp1_64bitmode(ctx
);
11369 TCGv_i32 fp0
= tcg_temp_new_i32();
11370 TCGv_i32 fp1
= tcg_temp_new_i32();
11372 gen_load_fpr32(ctx
, fp0
, fs
);
11373 gen_load_fpr32(ctx
, fp1
, ft
);
11374 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11375 tcg_temp_free_i32(fp1
);
11376 gen_store_fpr32(ctx
, fp0
, fd
);
11377 tcg_temp_free_i32(fp0
);
11382 check_cp1_registers(ctx
, fd
);
11384 TCGv_i32 fp32
= tcg_temp_new_i32();
11385 TCGv_i64 fp64
= tcg_temp_new_i64();
11387 gen_load_fpr32(ctx
, fp32
, fs
);
11388 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11389 tcg_temp_free_i32(fp32
);
11390 gen_store_fpr64(ctx
, fp64
, fd
);
11391 tcg_temp_free_i64(fp64
);
11396 TCGv_i32 fp0
= tcg_temp_new_i32();
11398 gen_load_fpr32(ctx
, fp0
, fs
);
11399 if (ctx
->nan2008
) {
11400 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11402 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11404 gen_store_fpr32(ctx
, fp0
, fd
);
11405 tcg_temp_free_i32(fp0
);
11409 check_cp1_64bitmode(ctx
);
11411 TCGv_i32 fp32
= tcg_temp_new_i32();
11412 TCGv_i64 fp64
= tcg_temp_new_i64();
11414 gen_load_fpr32(ctx
, fp32
, fs
);
11415 if (ctx
->nan2008
) {
11416 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11418 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11420 tcg_temp_free_i32(fp32
);
11421 gen_store_fpr64(ctx
, fp64
, fd
);
11422 tcg_temp_free_i64(fp64
);
11428 TCGv_i64 fp64
= tcg_temp_new_i64();
11429 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11430 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11432 gen_load_fpr32(ctx
, fp32_0
, fs
);
11433 gen_load_fpr32(ctx
, fp32_1
, ft
);
11434 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11435 tcg_temp_free_i32(fp32_1
);
11436 tcg_temp_free_i32(fp32_0
);
11437 gen_store_fpr64(ctx
, fp64
, fd
);
11438 tcg_temp_free_i64(fp64
);
11444 case OPC_CMP_UEQ_S
:
11445 case OPC_CMP_OLT_S
:
11446 case OPC_CMP_ULT_S
:
11447 case OPC_CMP_OLE_S
:
11448 case OPC_CMP_ULE_S
:
11450 case OPC_CMP_NGLE_S
:
11451 case OPC_CMP_SEQ_S
:
11452 case OPC_CMP_NGL_S
:
11454 case OPC_CMP_NGE_S
:
11456 case OPC_CMP_NGT_S
:
11457 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11458 if (ctx
->opcode
& (1 << 6)) {
11459 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11461 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11465 check_cp1_registers(ctx
, fs
| ft
| fd
);
11467 TCGv_i64 fp0
= tcg_temp_new_i64();
11468 TCGv_i64 fp1
= tcg_temp_new_i64();
11470 gen_load_fpr64(ctx
, fp0
, fs
);
11471 gen_load_fpr64(ctx
, fp1
, ft
);
11472 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11473 tcg_temp_free_i64(fp1
);
11474 gen_store_fpr64(ctx
, fp0
, fd
);
11475 tcg_temp_free_i64(fp0
);
11479 check_cp1_registers(ctx
, fs
| ft
| fd
);
11481 TCGv_i64 fp0
= tcg_temp_new_i64();
11482 TCGv_i64 fp1
= tcg_temp_new_i64();
11484 gen_load_fpr64(ctx
, fp0
, fs
);
11485 gen_load_fpr64(ctx
, fp1
, ft
);
11486 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11487 tcg_temp_free_i64(fp1
);
11488 gen_store_fpr64(ctx
, fp0
, fd
);
11489 tcg_temp_free_i64(fp0
);
11493 check_cp1_registers(ctx
, fs
| ft
| fd
);
11495 TCGv_i64 fp0
= tcg_temp_new_i64();
11496 TCGv_i64 fp1
= tcg_temp_new_i64();
11498 gen_load_fpr64(ctx
, fp0
, fs
);
11499 gen_load_fpr64(ctx
, fp1
, ft
);
11500 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11501 tcg_temp_free_i64(fp1
);
11502 gen_store_fpr64(ctx
, fp0
, fd
);
11503 tcg_temp_free_i64(fp0
);
11507 check_cp1_registers(ctx
, fs
| ft
| fd
);
11509 TCGv_i64 fp0
= tcg_temp_new_i64();
11510 TCGv_i64 fp1
= tcg_temp_new_i64();
11512 gen_load_fpr64(ctx
, fp0
, fs
);
11513 gen_load_fpr64(ctx
, fp1
, ft
);
11514 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11515 tcg_temp_free_i64(fp1
);
11516 gen_store_fpr64(ctx
, fp0
, fd
);
11517 tcg_temp_free_i64(fp0
);
11521 check_cp1_registers(ctx
, fs
| fd
);
11523 TCGv_i64 fp0
= tcg_temp_new_i64();
11525 gen_load_fpr64(ctx
, fp0
, fs
);
11526 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11527 gen_store_fpr64(ctx
, fp0
, fd
);
11528 tcg_temp_free_i64(fp0
);
11532 check_cp1_registers(ctx
, fs
| fd
);
11534 TCGv_i64 fp0
= tcg_temp_new_i64();
11536 gen_load_fpr64(ctx
, fp0
, fs
);
11537 if (ctx
->abs2008
) {
11538 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11540 gen_helper_float_abs_d(fp0
, fp0
);
11542 gen_store_fpr64(ctx
, fp0
, fd
);
11543 tcg_temp_free_i64(fp0
);
11547 check_cp1_registers(ctx
, fs
| fd
);
11549 TCGv_i64 fp0
= tcg_temp_new_i64();
11551 gen_load_fpr64(ctx
, fp0
, fs
);
11552 gen_store_fpr64(ctx
, fp0
, fd
);
11553 tcg_temp_free_i64(fp0
);
11557 check_cp1_registers(ctx
, fs
| fd
);
11559 TCGv_i64 fp0
= tcg_temp_new_i64();
11561 gen_load_fpr64(ctx
, fp0
, fs
);
11562 if (ctx
->abs2008
) {
11563 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11565 gen_helper_float_chs_d(fp0
, fp0
);
11567 gen_store_fpr64(ctx
, fp0
, fd
);
11568 tcg_temp_free_i64(fp0
);
11571 case OPC_ROUND_L_D
:
11572 check_cp1_64bitmode(ctx
);
11574 TCGv_i64 fp0
= tcg_temp_new_i64();
11576 gen_load_fpr64(ctx
, fp0
, fs
);
11577 if (ctx
->nan2008
) {
11578 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11580 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11582 gen_store_fpr64(ctx
, fp0
, fd
);
11583 tcg_temp_free_i64(fp0
);
11586 case OPC_TRUNC_L_D
:
11587 check_cp1_64bitmode(ctx
);
11589 TCGv_i64 fp0
= tcg_temp_new_i64();
11591 gen_load_fpr64(ctx
, fp0
, fs
);
11592 if (ctx
->nan2008
) {
11593 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11595 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11597 gen_store_fpr64(ctx
, fp0
, fd
);
11598 tcg_temp_free_i64(fp0
);
11602 check_cp1_64bitmode(ctx
);
11604 TCGv_i64 fp0
= tcg_temp_new_i64();
11606 gen_load_fpr64(ctx
, fp0
, fs
);
11607 if (ctx
->nan2008
) {
11608 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11610 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11612 gen_store_fpr64(ctx
, fp0
, fd
);
11613 tcg_temp_free_i64(fp0
);
11616 case OPC_FLOOR_L_D
:
11617 check_cp1_64bitmode(ctx
);
11619 TCGv_i64 fp0
= tcg_temp_new_i64();
11621 gen_load_fpr64(ctx
, fp0
, fs
);
11622 if (ctx
->nan2008
) {
11623 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11625 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11627 gen_store_fpr64(ctx
, fp0
, fd
);
11628 tcg_temp_free_i64(fp0
);
11631 case OPC_ROUND_W_D
:
11632 check_cp1_registers(ctx
, fs
);
11634 TCGv_i32 fp32
= tcg_temp_new_i32();
11635 TCGv_i64 fp64
= tcg_temp_new_i64();
11637 gen_load_fpr64(ctx
, fp64
, fs
);
11638 if (ctx
->nan2008
) {
11639 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11641 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11643 tcg_temp_free_i64(fp64
);
11644 gen_store_fpr32(ctx
, fp32
, fd
);
11645 tcg_temp_free_i32(fp32
);
11648 case OPC_TRUNC_W_D
:
11649 check_cp1_registers(ctx
, fs
);
11651 TCGv_i32 fp32
= tcg_temp_new_i32();
11652 TCGv_i64 fp64
= tcg_temp_new_i64();
11654 gen_load_fpr64(ctx
, fp64
, fs
);
11655 if (ctx
->nan2008
) {
11656 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11658 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11660 tcg_temp_free_i64(fp64
);
11661 gen_store_fpr32(ctx
, fp32
, fd
);
11662 tcg_temp_free_i32(fp32
);
11666 check_cp1_registers(ctx
, fs
);
11668 TCGv_i32 fp32
= tcg_temp_new_i32();
11669 TCGv_i64 fp64
= tcg_temp_new_i64();
11671 gen_load_fpr64(ctx
, fp64
, fs
);
11672 if (ctx
->nan2008
) {
11673 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11675 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11677 tcg_temp_free_i64(fp64
);
11678 gen_store_fpr32(ctx
, fp32
, fd
);
11679 tcg_temp_free_i32(fp32
);
11682 case OPC_FLOOR_W_D
:
11683 check_cp1_registers(ctx
, fs
);
11685 TCGv_i32 fp32
= tcg_temp_new_i32();
11686 TCGv_i64 fp64
= tcg_temp_new_i64();
11688 gen_load_fpr64(ctx
, fp64
, fs
);
11689 if (ctx
->nan2008
) {
11690 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11692 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11694 tcg_temp_free_i64(fp64
);
11695 gen_store_fpr32(ctx
, fp32
, fd
);
11696 tcg_temp_free_i32(fp32
);
11700 check_insn(ctx
, ISA_MIPS32R6
);
11701 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11704 check_insn(ctx
, ISA_MIPS32R6
);
11705 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11708 check_insn(ctx
, ISA_MIPS32R6
);
11709 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11712 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11713 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11718 TCGLabel
*l1
= gen_new_label();
11722 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11724 fp0
= tcg_temp_new_i64();
11725 gen_load_fpr64(ctx
, fp0
, fs
);
11726 gen_store_fpr64(ctx
, fp0
, fd
);
11727 tcg_temp_free_i64(fp0
);
11732 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11734 TCGLabel
*l1
= gen_new_label();
11738 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11739 fp0
= tcg_temp_new_i64();
11740 gen_load_fpr64(ctx
, fp0
, fs
);
11741 gen_store_fpr64(ctx
, fp0
, fd
);
11742 tcg_temp_free_i64(fp0
);
11748 check_cp1_registers(ctx
, fs
| fd
);
11750 TCGv_i64 fp0
= tcg_temp_new_i64();
11752 gen_load_fpr64(ctx
, fp0
, fs
);
11753 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11754 gen_store_fpr64(ctx
, fp0
, fd
);
11755 tcg_temp_free_i64(fp0
);
11759 check_cp1_registers(ctx
, fs
| fd
);
11761 TCGv_i64 fp0
= tcg_temp_new_i64();
11763 gen_load_fpr64(ctx
, fp0
, fs
);
11764 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11765 gen_store_fpr64(ctx
, fp0
, fd
);
11766 tcg_temp_free_i64(fp0
);
11770 check_insn(ctx
, ISA_MIPS32R6
);
11772 TCGv_i64 fp0
= tcg_temp_new_i64();
11773 TCGv_i64 fp1
= tcg_temp_new_i64();
11774 TCGv_i64 fp2
= tcg_temp_new_i64();
11775 gen_load_fpr64(ctx
, fp0
, fs
);
11776 gen_load_fpr64(ctx
, fp1
, ft
);
11777 gen_load_fpr64(ctx
, fp2
, fd
);
11778 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11779 gen_store_fpr64(ctx
, fp2
, fd
);
11780 tcg_temp_free_i64(fp2
);
11781 tcg_temp_free_i64(fp1
);
11782 tcg_temp_free_i64(fp0
);
11786 check_insn(ctx
, ISA_MIPS32R6
);
11788 TCGv_i64 fp0
= tcg_temp_new_i64();
11789 TCGv_i64 fp1
= tcg_temp_new_i64();
11790 TCGv_i64 fp2
= tcg_temp_new_i64();
11791 gen_load_fpr64(ctx
, fp0
, fs
);
11792 gen_load_fpr64(ctx
, fp1
, ft
);
11793 gen_load_fpr64(ctx
, fp2
, fd
);
11794 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11795 gen_store_fpr64(ctx
, fp2
, fd
);
11796 tcg_temp_free_i64(fp2
);
11797 tcg_temp_free_i64(fp1
);
11798 tcg_temp_free_i64(fp0
);
11802 check_insn(ctx
, ISA_MIPS32R6
);
11804 TCGv_i64 fp0
= tcg_temp_new_i64();
11805 gen_load_fpr64(ctx
, fp0
, fs
);
11806 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11807 gen_store_fpr64(ctx
, fp0
, fd
);
11808 tcg_temp_free_i64(fp0
);
11812 check_insn(ctx
, ISA_MIPS32R6
);
11814 TCGv_i64 fp0
= tcg_temp_new_i64();
11815 gen_load_fpr64(ctx
, fp0
, fs
);
11816 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11817 gen_store_fpr64(ctx
, fp0
, fd
);
11818 tcg_temp_free_i64(fp0
);
11821 case OPC_MIN_D
: /* OPC_RECIP2_D */
11822 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11824 TCGv_i64 fp0
= tcg_temp_new_i64();
11825 TCGv_i64 fp1
= tcg_temp_new_i64();
11826 gen_load_fpr64(ctx
, fp0
, fs
);
11827 gen_load_fpr64(ctx
, fp1
, ft
);
11828 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11829 gen_store_fpr64(ctx
, fp1
, fd
);
11830 tcg_temp_free_i64(fp1
);
11831 tcg_temp_free_i64(fp0
);
11834 check_cp1_64bitmode(ctx
);
11836 TCGv_i64 fp0
= tcg_temp_new_i64();
11837 TCGv_i64 fp1
= tcg_temp_new_i64();
11839 gen_load_fpr64(ctx
, fp0
, fs
);
11840 gen_load_fpr64(ctx
, fp1
, ft
);
11841 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11842 tcg_temp_free_i64(fp1
);
11843 gen_store_fpr64(ctx
, fp0
, fd
);
11844 tcg_temp_free_i64(fp0
);
11848 case OPC_MINA_D
: /* OPC_RECIP1_D */
11849 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11851 TCGv_i64 fp0
= tcg_temp_new_i64();
11852 TCGv_i64 fp1
= tcg_temp_new_i64();
11853 gen_load_fpr64(ctx
, fp0
, fs
);
11854 gen_load_fpr64(ctx
, fp1
, ft
);
11855 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11856 gen_store_fpr64(ctx
, fp1
, fd
);
11857 tcg_temp_free_i64(fp1
);
11858 tcg_temp_free_i64(fp0
);
11861 check_cp1_64bitmode(ctx
);
11863 TCGv_i64 fp0
= tcg_temp_new_i64();
11865 gen_load_fpr64(ctx
, fp0
, fs
);
11866 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11867 gen_store_fpr64(ctx
, fp0
, fd
);
11868 tcg_temp_free_i64(fp0
);
11872 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11873 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11875 TCGv_i64 fp0
= tcg_temp_new_i64();
11876 TCGv_i64 fp1
= tcg_temp_new_i64();
11877 gen_load_fpr64(ctx
, fp0
, fs
);
11878 gen_load_fpr64(ctx
, fp1
, ft
);
11879 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11880 gen_store_fpr64(ctx
, fp1
, fd
);
11881 tcg_temp_free_i64(fp1
);
11882 tcg_temp_free_i64(fp0
);
11885 check_cp1_64bitmode(ctx
);
11887 TCGv_i64 fp0
= tcg_temp_new_i64();
11889 gen_load_fpr64(ctx
, fp0
, fs
);
11890 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11891 gen_store_fpr64(ctx
, fp0
, fd
);
11892 tcg_temp_free_i64(fp0
);
11896 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11897 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11899 TCGv_i64 fp0
= tcg_temp_new_i64();
11900 TCGv_i64 fp1
= tcg_temp_new_i64();
11901 gen_load_fpr64(ctx
, fp0
, fs
);
11902 gen_load_fpr64(ctx
, fp1
, ft
);
11903 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11904 gen_store_fpr64(ctx
, fp1
, fd
);
11905 tcg_temp_free_i64(fp1
);
11906 tcg_temp_free_i64(fp0
);
11909 check_cp1_64bitmode(ctx
);
11911 TCGv_i64 fp0
= tcg_temp_new_i64();
11912 TCGv_i64 fp1
= tcg_temp_new_i64();
11914 gen_load_fpr64(ctx
, fp0
, fs
);
11915 gen_load_fpr64(ctx
, fp1
, ft
);
11916 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11917 tcg_temp_free_i64(fp1
);
11918 gen_store_fpr64(ctx
, fp0
, fd
);
11919 tcg_temp_free_i64(fp0
);
11926 case OPC_CMP_UEQ_D
:
11927 case OPC_CMP_OLT_D
:
11928 case OPC_CMP_ULT_D
:
11929 case OPC_CMP_OLE_D
:
11930 case OPC_CMP_ULE_D
:
11932 case OPC_CMP_NGLE_D
:
11933 case OPC_CMP_SEQ_D
:
11934 case OPC_CMP_NGL_D
:
11936 case OPC_CMP_NGE_D
:
11938 case OPC_CMP_NGT_D
:
11939 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11940 if (ctx
->opcode
& (1 << 6)) {
11941 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11943 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11947 check_cp1_registers(ctx
, fs
);
11949 TCGv_i32 fp32
= tcg_temp_new_i32();
11950 TCGv_i64 fp64
= tcg_temp_new_i64();
11952 gen_load_fpr64(ctx
, fp64
, fs
);
11953 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11954 tcg_temp_free_i64(fp64
);
11955 gen_store_fpr32(ctx
, fp32
, fd
);
11956 tcg_temp_free_i32(fp32
);
11960 check_cp1_registers(ctx
, fs
);
11962 TCGv_i32 fp32
= tcg_temp_new_i32();
11963 TCGv_i64 fp64
= tcg_temp_new_i64();
11965 gen_load_fpr64(ctx
, fp64
, fs
);
11966 if (ctx
->nan2008
) {
11967 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11969 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11971 tcg_temp_free_i64(fp64
);
11972 gen_store_fpr32(ctx
, fp32
, fd
);
11973 tcg_temp_free_i32(fp32
);
11977 check_cp1_64bitmode(ctx
);
11979 TCGv_i64 fp0
= tcg_temp_new_i64();
11981 gen_load_fpr64(ctx
, fp0
, fs
);
11982 if (ctx
->nan2008
) {
11983 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11985 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11987 gen_store_fpr64(ctx
, fp0
, fd
);
11988 tcg_temp_free_i64(fp0
);
11993 TCGv_i32 fp0
= tcg_temp_new_i32();
11995 gen_load_fpr32(ctx
, fp0
, fs
);
11996 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11997 gen_store_fpr32(ctx
, fp0
, fd
);
11998 tcg_temp_free_i32(fp0
);
12002 check_cp1_registers(ctx
, fd
);
12004 TCGv_i32 fp32
= tcg_temp_new_i32();
12005 TCGv_i64 fp64
= tcg_temp_new_i64();
12007 gen_load_fpr32(ctx
, fp32
, fs
);
12008 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12009 tcg_temp_free_i32(fp32
);
12010 gen_store_fpr64(ctx
, fp64
, fd
);
12011 tcg_temp_free_i64(fp64
);
12015 check_cp1_64bitmode(ctx
);
12017 TCGv_i32 fp32
= tcg_temp_new_i32();
12018 TCGv_i64 fp64
= tcg_temp_new_i64();
12020 gen_load_fpr64(ctx
, fp64
, fs
);
12021 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12022 tcg_temp_free_i64(fp64
);
12023 gen_store_fpr32(ctx
, fp32
, fd
);
12024 tcg_temp_free_i32(fp32
);
12028 check_cp1_64bitmode(ctx
);
12030 TCGv_i64 fp0
= tcg_temp_new_i64();
12032 gen_load_fpr64(ctx
, fp0
, fs
);
12033 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12034 gen_store_fpr64(ctx
, fp0
, fd
);
12035 tcg_temp_free_i64(fp0
);
12038 case OPC_CVT_PS_PW
:
12041 TCGv_i64 fp0
= tcg_temp_new_i64();
12043 gen_load_fpr64(ctx
, fp0
, fs
);
12044 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12045 gen_store_fpr64(ctx
, fp0
, fd
);
12046 tcg_temp_free_i64(fp0
);
12052 TCGv_i64 fp0
= tcg_temp_new_i64();
12053 TCGv_i64 fp1
= tcg_temp_new_i64();
12055 gen_load_fpr64(ctx
, fp0
, fs
);
12056 gen_load_fpr64(ctx
, fp1
, ft
);
12057 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12058 tcg_temp_free_i64(fp1
);
12059 gen_store_fpr64(ctx
, fp0
, fd
);
12060 tcg_temp_free_i64(fp0
);
12066 TCGv_i64 fp0
= tcg_temp_new_i64();
12067 TCGv_i64 fp1
= tcg_temp_new_i64();
12069 gen_load_fpr64(ctx
, fp0
, fs
);
12070 gen_load_fpr64(ctx
, fp1
, ft
);
12071 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12072 tcg_temp_free_i64(fp1
);
12073 gen_store_fpr64(ctx
, fp0
, fd
);
12074 tcg_temp_free_i64(fp0
);
12080 TCGv_i64 fp0
= tcg_temp_new_i64();
12081 TCGv_i64 fp1
= tcg_temp_new_i64();
12083 gen_load_fpr64(ctx
, fp0
, fs
);
12084 gen_load_fpr64(ctx
, fp1
, ft
);
12085 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12086 tcg_temp_free_i64(fp1
);
12087 gen_store_fpr64(ctx
, fp0
, fd
);
12088 tcg_temp_free_i64(fp0
);
12094 TCGv_i64 fp0
= tcg_temp_new_i64();
12096 gen_load_fpr64(ctx
, fp0
, fs
);
12097 gen_helper_float_abs_ps(fp0
, fp0
);
12098 gen_store_fpr64(ctx
, fp0
, fd
);
12099 tcg_temp_free_i64(fp0
);
12105 TCGv_i64 fp0
= tcg_temp_new_i64();
12107 gen_load_fpr64(ctx
, fp0
, fs
);
12108 gen_store_fpr64(ctx
, fp0
, fd
);
12109 tcg_temp_free_i64(fp0
);
12115 TCGv_i64 fp0
= tcg_temp_new_i64();
12117 gen_load_fpr64(ctx
, fp0
, fs
);
12118 gen_helper_float_chs_ps(fp0
, fp0
);
12119 gen_store_fpr64(ctx
, fp0
, fd
);
12120 tcg_temp_free_i64(fp0
);
12125 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12130 TCGLabel
*l1
= gen_new_label();
12134 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12135 fp0
= tcg_temp_new_i64();
12136 gen_load_fpr64(ctx
, fp0
, fs
);
12137 gen_store_fpr64(ctx
, fp0
, fd
);
12138 tcg_temp_free_i64(fp0
);
12145 TCGLabel
*l1
= gen_new_label();
12149 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12150 fp0
= tcg_temp_new_i64();
12151 gen_load_fpr64(ctx
, fp0
, fs
);
12152 gen_store_fpr64(ctx
, fp0
, fd
);
12153 tcg_temp_free_i64(fp0
);
12161 TCGv_i64 fp0
= tcg_temp_new_i64();
12162 TCGv_i64 fp1
= tcg_temp_new_i64();
12164 gen_load_fpr64(ctx
, fp0
, ft
);
12165 gen_load_fpr64(ctx
, fp1
, fs
);
12166 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12167 tcg_temp_free_i64(fp1
);
12168 gen_store_fpr64(ctx
, fp0
, fd
);
12169 tcg_temp_free_i64(fp0
);
12175 TCGv_i64 fp0
= tcg_temp_new_i64();
12176 TCGv_i64 fp1
= tcg_temp_new_i64();
12178 gen_load_fpr64(ctx
, fp0
, ft
);
12179 gen_load_fpr64(ctx
, fp1
, fs
);
12180 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12181 tcg_temp_free_i64(fp1
);
12182 gen_store_fpr64(ctx
, fp0
, fd
);
12183 tcg_temp_free_i64(fp0
);
12186 case OPC_RECIP2_PS
:
12189 TCGv_i64 fp0
= tcg_temp_new_i64();
12190 TCGv_i64 fp1
= tcg_temp_new_i64();
12192 gen_load_fpr64(ctx
, fp0
, fs
);
12193 gen_load_fpr64(ctx
, fp1
, ft
);
12194 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12195 tcg_temp_free_i64(fp1
);
12196 gen_store_fpr64(ctx
, fp0
, fd
);
12197 tcg_temp_free_i64(fp0
);
12200 case OPC_RECIP1_PS
:
12203 TCGv_i64 fp0
= tcg_temp_new_i64();
12205 gen_load_fpr64(ctx
, fp0
, fs
);
12206 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12207 gen_store_fpr64(ctx
, fp0
, fd
);
12208 tcg_temp_free_i64(fp0
);
12211 case OPC_RSQRT1_PS
:
12214 TCGv_i64 fp0
= tcg_temp_new_i64();
12216 gen_load_fpr64(ctx
, fp0
, fs
);
12217 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12218 gen_store_fpr64(ctx
, fp0
, fd
);
12219 tcg_temp_free_i64(fp0
);
12222 case OPC_RSQRT2_PS
:
12225 TCGv_i64 fp0
= tcg_temp_new_i64();
12226 TCGv_i64 fp1
= tcg_temp_new_i64();
12228 gen_load_fpr64(ctx
, fp0
, fs
);
12229 gen_load_fpr64(ctx
, fp1
, ft
);
12230 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12231 tcg_temp_free_i64(fp1
);
12232 gen_store_fpr64(ctx
, fp0
, fd
);
12233 tcg_temp_free_i64(fp0
);
12237 check_cp1_64bitmode(ctx
);
12239 TCGv_i32 fp0
= tcg_temp_new_i32();
12241 gen_load_fpr32h(ctx
, fp0
, fs
);
12242 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12243 gen_store_fpr32(ctx
, fp0
, fd
);
12244 tcg_temp_free_i32(fp0
);
12247 case OPC_CVT_PW_PS
:
12250 TCGv_i64 fp0
= tcg_temp_new_i64();
12252 gen_load_fpr64(ctx
, fp0
, fs
);
12253 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12254 gen_store_fpr64(ctx
, fp0
, fd
);
12255 tcg_temp_free_i64(fp0
);
12259 check_cp1_64bitmode(ctx
);
12261 TCGv_i32 fp0
= tcg_temp_new_i32();
12263 gen_load_fpr32(ctx
, fp0
, fs
);
12264 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12265 gen_store_fpr32(ctx
, fp0
, fd
);
12266 tcg_temp_free_i32(fp0
);
12272 TCGv_i32 fp0
= tcg_temp_new_i32();
12273 TCGv_i32 fp1
= tcg_temp_new_i32();
12275 gen_load_fpr32(ctx
, fp0
, fs
);
12276 gen_load_fpr32(ctx
, fp1
, ft
);
12277 gen_store_fpr32h(ctx
, fp0
, fd
);
12278 gen_store_fpr32(ctx
, fp1
, fd
);
12279 tcg_temp_free_i32(fp0
);
12280 tcg_temp_free_i32(fp1
);
12286 TCGv_i32 fp0
= tcg_temp_new_i32();
12287 TCGv_i32 fp1
= tcg_temp_new_i32();
12289 gen_load_fpr32(ctx
, fp0
, fs
);
12290 gen_load_fpr32h(ctx
, fp1
, ft
);
12291 gen_store_fpr32(ctx
, fp1
, fd
);
12292 gen_store_fpr32h(ctx
, fp0
, fd
);
12293 tcg_temp_free_i32(fp0
);
12294 tcg_temp_free_i32(fp1
);
12300 TCGv_i32 fp0
= tcg_temp_new_i32();
12301 TCGv_i32 fp1
= tcg_temp_new_i32();
12303 gen_load_fpr32h(ctx
, fp0
, fs
);
12304 gen_load_fpr32(ctx
, fp1
, ft
);
12305 gen_store_fpr32(ctx
, fp1
, fd
);
12306 gen_store_fpr32h(ctx
, fp0
, fd
);
12307 tcg_temp_free_i32(fp0
);
12308 tcg_temp_free_i32(fp1
);
12314 TCGv_i32 fp0
= tcg_temp_new_i32();
12315 TCGv_i32 fp1
= tcg_temp_new_i32();
12317 gen_load_fpr32h(ctx
, fp0
, fs
);
12318 gen_load_fpr32h(ctx
, fp1
, ft
);
12319 gen_store_fpr32(ctx
, fp1
, fd
);
12320 gen_store_fpr32h(ctx
, fp0
, fd
);
12321 tcg_temp_free_i32(fp0
);
12322 tcg_temp_free_i32(fp1
);
12326 case OPC_CMP_UN_PS
:
12327 case OPC_CMP_EQ_PS
:
12328 case OPC_CMP_UEQ_PS
:
12329 case OPC_CMP_OLT_PS
:
12330 case OPC_CMP_ULT_PS
:
12331 case OPC_CMP_OLE_PS
:
12332 case OPC_CMP_ULE_PS
:
12333 case OPC_CMP_SF_PS
:
12334 case OPC_CMP_NGLE_PS
:
12335 case OPC_CMP_SEQ_PS
:
12336 case OPC_CMP_NGL_PS
:
12337 case OPC_CMP_LT_PS
:
12338 case OPC_CMP_NGE_PS
:
12339 case OPC_CMP_LE_PS
:
12340 case OPC_CMP_NGT_PS
:
12341 if (ctx
->opcode
& (1 << 6)) {
12342 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12344 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12348 MIPS_INVAL("farith");
12349 generate_exception_end(ctx
, EXCP_RI
);
12354 /* Coprocessor 3 (FPU) */
12355 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12356 int fd
, int fs
, int base
, int index
)
12358 TCGv t0
= tcg_temp_new();
12361 gen_load_gpr(t0
, index
);
12362 } else if (index
== 0) {
12363 gen_load_gpr(t0
, base
);
12365 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12367 /* Don't do NOP if destination is zero: we must perform the actual
12373 TCGv_i32 fp0
= tcg_temp_new_i32();
12375 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12376 tcg_gen_trunc_tl_i32(fp0
, t0
);
12377 gen_store_fpr32(ctx
, fp0
, fd
);
12378 tcg_temp_free_i32(fp0
);
12383 check_cp1_registers(ctx
, fd
);
12385 TCGv_i64 fp0
= tcg_temp_new_i64();
12386 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12387 gen_store_fpr64(ctx
, fp0
, fd
);
12388 tcg_temp_free_i64(fp0
);
12392 check_cp1_64bitmode(ctx
);
12393 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12395 TCGv_i64 fp0
= tcg_temp_new_i64();
12397 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12398 gen_store_fpr64(ctx
, fp0
, fd
);
12399 tcg_temp_free_i64(fp0
);
12405 TCGv_i32 fp0
= tcg_temp_new_i32();
12406 gen_load_fpr32(ctx
, fp0
, fs
);
12407 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12408 tcg_temp_free_i32(fp0
);
12413 check_cp1_registers(ctx
, fs
);
12415 TCGv_i64 fp0
= tcg_temp_new_i64();
12416 gen_load_fpr64(ctx
, fp0
, fs
);
12417 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12418 tcg_temp_free_i64(fp0
);
12422 check_cp1_64bitmode(ctx
);
12423 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12425 TCGv_i64 fp0
= tcg_temp_new_i64();
12426 gen_load_fpr64(ctx
, fp0
, fs
);
12427 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12428 tcg_temp_free_i64(fp0
);
12435 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12436 int fd
, int fr
, int fs
, int ft
)
12442 TCGv t0
= tcg_temp_local_new();
12443 TCGv_i32 fp
= tcg_temp_new_i32();
12444 TCGv_i32 fph
= tcg_temp_new_i32();
12445 TCGLabel
*l1
= gen_new_label();
12446 TCGLabel
*l2
= gen_new_label();
12448 gen_load_gpr(t0
, fr
);
12449 tcg_gen_andi_tl(t0
, t0
, 0x7);
12451 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12452 gen_load_fpr32(ctx
, fp
, fs
);
12453 gen_load_fpr32h(ctx
, fph
, fs
);
12454 gen_store_fpr32(ctx
, fp
, fd
);
12455 gen_store_fpr32h(ctx
, fph
, fd
);
12458 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12460 #ifdef TARGET_WORDS_BIGENDIAN
12461 gen_load_fpr32(ctx
, fp
, fs
);
12462 gen_load_fpr32h(ctx
, fph
, ft
);
12463 gen_store_fpr32h(ctx
, fp
, fd
);
12464 gen_store_fpr32(ctx
, fph
, fd
);
12466 gen_load_fpr32h(ctx
, fph
, fs
);
12467 gen_load_fpr32(ctx
, fp
, ft
);
12468 gen_store_fpr32(ctx
, fph
, fd
);
12469 gen_store_fpr32h(ctx
, fp
, fd
);
12472 tcg_temp_free_i32(fp
);
12473 tcg_temp_free_i32(fph
);
12479 TCGv_i32 fp0
= tcg_temp_new_i32();
12480 TCGv_i32 fp1
= tcg_temp_new_i32();
12481 TCGv_i32 fp2
= tcg_temp_new_i32();
12483 gen_load_fpr32(ctx
, fp0
, fs
);
12484 gen_load_fpr32(ctx
, fp1
, ft
);
12485 gen_load_fpr32(ctx
, fp2
, fr
);
12486 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12487 tcg_temp_free_i32(fp0
);
12488 tcg_temp_free_i32(fp1
);
12489 gen_store_fpr32(ctx
, fp2
, fd
);
12490 tcg_temp_free_i32(fp2
);
12495 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12497 TCGv_i64 fp0
= tcg_temp_new_i64();
12498 TCGv_i64 fp1
= tcg_temp_new_i64();
12499 TCGv_i64 fp2
= tcg_temp_new_i64();
12501 gen_load_fpr64(ctx
, fp0
, fs
);
12502 gen_load_fpr64(ctx
, fp1
, ft
);
12503 gen_load_fpr64(ctx
, fp2
, fr
);
12504 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12505 tcg_temp_free_i64(fp0
);
12506 tcg_temp_free_i64(fp1
);
12507 gen_store_fpr64(ctx
, fp2
, fd
);
12508 tcg_temp_free_i64(fp2
);
12514 TCGv_i64 fp0
= tcg_temp_new_i64();
12515 TCGv_i64 fp1
= tcg_temp_new_i64();
12516 TCGv_i64 fp2
= tcg_temp_new_i64();
12518 gen_load_fpr64(ctx
, fp0
, fs
);
12519 gen_load_fpr64(ctx
, fp1
, ft
);
12520 gen_load_fpr64(ctx
, fp2
, fr
);
12521 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12522 tcg_temp_free_i64(fp0
);
12523 tcg_temp_free_i64(fp1
);
12524 gen_store_fpr64(ctx
, fp2
, fd
);
12525 tcg_temp_free_i64(fp2
);
12531 TCGv_i32 fp0
= tcg_temp_new_i32();
12532 TCGv_i32 fp1
= tcg_temp_new_i32();
12533 TCGv_i32 fp2
= tcg_temp_new_i32();
12535 gen_load_fpr32(ctx
, fp0
, fs
);
12536 gen_load_fpr32(ctx
, fp1
, ft
);
12537 gen_load_fpr32(ctx
, fp2
, fr
);
12538 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12539 tcg_temp_free_i32(fp0
);
12540 tcg_temp_free_i32(fp1
);
12541 gen_store_fpr32(ctx
, fp2
, fd
);
12542 tcg_temp_free_i32(fp2
);
12547 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12549 TCGv_i64 fp0
= tcg_temp_new_i64();
12550 TCGv_i64 fp1
= tcg_temp_new_i64();
12551 TCGv_i64 fp2
= tcg_temp_new_i64();
12553 gen_load_fpr64(ctx
, fp0
, fs
);
12554 gen_load_fpr64(ctx
, fp1
, ft
);
12555 gen_load_fpr64(ctx
, fp2
, fr
);
12556 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12557 tcg_temp_free_i64(fp0
);
12558 tcg_temp_free_i64(fp1
);
12559 gen_store_fpr64(ctx
, fp2
, fd
);
12560 tcg_temp_free_i64(fp2
);
12566 TCGv_i64 fp0
= tcg_temp_new_i64();
12567 TCGv_i64 fp1
= tcg_temp_new_i64();
12568 TCGv_i64 fp2
= tcg_temp_new_i64();
12570 gen_load_fpr64(ctx
, fp0
, fs
);
12571 gen_load_fpr64(ctx
, fp1
, ft
);
12572 gen_load_fpr64(ctx
, fp2
, fr
);
12573 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12574 tcg_temp_free_i64(fp0
);
12575 tcg_temp_free_i64(fp1
);
12576 gen_store_fpr64(ctx
, fp2
, fd
);
12577 tcg_temp_free_i64(fp2
);
12583 TCGv_i32 fp0
= tcg_temp_new_i32();
12584 TCGv_i32 fp1
= tcg_temp_new_i32();
12585 TCGv_i32 fp2
= tcg_temp_new_i32();
12587 gen_load_fpr32(ctx
, fp0
, fs
);
12588 gen_load_fpr32(ctx
, fp1
, ft
);
12589 gen_load_fpr32(ctx
, fp2
, fr
);
12590 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12591 tcg_temp_free_i32(fp0
);
12592 tcg_temp_free_i32(fp1
);
12593 gen_store_fpr32(ctx
, fp2
, fd
);
12594 tcg_temp_free_i32(fp2
);
12599 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12601 TCGv_i64 fp0
= tcg_temp_new_i64();
12602 TCGv_i64 fp1
= tcg_temp_new_i64();
12603 TCGv_i64 fp2
= tcg_temp_new_i64();
12605 gen_load_fpr64(ctx
, fp0
, fs
);
12606 gen_load_fpr64(ctx
, fp1
, ft
);
12607 gen_load_fpr64(ctx
, fp2
, fr
);
12608 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12609 tcg_temp_free_i64(fp0
);
12610 tcg_temp_free_i64(fp1
);
12611 gen_store_fpr64(ctx
, fp2
, fd
);
12612 tcg_temp_free_i64(fp2
);
12618 TCGv_i64 fp0
= tcg_temp_new_i64();
12619 TCGv_i64 fp1
= tcg_temp_new_i64();
12620 TCGv_i64 fp2
= tcg_temp_new_i64();
12622 gen_load_fpr64(ctx
, fp0
, fs
);
12623 gen_load_fpr64(ctx
, fp1
, ft
);
12624 gen_load_fpr64(ctx
, fp2
, fr
);
12625 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12626 tcg_temp_free_i64(fp0
);
12627 tcg_temp_free_i64(fp1
);
12628 gen_store_fpr64(ctx
, fp2
, fd
);
12629 tcg_temp_free_i64(fp2
);
12635 TCGv_i32 fp0
= tcg_temp_new_i32();
12636 TCGv_i32 fp1
= tcg_temp_new_i32();
12637 TCGv_i32 fp2
= tcg_temp_new_i32();
12639 gen_load_fpr32(ctx
, fp0
, fs
);
12640 gen_load_fpr32(ctx
, fp1
, ft
);
12641 gen_load_fpr32(ctx
, fp2
, fr
);
12642 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12643 tcg_temp_free_i32(fp0
);
12644 tcg_temp_free_i32(fp1
);
12645 gen_store_fpr32(ctx
, fp2
, fd
);
12646 tcg_temp_free_i32(fp2
);
12651 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12653 TCGv_i64 fp0
= tcg_temp_new_i64();
12654 TCGv_i64 fp1
= tcg_temp_new_i64();
12655 TCGv_i64 fp2
= tcg_temp_new_i64();
12657 gen_load_fpr64(ctx
, fp0
, fs
);
12658 gen_load_fpr64(ctx
, fp1
, ft
);
12659 gen_load_fpr64(ctx
, fp2
, fr
);
12660 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12661 tcg_temp_free_i64(fp0
);
12662 tcg_temp_free_i64(fp1
);
12663 gen_store_fpr64(ctx
, fp2
, fd
);
12664 tcg_temp_free_i64(fp2
);
12670 TCGv_i64 fp0
= tcg_temp_new_i64();
12671 TCGv_i64 fp1
= tcg_temp_new_i64();
12672 TCGv_i64 fp2
= tcg_temp_new_i64();
12674 gen_load_fpr64(ctx
, fp0
, fs
);
12675 gen_load_fpr64(ctx
, fp1
, ft
);
12676 gen_load_fpr64(ctx
, fp2
, fr
);
12677 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12678 tcg_temp_free_i64(fp0
);
12679 tcg_temp_free_i64(fp1
);
12680 gen_store_fpr64(ctx
, fp2
, fd
);
12681 tcg_temp_free_i64(fp2
);
12685 MIPS_INVAL("flt3_arith");
12686 generate_exception_end(ctx
, EXCP_RI
);
12691 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12695 #if !defined(CONFIG_USER_ONLY)
12696 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12697 Therefore only check the ISA in system mode. */
12698 check_insn(ctx
, ISA_MIPS32R2
);
12700 t0
= tcg_temp_new();
12704 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12705 gen_store_gpr(t0
, rt
);
12708 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12709 gen_store_gpr(t0
, rt
);
12712 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12715 gen_helper_rdhwr_cc(t0
, cpu_env
);
12716 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12719 gen_store_gpr(t0
, rt
);
12720 /* Break the TB to be able to take timer interrupts immediately
12721 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12722 we break completely out of translated code. */
12723 gen_save_pc(ctx
->base
.pc_next
+ 4);
12724 ctx
->base
.is_jmp
= DISAS_EXIT
;
12727 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12728 gen_store_gpr(t0
, rt
);
12731 check_insn(ctx
, ISA_MIPS32R6
);
12733 /* Performance counter registers are not implemented other than
12734 * control register 0.
12736 generate_exception(ctx
, EXCP_RI
);
12738 gen_helper_rdhwr_performance(t0
, cpu_env
);
12739 gen_store_gpr(t0
, rt
);
12742 check_insn(ctx
, ISA_MIPS32R6
);
12743 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12744 gen_store_gpr(t0
, rt
);
12747 #if defined(CONFIG_USER_ONLY)
12748 tcg_gen_ld_tl(t0
, cpu_env
,
12749 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12750 gen_store_gpr(t0
, rt
);
12753 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12754 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12755 tcg_gen_ld_tl(t0
, cpu_env
,
12756 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12757 gen_store_gpr(t0
, rt
);
12759 generate_exception_end(ctx
, EXCP_RI
);
12763 default: /* Invalid */
12764 MIPS_INVAL("rdhwr");
12765 generate_exception_end(ctx
, EXCP_RI
);
12771 static inline void clear_branch_hflags(DisasContext
*ctx
)
12773 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12774 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12775 save_cpu_state(ctx
, 0);
12777 /* it is not safe to save ctx->hflags as hflags may be changed
12778 in execution time by the instruction in delay / forbidden slot. */
12779 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12783 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12785 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12786 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12787 /* Branches completion */
12788 clear_branch_hflags(ctx
);
12789 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12790 /* FIXME: Need to clear can_do_io. */
12791 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12792 case MIPS_HFLAG_FBNSLOT
:
12793 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12796 /* unconditional branch */
12797 if (proc_hflags
& MIPS_HFLAG_BX
) {
12798 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12800 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12802 case MIPS_HFLAG_BL
:
12803 /* blikely taken case */
12804 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12806 case MIPS_HFLAG_BC
:
12807 /* Conditional branch */
12809 TCGLabel
*l1
= gen_new_label();
12811 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12812 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12814 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12817 case MIPS_HFLAG_BR
:
12818 /* unconditional branch to register */
12819 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12820 TCGv t0
= tcg_temp_new();
12821 TCGv_i32 t1
= tcg_temp_new_i32();
12823 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12824 tcg_gen_trunc_tl_i32(t1
, t0
);
12826 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12827 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12828 tcg_gen_or_i32(hflags
, hflags
, t1
);
12829 tcg_temp_free_i32(t1
);
12831 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12833 tcg_gen_mov_tl(cpu_PC
, btarget
);
12835 if (ctx
->base
.singlestep_enabled
) {
12836 save_cpu_state(ctx
, 0);
12837 gen_helper_raise_exception_debug(cpu_env
);
12839 tcg_gen_lookup_and_goto_ptr();
12842 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12848 /* Compact Branches */
12849 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12850 int rs
, int rt
, int32_t offset
)
12852 int bcond_compute
= 0;
12853 TCGv t0
= tcg_temp_new();
12854 TCGv t1
= tcg_temp_new();
12855 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12857 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12858 #ifdef MIPS_DEBUG_DISAS
12859 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12860 "\n", ctx
->base
.pc_next
);
12862 generate_exception_end(ctx
, EXCP_RI
);
12866 /* Load needed operands and calculate btarget */
12868 /* compact branch */
12869 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12870 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12871 gen_load_gpr(t0
, rs
);
12872 gen_load_gpr(t1
, rt
);
12874 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12875 if (rs
<= rt
&& rs
== 0) {
12876 /* OPC_BEQZALC, OPC_BNEZALC */
12877 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12880 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12881 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12882 gen_load_gpr(t0
, rs
);
12883 gen_load_gpr(t1
, rt
);
12885 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12887 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12888 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12889 if (rs
== 0 || rs
== rt
) {
12890 /* OPC_BLEZALC, OPC_BGEZALC */
12891 /* OPC_BGTZALC, OPC_BLTZALC */
12892 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12894 gen_load_gpr(t0
, rs
);
12895 gen_load_gpr(t1
, rt
);
12897 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12901 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12906 /* OPC_BEQZC, OPC_BNEZC */
12907 gen_load_gpr(t0
, rs
);
12909 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12911 /* OPC_JIC, OPC_JIALC */
12912 TCGv tbase
= tcg_temp_new();
12913 TCGv toffset
= tcg_temp_new();
12915 gen_load_gpr(tbase
, rt
);
12916 tcg_gen_movi_tl(toffset
, offset
);
12917 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12918 tcg_temp_free(tbase
);
12919 tcg_temp_free(toffset
);
12923 MIPS_INVAL("Compact branch/jump");
12924 generate_exception_end(ctx
, EXCP_RI
);
12928 if (bcond_compute
== 0) {
12929 /* Uncoditional compact branch */
12932 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12935 ctx
->hflags
|= MIPS_HFLAG_BR
;
12938 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12941 ctx
->hflags
|= MIPS_HFLAG_B
;
12944 MIPS_INVAL("Compact branch/jump");
12945 generate_exception_end(ctx
, EXCP_RI
);
12949 /* Generating branch here as compact branches don't have delay slot */
12950 gen_branch(ctx
, 4);
12952 /* Conditional compact branch */
12953 TCGLabel
*fs
= gen_new_label();
12954 save_cpu_state(ctx
, 0);
12957 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12958 if (rs
== 0 && rt
!= 0) {
12960 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12961 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12963 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12966 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12969 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12970 if (rs
== 0 && rt
!= 0) {
12972 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12973 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12975 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12978 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12981 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12982 if (rs
== 0 && rt
!= 0) {
12984 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12985 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12987 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12990 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12993 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12994 if (rs
== 0 && rt
!= 0) {
12996 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12997 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12999 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13002 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13005 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13006 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13008 /* OPC_BOVC, OPC_BNVC */
13009 TCGv t2
= tcg_temp_new();
13010 TCGv t3
= tcg_temp_new();
13011 TCGv t4
= tcg_temp_new();
13012 TCGv input_overflow
= tcg_temp_new();
13014 gen_load_gpr(t0
, rs
);
13015 gen_load_gpr(t1
, rt
);
13016 tcg_gen_ext32s_tl(t2
, t0
);
13017 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13018 tcg_gen_ext32s_tl(t3
, t1
);
13019 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13020 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13022 tcg_gen_add_tl(t4
, t2
, t3
);
13023 tcg_gen_ext32s_tl(t4
, t4
);
13024 tcg_gen_xor_tl(t2
, t2
, t3
);
13025 tcg_gen_xor_tl(t3
, t4
, t3
);
13026 tcg_gen_andc_tl(t2
, t3
, t2
);
13027 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13028 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13029 if (opc
== OPC_BOVC
) {
13031 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13034 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13036 tcg_temp_free(input_overflow
);
13040 } else if (rs
< rt
&& rs
== 0) {
13041 /* OPC_BEQZALC, OPC_BNEZALC */
13042 if (opc
== OPC_BEQZALC
) {
13044 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13047 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13050 /* OPC_BEQC, OPC_BNEC */
13051 if (opc
== OPC_BEQC
) {
13053 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13056 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13061 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13064 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13067 MIPS_INVAL("Compact conditional branch/jump");
13068 generate_exception_end(ctx
, EXCP_RI
);
13072 /* Generating branch here as compact branches don't have delay slot */
13073 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13076 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
13084 /* ISA extensions (ASEs) */
13085 /* MIPS16 extension to MIPS32 */
13087 /* MIPS16 major opcodes */
13089 M16_OPC_ADDIUSP
= 0x00,
13090 M16_OPC_ADDIUPC
= 0x01,
13092 M16_OPC_JAL
= 0x03,
13093 M16_OPC_BEQZ
= 0x04,
13094 M16_OPC_BNEQZ
= 0x05,
13095 M16_OPC_SHIFT
= 0x06,
13097 M16_OPC_RRIA
= 0x08,
13098 M16_OPC_ADDIU8
= 0x09,
13099 M16_OPC_SLTI
= 0x0a,
13100 M16_OPC_SLTIU
= 0x0b,
13103 M16_OPC_CMPI
= 0x0e,
13107 M16_OPC_LWSP
= 0x12,
13109 M16_OPC_LBU
= 0x14,
13110 M16_OPC_LHU
= 0x15,
13111 M16_OPC_LWPC
= 0x16,
13112 M16_OPC_LWU
= 0x17,
13115 M16_OPC_SWSP
= 0x1a,
13117 M16_OPC_RRR
= 0x1c,
13119 M16_OPC_EXTEND
= 0x1e,
13123 /* I8 funct field */
13142 /* RR funct field */
13176 /* I64 funct field */
13184 I64_DADDIUPC
= 0x6,
13188 /* RR ry field for CNVT */
13190 RR_RY_CNVT_ZEB
= 0x0,
13191 RR_RY_CNVT_ZEH
= 0x1,
13192 RR_RY_CNVT_ZEW
= 0x2,
13193 RR_RY_CNVT_SEB
= 0x4,
13194 RR_RY_CNVT_SEH
= 0x5,
13195 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register encoding (0..7) to the
 * corresponding MIPS32 GPR number: $16, $17, $2..$7.
 * Fix: the lookup table is read-only, so declare it const —
 * consistent with the mmreg()/mmreg2() tables later in this file.
 */
static int xlat (int r)
{
  static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

  return map[r];
}
13205 static void gen_mips16_save (DisasContext
*ctx
,
13206 int xsregs
, int aregs
,
13207 int do_ra
, int do_s0
, int do_s1
,
13210 TCGv t0
= tcg_temp_new();
13211 TCGv t1
= tcg_temp_new();
13212 TCGv t2
= tcg_temp_new();
13242 generate_exception_end(ctx
, EXCP_RI
);
13248 gen_base_offset_addr(ctx
, t0
, 29, 12);
13249 gen_load_gpr(t1
, 7);
13250 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13253 gen_base_offset_addr(ctx
, t0
, 29, 8);
13254 gen_load_gpr(t1
, 6);
13255 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13258 gen_base_offset_addr(ctx
, t0
, 29, 4);
13259 gen_load_gpr(t1
, 5);
13260 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13263 gen_base_offset_addr(ctx
, t0
, 29, 0);
13264 gen_load_gpr(t1
, 4);
13265 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13268 gen_load_gpr(t0
, 29);
13270 #define DECR_AND_STORE(reg) do { \
13271 tcg_gen_movi_tl(t2, -4); \
13272 gen_op_addr_add(ctx, t0, t0, t2); \
13273 gen_load_gpr(t1, reg); \
13274 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13278 DECR_AND_STORE(31);
13283 DECR_AND_STORE(30);
13286 DECR_AND_STORE(23);
13289 DECR_AND_STORE(22);
13292 DECR_AND_STORE(21);
13295 DECR_AND_STORE(20);
13298 DECR_AND_STORE(19);
13301 DECR_AND_STORE(18);
13305 DECR_AND_STORE(17);
13308 DECR_AND_STORE(16);
13338 generate_exception_end(ctx
, EXCP_RI
);
13354 #undef DECR_AND_STORE
13356 tcg_gen_movi_tl(t2
, -framesize
);
13357 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13363 static void gen_mips16_restore (DisasContext
*ctx
,
13364 int xsregs
, int aregs
,
13365 int do_ra
, int do_s0
, int do_s1
,
13369 TCGv t0
= tcg_temp_new();
13370 TCGv t1
= tcg_temp_new();
13371 TCGv t2
= tcg_temp_new();
13373 tcg_gen_movi_tl(t2
, framesize
);
13374 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13376 #define DECR_AND_LOAD(reg) do { \
13377 tcg_gen_movi_tl(t2, -4); \
13378 gen_op_addr_add(ctx, t0, t0, t2); \
13379 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13380 gen_store_gpr(t1, reg); \
13444 generate_exception_end(ctx
, EXCP_RI
);
13460 #undef DECR_AND_LOAD
13462 tcg_gen_movi_tl(t2
, framesize
);
13463 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13469 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13470 int is_64_bit
, int extended
)
13474 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13475 generate_exception_end(ctx
, EXCP_RI
);
13479 t0
= tcg_temp_new();
13481 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13482 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13484 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13490 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13493 TCGv_i32 t0
= tcg_const_i32(op
);
13494 TCGv t1
= tcg_temp_new();
13495 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13496 gen_helper_cache(cpu_env
, t1
, t0
);
13499 #if defined(TARGET_MIPS64)
13500 static void decode_i64_mips16 (DisasContext
*ctx
,
13501 int ry
, int funct
, int16_t offset
,
13506 check_insn(ctx
, ISA_MIPS3
);
13507 check_mips_64(ctx
);
13508 offset
= extended
? offset
: offset
<< 3;
13509 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13512 check_insn(ctx
, ISA_MIPS3
);
13513 check_mips_64(ctx
);
13514 offset
= extended
? offset
: offset
<< 3;
13515 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13518 check_insn(ctx
, ISA_MIPS3
);
13519 check_mips_64(ctx
);
13520 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13521 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13524 check_insn(ctx
, ISA_MIPS3
);
13525 check_mips_64(ctx
);
13526 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13527 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13530 check_insn(ctx
, ISA_MIPS3
);
13531 check_mips_64(ctx
);
13532 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13533 generate_exception_end(ctx
, EXCP_RI
);
13535 offset
= extended
? offset
: offset
<< 3;
13536 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13540 check_insn(ctx
, ISA_MIPS3
);
13541 check_mips_64(ctx
);
13542 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13543 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13546 check_insn(ctx
, ISA_MIPS3
);
13547 check_mips_64(ctx
);
13548 offset
= extended
? offset
: offset
<< 2;
13549 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13552 check_insn(ctx
, ISA_MIPS3
);
13553 check_mips_64(ctx
);
13554 offset
= extended
? offset
: offset
<< 2;
13555 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13561 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13563 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13564 int op
, rx
, ry
, funct
, sa
;
13565 int16_t imm
, offset
;
13567 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13568 op
= (ctx
->opcode
>> 11) & 0x1f;
13569 sa
= (ctx
->opcode
>> 22) & 0x1f;
13570 funct
= (ctx
->opcode
>> 8) & 0x7;
13571 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13572 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13573 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13574 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13575 | (ctx
->opcode
& 0x1f));
13577 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13580 case M16_OPC_ADDIUSP
:
13581 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13583 case M16_OPC_ADDIUPC
:
13584 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13587 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13588 /* No delay slot, so just process as a normal instruction */
13591 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13592 /* No delay slot, so just process as a normal instruction */
13594 case M16_OPC_BNEQZ
:
13595 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13596 /* No delay slot, so just process as a normal instruction */
13598 case M16_OPC_SHIFT
:
13599 switch (ctx
->opcode
& 0x3) {
13601 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13604 #if defined(TARGET_MIPS64)
13605 check_mips_64(ctx
);
13606 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13608 generate_exception_end(ctx
, EXCP_RI
);
13612 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13615 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13619 #if defined(TARGET_MIPS64)
13621 check_insn(ctx
, ISA_MIPS3
);
13622 check_mips_64(ctx
);
13623 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13627 imm
= ctx
->opcode
& 0xf;
13628 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13629 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13630 imm
= (int16_t) (imm
<< 1) >> 1;
13631 if ((ctx
->opcode
>> 4) & 0x1) {
13632 #if defined(TARGET_MIPS64)
13633 check_mips_64(ctx
);
13634 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13636 generate_exception_end(ctx
, EXCP_RI
);
13639 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13642 case M16_OPC_ADDIU8
:
13643 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13646 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13648 case M16_OPC_SLTIU
:
13649 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13654 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13657 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13660 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13663 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13666 check_insn(ctx
, ISA_MIPS32
);
13668 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13669 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13670 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13671 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13672 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13673 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13674 | (ctx
->opcode
& 0xf)) << 3;
13676 if (ctx
->opcode
& (1 << 7)) {
13677 gen_mips16_save(ctx
, xsregs
, aregs
,
13678 do_ra
, do_s0
, do_s1
,
13681 gen_mips16_restore(ctx
, xsregs
, aregs
,
13682 do_ra
, do_s0
, do_s1
,
13688 generate_exception_end(ctx
, EXCP_RI
);
13693 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13696 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13698 #if defined(TARGET_MIPS64)
13700 check_insn(ctx
, ISA_MIPS3
);
13701 check_mips_64(ctx
);
13702 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13706 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13709 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13712 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13715 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13718 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13721 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13724 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13726 #if defined(TARGET_MIPS64)
13728 check_insn(ctx
, ISA_MIPS3
);
13729 check_mips_64(ctx
);
13730 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13734 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13737 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13740 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13743 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13745 #if defined(TARGET_MIPS64)
13747 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13751 generate_exception_end(ctx
, EXCP_RI
);
13758 static inline bool is_uhi(int sdbbp_code
)
13760 #ifdef CONFIG_USER_ONLY
13763 return semihosting_enabled() && sdbbp_code
== 1;
13767 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13771 int op
, cnvt_op
, op1
, offset
;
13775 op
= (ctx
->opcode
>> 11) & 0x1f;
13776 sa
= (ctx
->opcode
>> 2) & 0x7;
13777 sa
= sa
== 0 ? 8 : sa
;
13778 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13779 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13780 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13781 op1
= offset
= ctx
->opcode
& 0x1f;
13786 case M16_OPC_ADDIUSP
:
13788 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13790 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13793 case M16_OPC_ADDIUPC
:
13794 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13797 offset
= (ctx
->opcode
& 0x7ff) << 1;
13798 offset
= (int16_t)(offset
<< 4) >> 4;
13799 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13800 /* No delay slot, so just process as a normal instruction */
13803 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13804 offset
= (((ctx
->opcode
& 0x1f) << 21)
13805 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13807 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13808 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13812 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13813 ((int8_t)ctx
->opcode
) << 1, 0);
13814 /* No delay slot, so just process as a normal instruction */
13816 case M16_OPC_BNEQZ
:
13817 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13818 ((int8_t)ctx
->opcode
) << 1, 0);
13819 /* No delay slot, so just process as a normal instruction */
13821 case M16_OPC_SHIFT
:
13822 switch (ctx
->opcode
& 0x3) {
13824 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13827 #if defined(TARGET_MIPS64)
13828 check_insn(ctx
, ISA_MIPS3
);
13829 check_mips_64(ctx
);
13830 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13832 generate_exception_end(ctx
, EXCP_RI
);
13836 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13839 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13843 #if defined(TARGET_MIPS64)
13845 check_insn(ctx
, ISA_MIPS3
);
13846 check_mips_64(ctx
);
13847 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13852 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13854 if ((ctx
->opcode
>> 4) & 1) {
13855 #if defined(TARGET_MIPS64)
13856 check_insn(ctx
, ISA_MIPS3
);
13857 check_mips_64(ctx
);
13858 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13860 generate_exception_end(ctx
, EXCP_RI
);
13863 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13867 case M16_OPC_ADDIU8
:
13869 int16_t imm
= (int8_t) ctx
->opcode
;
13871 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13876 int16_t imm
= (uint8_t) ctx
->opcode
;
13877 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13880 case M16_OPC_SLTIU
:
13882 int16_t imm
= (uint8_t) ctx
->opcode
;
13883 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13890 funct
= (ctx
->opcode
>> 8) & 0x7;
13893 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13894 ((int8_t)ctx
->opcode
) << 1, 0);
13897 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13898 ((int8_t)ctx
->opcode
) << 1, 0);
13901 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13904 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13905 ((int8_t)ctx
->opcode
) << 3);
13908 check_insn(ctx
, ISA_MIPS32
);
13910 int do_ra
= ctx
->opcode
& (1 << 6);
13911 int do_s0
= ctx
->opcode
& (1 << 5);
13912 int do_s1
= ctx
->opcode
& (1 << 4);
13913 int framesize
= ctx
->opcode
& 0xf;
13915 if (framesize
== 0) {
13918 framesize
= framesize
<< 3;
13921 if (ctx
->opcode
& (1 << 7)) {
13922 gen_mips16_save(ctx
, 0, 0,
13923 do_ra
, do_s0
, do_s1
, framesize
);
13925 gen_mips16_restore(ctx
, 0, 0,
13926 do_ra
, do_s0
, do_s1
, framesize
);
13932 int rz
= xlat(ctx
->opcode
& 0x7);
13934 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13935 ((ctx
->opcode
>> 5) & 0x7);
13936 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13940 reg32
= ctx
->opcode
& 0x1f;
13941 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13944 generate_exception_end(ctx
, EXCP_RI
);
13951 int16_t imm
= (uint8_t) ctx
->opcode
;
13953 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13958 int16_t imm
= (uint8_t) ctx
->opcode
;
13959 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13962 #if defined(TARGET_MIPS64)
13964 check_insn(ctx
, ISA_MIPS3
);
13965 check_mips_64(ctx
);
13966 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13970 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13973 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13976 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13979 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13982 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13985 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13988 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13990 #if defined (TARGET_MIPS64)
13992 check_insn(ctx
, ISA_MIPS3
);
13993 check_mips_64(ctx
);
13994 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13998 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14001 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14004 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14007 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14011 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14014 switch (ctx
->opcode
& 0x3) {
14016 mips32_op
= OPC_ADDU
;
14019 mips32_op
= OPC_SUBU
;
14021 #if defined(TARGET_MIPS64)
14023 mips32_op
= OPC_DADDU
;
14024 check_insn(ctx
, ISA_MIPS3
);
14025 check_mips_64(ctx
);
14028 mips32_op
= OPC_DSUBU
;
14029 check_insn(ctx
, ISA_MIPS3
);
14030 check_mips_64(ctx
);
14034 generate_exception_end(ctx
, EXCP_RI
);
14038 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14047 int nd
= (ctx
->opcode
>> 7) & 0x1;
14048 int link
= (ctx
->opcode
>> 6) & 0x1;
14049 int ra
= (ctx
->opcode
>> 5) & 0x1;
14052 check_insn(ctx
, ISA_MIPS32
);
14061 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14066 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14067 gen_helper_do_semihosting(cpu_env
);
14069 /* XXX: not clear which exception should be raised
14070 * when in debug mode...
14072 check_insn(ctx
, ISA_MIPS32
);
14073 generate_exception_end(ctx
, EXCP_DBp
);
14077 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14080 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14083 generate_exception_end(ctx
, EXCP_BREAK
);
14086 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14089 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14092 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14094 #if defined (TARGET_MIPS64)
14096 check_insn(ctx
, ISA_MIPS3
);
14097 check_mips_64(ctx
);
14098 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14102 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14105 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14108 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14111 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14114 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14117 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14120 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14123 check_insn(ctx
, ISA_MIPS32
);
14125 case RR_RY_CNVT_ZEB
:
14126 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14128 case RR_RY_CNVT_ZEH
:
14129 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14131 case RR_RY_CNVT_SEB
:
14132 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14134 case RR_RY_CNVT_SEH
:
14135 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14137 #if defined (TARGET_MIPS64)
14138 case RR_RY_CNVT_ZEW
:
14139 check_insn(ctx
, ISA_MIPS64
);
14140 check_mips_64(ctx
);
14141 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14143 case RR_RY_CNVT_SEW
:
14144 check_insn(ctx
, ISA_MIPS64
);
14145 check_mips_64(ctx
);
14146 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14150 generate_exception_end(ctx
, EXCP_RI
);
14155 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14157 #if defined (TARGET_MIPS64)
14159 check_insn(ctx
, ISA_MIPS3
);
14160 check_mips_64(ctx
);
14161 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14164 check_insn(ctx
, ISA_MIPS3
);
14165 check_mips_64(ctx
);
14166 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14169 check_insn(ctx
, ISA_MIPS3
);
14170 check_mips_64(ctx
);
14171 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14174 check_insn(ctx
, ISA_MIPS3
);
14175 check_mips_64(ctx
);
14176 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14180 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14183 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14186 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14189 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14191 #if defined (TARGET_MIPS64)
14193 check_insn(ctx
, ISA_MIPS3
);
14194 check_mips_64(ctx
);
14195 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14198 check_insn(ctx
, ISA_MIPS3
);
14199 check_mips_64(ctx
);
14200 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14203 check_insn(ctx
, ISA_MIPS3
);
14204 check_mips_64(ctx
);
14205 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14208 check_insn(ctx
, ISA_MIPS3
);
14209 check_mips_64(ctx
);
14210 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14214 generate_exception_end(ctx
, EXCP_RI
);
14218 case M16_OPC_EXTEND
:
14219 decode_extended_mips16_opc(env
, ctx
);
14222 #if defined(TARGET_MIPS64)
14224 funct
= (ctx
->opcode
>> 8) & 0x7;
14225 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14229 generate_exception_end(ctx
, EXCP_RI
);
14236 /* microMIPS extension to MIPS32/MIPS64 */
14239 * microMIPS32/microMIPS64 major opcodes
14241 * 1. MIPS Architecture for Programmers Volume II-B:
14242 * The microMIPS32 Instruction Set (Revision 3.05)
14244 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14246 * 2. MIPS Architecture For Programmers Volume II-A:
14247 * The MIPS64 Instruction Set (Revision 3.51)
14277 POOL32S
= 0x16, /* MIPS64 */
14278 DADDIU32
= 0x17, /* MIPS64 */
14307 /* 0x29 is reserved */
14320 /* 0x31 is reserved */
14333 SD32
= 0x36, /* MIPS64 */
14334 LD32
= 0x37, /* MIPS64 */
14336 /* 0x39 is reserved */
14352 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14374 /* POOL32A encoding of minor opcode field */
14377 /* These opcodes are distinguished only by bits 9..6; those bits are
14378 * what are recorded below. */
14415 /* The following can be distinguished by their lower 6 bits. */
14425 /* POOL32AXF encoding of minor opcode field extension */
14428 * 1. MIPS Architecture for Programmers Volume II-B:
14429 * The microMIPS32 Instruction Set (Revision 3.05)
14431 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14433 * 2. MIPS Architecture for Programmers VolumeIV-e:
14434 * The MIPS DSP Application-Specific Extension
14435 * to the microMIPS32 Architecture (Revision 2.34)
14437 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14452 /* begin of microMIPS32 DSP */
14454 /* bits 13..12 for 0x01 */
14460 /* bits 13..12 for 0x2a */
14466 /* bits 13..12 for 0x32 */
14470 /* end of microMIPS32 DSP */
14472 /* bits 15..12 for 0x2c */
14489 /* bits 15..12 for 0x34 */
14497 /* bits 15..12 for 0x3c */
14499 JR
= 0x0, /* alias */
14507 /* bits 15..12 for 0x05 */
14511 /* bits 15..12 for 0x0d */
14523 /* bits 15..12 for 0x15 */
14529 /* bits 15..12 for 0x1d */
14533 /* bits 15..12 for 0x2d */
14538 /* bits 15..12 for 0x35 */
14545 /* POOL32B encoding of minor opcode field (bits 15..12) */
14561 /* POOL32C encoding of minor opcode field (bits 15..12) */
14582 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14595 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14608 /* POOL32F encoding of minor opcode field (bits 5..0) */
14611 /* These are the bit 7..6 values */
14620 /* These are the bit 8..6 values */
14645 MOVZ_FMT_05
= 0x05,
14679 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14686 /* POOL32Fxf encoding of minor opcode extension field */
14724 /* POOL32I encoding of minor opcode field (bits 25..21) */
14754 /* These overlap and are distinguished by bit16 of the instruction */
14763 /* POOL16A encoding of minor opcode field */
14770 /* POOL16B encoding of minor opcode field */
14777 /* POOL16C encoding of minor opcode field */
14797 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14821 /* POOL16D encoding of minor opcode field */
14828 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register encoding (0..7) onto the MIPS32
 * GPR it designates: $16, $17, $2..$7.
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14842 /* Used for 16-bit store instructions. */
/*
 * Register map used by 16-bit microMIPS store instructions:
 * encoding 0 selects $0 (store zero) instead of $16, otherwise
 * identical to mmreg().
 */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14850 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14851 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14852 #define uMIPS_RS2(op) uMIPS_RS(op)
14853 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14854 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14855 #define uMIPS_RS5(op) (op & 0x1f)
14857 /* Signed immediate */
14858 #define SIMM(op, start, width) \
14859 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14862 /* Zero-extended immediate */
14863 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14865 static void gen_addiur1sp(DisasContext
*ctx
)
14867 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14869 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14872 static void gen_addiur2(DisasContext
*ctx
)
14874 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14875 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14876 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14878 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14881 static void gen_addiusp(DisasContext
*ctx
)
14883 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14886 if (encoded
<= 1) {
14887 decoded
= 256 + encoded
;
14888 } else if (encoded
<= 255) {
14890 } else if (encoded
<= 509) {
14891 decoded
= encoded
- 512;
14893 decoded
= encoded
- 768;
14896 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14899 static void gen_addius5(DisasContext
*ctx
)
14901 int imm
= SIMM(ctx
->opcode
, 1, 4);
14902 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14904 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14907 static void gen_andi16(DisasContext
*ctx
)
14909 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14910 31, 32, 63, 64, 255, 32768, 65535 };
14911 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14912 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14913 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14915 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14918 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14919 int base
, int16_t offset
)
14924 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14925 generate_exception_end(ctx
, EXCP_RI
);
14929 t0
= tcg_temp_new();
14931 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14933 t1
= tcg_const_tl(reglist
);
14934 t2
= tcg_const_i32(ctx
->mem_idx
);
14936 save_cpu_state(ctx
, 1);
14939 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14942 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14944 #ifdef TARGET_MIPS64
14946 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14949 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14955 tcg_temp_free_i32(t2
);
14959 static void gen_pool16c_insn(DisasContext
*ctx
)
14961 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14962 int rs
= mmreg(ctx
->opcode
& 0x7);
14964 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14969 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14975 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14981 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14987 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14994 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14995 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14997 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
15006 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15007 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15009 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
15016 int reg
= ctx
->opcode
& 0x1f;
15018 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
15024 int reg
= ctx
->opcode
& 0x1f;
15025 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
15026 /* Let normal delay slot handling in our caller take us
15027 to the branch target. */
15032 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
15033 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15037 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15038 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15042 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15046 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15049 generate_exception_end(ctx
, EXCP_BREAK
);
15052 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15053 gen_helper_do_semihosting(cpu_env
);
15055 /* XXX: not clear which exception should be raised
15056 * when in debug mode...
15058 check_insn(ctx
, ISA_MIPS32
);
15059 generate_exception_end(ctx
, EXCP_DBp
);
15062 case JRADDIUSP
+ 0:
15063 case JRADDIUSP
+ 1:
15065 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15066 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15067 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15068 /* Let normal delay slot handling in our caller take us
15069 to the branch target. */
15073 generate_exception_end(ctx
, EXCP_RI
);
15078 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15081 int rd
, rs
, re
, rt
;
15082 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15083 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15084 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15085 rd
= rd_enc
[enc_dest
];
15086 re
= re_enc
[enc_dest
];
15087 rs
= rs_rt_enc
[enc_rs
];
15088 rt
= rs_rt_enc
[enc_rt
];
15090 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15092 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15095 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15097 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15101 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15103 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15104 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15106 switch (ctx
->opcode
& 0xf) {
15108 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15111 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15115 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15116 int offset
= extract32(ctx
->opcode
, 4, 4);
15117 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15120 case R6_JRC16
: /* JRCADDIUSP */
15121 if ((ctx
->opcode
>> 4) & 1) {
15123 int imm
= extract32(ctx
->opcode
, 5, 5);
15124 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15125 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15128 rs
= extract32(ctx
->opcode
, 5, 5);
15129 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15141 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15142 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15143 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15144 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15148 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15151 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15155 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15156 int offset
= extract32(ctx
->opcode
, 4, 4);
15157 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15160 case JALRC16
: /* BREAK16, SDBBP16 */
15161 switch (ctx
->opcode
& 0x3f) {
15163 case JALRC16
+ 0x20:
15165 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15170 generate_exception(ctx
, EXCP_BREAK
);
15174 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15175 gen_helper_do_semihosting(cpu_env
);
15177 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15178 generate_exception(ctx
, EXCP_RI
);
15180 generate_exception(ctx
, EXCP_DBp
);
15187 generate_exception(ctx
, EXCP_RI
);
15192 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
15194 TCGv t0
= tcg_temp_new();
15195 TCGv t1
= tcg_temp_new();
15197 gen_load_gpr(t0
, base
);
15200 gen_load_gpr(t1
, index
);
15201 tcg_gen_shli_tl(t1
, t1
, 2);
15202 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15205 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15206 gen_store_gpr(t1
, rd
);
15212 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
15213 int base
, int16_t offset
)
15217 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15218 generate_exception_end(ctx
, EXCP_RI
);
15222 t0
= tcg_temp_new();
15223 t1
= tcg_temp_new();
15225 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15230 generate_exception_end(ctx
, EXCP_RI
);
15233 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15234 gen_store_gpr(t1
, rd
);
15235 tcg_gen_movi_tl(t1
, 4);
15236 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15237 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15238 gen_store_gpr(t1
, rd
+1);
15241 gen_load_gpr(t1
, rd
);
15242 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15243 tcg_gen_movi_tl(t1
, 4);
15244 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15245 gen_load_gpr(t1
, rd
+1);
15246 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15248 #ifdef TARGET_MIPS64
15251 generate_exception_end(ctx
, EXCP_RI
);
15254 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15255 gen_store_gpr(t1
, rd
);
15256 tcg_gen_movi_tl(t1
, 8);
15257 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15258 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15259 gen_store_gpr(t1
, rd
+1);
15262 gen_load_gpr(t1
, rd
);
15263 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15264 tcg_gen_movi_tl(t1
, 8);
15265 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15266 gen_load_gpr(t1
, rd
+1);
15267 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15275 static void gen_sync(int stype
)
15277 TCGBar tcg_mo
= TCG_BAR_SC
;
15280 case 0x4: /* SYNC_WMB */
15281 tcg_mo
|= TCG_MO_ST_ST
;
15283 case 0x10: /* SYNC_MB */
15284 tcg_mo
|= TCG_MO_ALL
;
15286 case 0x11: /* SYNC_ACQUIRE */
15287 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15289 case 0x12: /* SYNC_RELEASE */
15290 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15292 case 0x13: /* SYNC_RMB */
15293 tcg_mo
|= TCG_MO_LD_LD
;
15296 tcg_mo
|= TCG_MO_ALL
;
15300 tcg_gen_mb(tcg_mo
);
15303 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15305 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15306 int minor
= (ctx
->opcode
>> 12) & 0xf;
15307 uint32_t mips32_op
;
15309 switch (extension
) {
15311 mips32_op
= OPC_TEQ
;
15314 mips32_op
= OPC_TGE
;
15317 mips32_op
= OPC_TGEU
;
15320 mips32_op
= OPC_TLT
;
15323 mips32_op
= OPC_TLTU
;
15326 mips32_op
= OPC_TNE
;
15328 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15330 #ifndef CONFIG_USER_ONLY
15333 check_cp0_enabled(ctx
);
15335 /* Treat as NOP. */
15338 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15342 check_cp0_enabled(ctx
);
15344 TCGv t0
= tcg_temp_new();
15346 gen_load_gpr(t0
, rt
);
15347 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15353 switch (minor
& 3) {
15355 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15358 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15361 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15364 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15367 goto pool32axf_invalid
;
15371 switch (minor
& 3) {
15373 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15376 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15379 goto pool32axf_invalid
;
15385 check_insn(ctx
, ISA_MIPS32R6
);
15386 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15389 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15392 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15395 mips32_op
= OPC_CLO
;
15398 mips32_op
= OPC_CLZ
;
15400 check_insn(ctx
, ISA_MIPS32
);
15401 gen_cl(ctx
, mips32_op
, rt
, rs
);
15404 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15405 gen_rdhwr(ctx
, rt
, rs
, 0);
15408 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15411 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15412 mips32_op
= OPC_MULT
;
15415 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15416 mips32_op
= OPC_MULTU
;
15419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15420 mips32_op
= OPC_DIV
;
15423 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15424 mips32_op
= OPC_DIVU
;
15427 check_insn(ctx
, ISA_MIPS32
);
15428 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15431 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15432 mips32_op
= OPC_MADD
;
15435 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15436 mips32_op
= OPC_MADDU
;
15439 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15440 mips32_op
= OPC_MSUB
;
15443 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15444 mips32_op
= OPC_MSUBU
;
15446 check_insn(ctx
, ISA_MIPS32
);
15447 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15450 goto pool32axf_invalid
;
15461 generate_exception_err(ctx
, EXCP_CpU
, 2);
15464 goto pool32axf_invalid
;
15469 case JALR
: /* JALRC */
15470 case JALR_HB
: /* JALRC_HB */
15471 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15472 /* JALRC, JALRC_HB */
15473 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15475 /* JALR, JALR_HB */
15476 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15477 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15482 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15483 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15484 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15487 goto pool32axf_invalid
;
15493 check_cp0_enabled(ctx
);
15494 check_insn(ctx
, ISA_MIPS32R2
);
15495 gen_load_srsgpr(rs
, rt
);
15498 check_cp0_enabled(ctx
);
15499 check_insn(ctx
, ISA_MIPS32R2
);
15500 gen_store_srsgpr(rs
, rt
);
15503 goto pool32axf_invalid
;
15506 #ifndef CONFIG_USER_ONLY
15510 mips32_op
= OPC_TLBP
;
15513 mips32_op
= OPC_TLBR
;
15516 mips32_op
= OPC_TLBWI
;
15519 mips32_op
= OPC_TLBWR
;
15522 mips32_op
= OPC_TLBINV
;
15525 mips32_op
= OPC_TLBINVF
;
15528 mips32_op
= OPC_WAIT
;
15531 mips32_op
= OPC_DERET
;
15534 mips32_op
= OPC_ERET
;
15536 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15539 goto pool32axf_invalid
;
15545 check_cp0_enabled(ctx
);
15547 TCGv t0
= tcg_temp_new();
15549 save_cpu_state(ctx
, 1);
15550 gen_helper_di(t0
, cpu_env
);
15551 gen_store_gpr(t0
, rs
);
15552 /* Stop translation as we may have switched the execution mode */
15553 ctx
->base
.is_jmp
= DISAS_STOP
;
15558 check_cp0_enabled(ctx
);
15560 TCGv t0
= tcg_temp_new();
15562 save_cpu_state(ctx
, 1);
15563 gen_helper_ei(t0
, cpu_env
);
15564 gen_store_gpr(t0
, rs
);
15565 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15566 of translated code to check for pending interrupts. */
15567 gen_save_pc(ctx
->base
.pc_next
+ 4);
15568 ctx
->base
.is_jmp
= DISAS_EXIT
;
15573 goto pool32axf_invalid
;
15580 gen_sync(extract32(ctx
->opcode
, 16, 5));
15583 generate_exception_end(ctx
, EXCP_SYSCALL
);
15586 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15587 gen_helper_do_semihosting(cpu_env
);
15589 check_insn(ctx
, ISA_MIPS32
);
15590 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15591 generate_exception_end(ctx
, EXCP_RI
);
15593 generate_exception_end(ctx
, EXCP_DBp
);
15598 goto pool32axf_invalid
;
15602 switch (minor
& 3) {
15604 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15607 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15610 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15613 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15616 goto pool32axf_invalid
;
15620 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15623 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15626 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15629 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15632 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15635 goto pool32axf_invalid
;
15640 MIPS_INVAL("pool32axf");
15641 generate_exception_end(ctx
, EXCP_RI
);
15646 /* Values for microMIPS fmt field. Variable-width, depending on which
15647 formats the instruction supports. */
15666 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15668 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15669 uint32_t mips32_op
;
15671 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15672 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15673 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15675 switch (extension
) {
15676 case FLOAT_1BIT_FMT(CFC1
, 0):
15677 mips32_op
= OPC_CFC1
;
15679 case FLOAT_1BIT_FMT(CTC1
, 0):
15680 mips32_op
= OPC_CTC1
;
15682 case FLOAT_1BIT_FMT(MFC1
, 0):
15683 mips32_op
= OPC_MFC1
;
15685 case FLOAT_1BIT_FMT(MTC1
, 0):
15686 mips32_op
= OPC_MTC1
;
15688 case FLOAT_1BIT_FMT(MFHC1
, 0):
15689 mips32_op
= OPC_MFHC1
;
15691 case FLOAT_1BIT_FMT(MTHC1
, 0):
15692 mips32_op
= OPC_MTHC1
;
15694 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15697 /* Reciprocal square root */
15698 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15699 mips32_op
= OPC_RSQRT_S
;
15701 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15702 mips32_op
= OPC_RSQRT_D
;
15706 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15707 mips32_op
= OPC_SQRT_S
;
15709 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15710 mips32_op
= OPC_SQRT_D
;
15714 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15715 mips32_op
= OPC_RECIP_S
;
15717 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15718 mips32_op
= OPC_RECIP_D
;
15722 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15723 mips32_op
= OPC_FLOOR_L_S
;
15725 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15726 mips32_op
= OPC_FLOOR_L_D
;
15728 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15729 mips32_op
= OPC_FLOOR_W_S
;
15731 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15732 mips32_op
= OPC_FLOOR_W_D
;
15736 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15737 mips32_op
= OPC_CEIL_L_S
;
15739 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15740 mips32_op
= OPC_CEIL_L_D
;
15742 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15743 mips32_op
= OPC_CEIL_W_S
;
15745 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15746 mips32_op
= OPC_CEIL_W_D
;
15750 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15751 mips32_op
= OPC_TRUNC_L_S
;
15753 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15754 mips32_op
= OPC_TRUNC_L_D
;
15756 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15757 mips32_op
= OPC_TRUNC_W_S
;
15759 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15760 mips32_op
= OPC_TRUNC_W_D
;
15764 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15765 mips32_op
= OPC_ROUND_L_S
;
15767 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15768 mips32_op
= OPC_ROUND_L_D
;
15770 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15771 mips32_op
= OPC_ROUND_W_S
;
15773 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15774 mips32_op
= OPC_ROUND_W_D
;
15777 /* Integer to floating-point conversion */
15778 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15779 mips32_op
= OPC_CVT_L_S
;
15781 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15782 mips32_op
= OPC_CVT_L_D
;
15784 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15785 mips32_op
= OPC_CVT_W_S
;
15787 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15788 mips32_op
= OPC_CVT_W_D
;
15791 /* Paired-foo conversions */
15792 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15793 mips32_op
= OPC_CVT_S_PL
;
15795 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15796 mips32_op
= OPC_CVT_S_PU
;
15798 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15799 mips32_op
= OPC_CVT_PW_PS
;
15801 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15802 mips32_op
= OPC_CVT_PS_PW
;
15805 /* Floating-point moves */
15806 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15807 mips32_op
= OPC_MOV_S
;
15809 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15810 mips32_op
= OPC_MOV_D
;
15812 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15813 mips32_op
= OPC_MOV_PS
;
15816 /* Absolute value */
15817 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15818 mips32_op
= OPC_ABS_S
;
15820 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15821 mips32_op
= OPC_ABS_D
;
15823 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15824 mips32_op
= OPC_ABS_PS
;
15828 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15829 mips32_op
= OPC_NEG_S
;
15831 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15832 mips32_op
= OPC_NEG_D
;
15834 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15835 mips32_op
= OPC_NEG_PS
;
15838 /* Reciprocal square root step */
15839 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15840 mips32_op
= OPC_RSQRT1_S
;
15842 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15843 mips32_op
= OPC_RSQRT1_D
;
15845 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15846 mips32_op
= OPC_RSQRT1_PS
;
15849 /* Reciprocal step */
15850 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15851 mips32_op
= OPC_RECIP1_S
;
15853 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15854 mips32_op
= OPC_RECIP1_S
;
15856 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15857 mips32_op
= OPC_RECIP1_PS
;
15860 /* Conversions from double */
15861 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15862 mips32_op
= OPC_CVT_D_S
;
15864 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15865 mips32_op
= OPC_CVT_D_W
;
15867 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15868 mips32_op
= OPC_CVT_D_L
;
15871 /* Conversions from single */
15872 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15873 mips32_op
= OPC_CVT_S_D
;
15875 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15876 mips32_op
= OPC_CVT_S_W
;
15878 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15879 mips32_op
= OPC_CVT_S_L
;
15881 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15884 /* Conditional moves on floating-point codes */
15885 case COND_FLOAT_MOV(MOVT
, 0):
15886 case COND_FLOAT_MOV(MOVT
, 1):
15887 case COND_FLOAT_MOV(MOVT
, 2):
15888 case COND_FLOAT_MOV(MOVT
, 3):
15889 case COND_FLOAT_MOV(MOVT
, 4):
15890 case COND_FLOAT_MOV(MOVT
, 5):
15891 case COND_FLOAT_MOV(MOVT
, 6):
15892 case COND_FLOAT_MOV(MOVT
, 7):
15893 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15894 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15896 case COND_FLOAT_MOV(MOVF
, 0):
15897 case COND_FLOAT_MOV(MOVF
, 1):
15898 case COND_FLOAT_MOV(MOVF
, 2):
15899 case COND_FLOAT_MOV(MOVF
, 3):
15900 case COND_FLOAT_MOV(MOVF
, 4):
15901 case COND_FLOAT_MOV(MOVF
, 5):
15902 case COND_FLOAT_MOV(MOVF
, 6):
15903 case COND_FLOAT_MOV(MOVF
, 7):
15904 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15905 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15908 MIPS_INVAL("pool32fxf");
15909 generate_exception_end(ctx
, EXCP_RI
);
15914 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15918 int rt
, rs
, rd
, rr
;
15920 uint32_t op
, minor
, minor2
, mips32_op
;
15921 uint32_t cond
, fmt
, cc
;
15923 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15924 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15926 rt
= (ctx
->opcode
>> 21) & 0x1f;
15927 rs
= (ctx
->opcode
>> 16) & 0x1f;
15928 rd
= (ctx
->opcode
>> 11) & 0x1f;
15929 rr
= (ctx
->opcode
>> 6) & 0x1f;
15930 imm
= (int16_t) ctx
->opcode
;
15932 op
= (ctx
->opcode
>> 26) & 0x3f;
15935 minor
= ctx
->opcode
& 0x3f;
15938 minor
= (ctx
->opcode
>> 6) & 0xf;
15941 mips32_op
= OPC_SLL
;
15944 mips32_op
= OPC_SRA
;
15947 mips32_op
= OPC_SRL
;
15950 mips32_op
= OPC_ROTR
;
15952 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15955 check_insn(ctx
, ISA_MIPS32R6
);
15956 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15959 check_insn(ctx
, ISA_MIPS32R6
);
15960 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15963 check_insn(ctx
, ISA_MIPS32R6
);
15964 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15967 goto pool32a_invalid
;
15971 minor
= (ctx
->opcode
>> 6) & 0xf;
15975 mips32_op
= OPC_ADD
;
15978 mips32_op
= OPC_ADDU
;
15981 mips32_op
= OPC_SUB
;
15984 mips32_op
= OPC_SUBU
;
15987 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15988 mips32_op
= OPC_MUL
;
15990 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15994 mips32_op
= OPC_SLLV
;
15997 mips32_op
= OPC_SRLV
;
16000 mips32_op
= OPC_SRAV
;
16003 mips32_op
= OPC_ROTRV
;
16005 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
16007 /* Logical operations */
16009 mips32_op
= OPC_AND
;
16012 mips32_op
= OPC_OR
;
16015 mips32_op
= OPC_NOR
;
16018 mips32_op
= OPC_XOR
;
16020 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
16022 /* Set less than */
16024 mips32_op
= OPC_SLT
;
16027 mips32_op
= OPC_SLTU
;
16029 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
16032 goto pool32a_invalid
;
16036 minor
= (ctx
->opcode
>> 6) & 0xf;
16038 /* Conditional moves */
16039 case MOVN
: /* MUL */
16040 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16042 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16045 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16048 case MOVZ
: /* MUH */
16049 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16051 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16054 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16058 check_insn(ctx
, ISA_MIPS32R6
);
16059 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16062 check_insn(ctx
, ISA_MIPS32R6
);
16063 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16065 case LWXS
: /* DIV */
16066 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16068 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16071 gen_ldxs(ctx
, rs
, rt
, rd
);
16075 check_insn(ctx
, ISA_MIPS32R6
);
16076 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16079 check_insn(ctx
, ISA_MIPS32R6
);
16080 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16083 check_insn(ctx
, ISA_MIPS32R6
);
16084 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16087 goto pool32a_invalid
;
16091 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16094 check_insn(ctx
, ISA_MIPS32R6
);
16095 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16096 extract32(ctx
->opcode
, 9, 2));
16099 check_insn(ctx
, ISA_MIPS32R6
);
16100 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16103 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16106 gen_pool32axf(env
, ctx
, rt
, rs
);
16109 generate_exception_end(ctx
, EXCP_BREAK
);
16112 check_insn(ctx
, ISA_MIPS32R6
);
16113 generate_exception_end(ctx
, EXCP_RI
);
16117 MIPS_INVAL("pool32a");
16118 generate_exception_end(ctx
, EXCP_RI
);
16123 minor
= (ctx
->opcode
>> 12) & 0xf;
16126 check_cp0_enabled(ctx
);
16127 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16128 gen_cache_operation(ctx
, rt
, rs
, imm
);
16133 /* COP2: Not implemented. */
16134 generate_exception_err(ctx
, EXCP_CpU
, 2);
16136 #ifdef TARGET_MIPS64
16139 check_insn(ctx
, ISA_MIPS3
);
16140 check_mips_64(ctx
);
16145 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16147 #ifdef TARGET_MIPS64
16150 check_insn(ctx
, ISA_MIPS3
);
16151 check_mips_64(ctx
);
16156 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16159 MIPS_INVAL("pool32b");
16160 generate_exception_end(ctx
, EXCP_RI
);
16165 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16166 minor
= ctx
->opcode
& 0x3f;
16167 check_cp1_enabled(ctx
);
16170 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16171 mips32_op
= OPC_ALNV_PS
;
16174 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16175 mips32_op
= OPC_MADD_S
;
16178 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16179 mips32_op
= OPC_MADD_D
;
16182 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16183 mips32_op
= OPC_MADD_PS
;
16186 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16187 mips32_op
= OPC_MSUB_S
;
16190 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16191 mips32_op
= OPC_MSUB_D
;
16194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16195 mips32_op
= OPC_MSUB_PS
;
16198 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16199 mips32_op
= OPC_NMADD_S
;
16202 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16203 mips32_op
= OPC_NMADD_D
;
16206 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16207 mips32_op
= OPC_NMADD_PS
;
16210 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16211 mips32_op
= OPC_NMSUB_S
;
16214 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16215 mips32_op
= OPC_NMSUB_D
;
16218 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16219 mips32_op
= OPC_NMSUB_PS
;
16221 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16223 case CABS_COND_FMT
:
16224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16225 cond
= (ctx
->opcode
>> 6) & 0xf;
16226 cc
= (ctx
->opcode
>> 13) & 0x7;
16227 fmt
= (ctx
->opcode
>> 10) & 0x3;
16230 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16233 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16236 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16239 goto pool32f_invalid
;
16243 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16244 cond
= (ctx
->opcode
>> 6) & 0xf;
16245 cc
= (ctx
->opcode
>> 13) & 0x7;
16246 fmt
= (ctx
->opcode
>> 10) & 0x3;
16249 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16252 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16255 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16258 goto pool32f_invalid
;
16262 check_insn(ctx
, ISA_MIPS32R6
);
16263 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16266 check_insn(ctx
, ISA_MIPS32R6
);
16267 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16270 gen_pool32fxf(ctx
, rt
, rs
);
16274 switch ((ctx
->opcode
>> 6) & 0x7) {
16276 mips32_op
= OPC_PLL_PS
;
16279 mips32_op
= OPC_PLU_PS
;
16282 mips32_op
= OPC_PUL_PS
;
16285 mips32_op
= OPC_PUU_PS
;
16288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16289 mips32_op
= OPC_CVT_PS_S
;
16291 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16294 goto pool32f_invalid
;
16298 check_insn(ctx
, ISA_MIPS32R6
);
16299 switch ((ctx
->opcode
>> 9) & 0x3) {
16301 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16304 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16307 goto pool32f_invalid
;
16312 switch ((ctx
->opcode
>> 6) & 0x7) {
16314 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16315 mips32_op
= OPC_LWXC1
;
16318 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16319 mips32_op
= OPC_SWXC1
;
16322 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16323 mips32_op
= OPC_LDXC1
;
16326 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16327 mips32_op
= OPC_SDXC1
;
16330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16331 mips32_op
= OPC_LUXC1
;
16334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16335 mips32_op
= OPC_SUXC1
;
16337 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16340 goto pool32f_invalid
;
16344 check_insn(ctx
, ISA_MIPS32R6
);
16345 switch ((ctx
->opcode
>> 9) & 0x3) {
16347 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16350 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16353 goto pool32f_invalid
;
16358 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16359 fmt
= (ctx
->opcode
>> 9) & 0x3;
16360 switch ((ctx
->opcode
>> 6) & 0x7) {
16364 mips32_op
= OPC_RSQRT2_S
;
16367 mips32_op
= OPC_RSQRT2_D
;
16370 mips32_op
= OPC_RSQRT2_PS
;
16373 goto pool32f_invalid
;
16379 mips32_op
= OPC_RECIP2_S
;
16382 mips32_op
= OPC_RECIP2_D
;
16385 mips32_op
= OPC_RECIP2_PS
;
16388 goto pool32f_invalid
;
16392 mips32_op
= OPC_ADDR_PS
;
16395 mips32_op
= OPC_MULR_PS
;
16397 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16400 goto pool32f_invalid
;
16404 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16405 cc
= (ctx
->opcode
>> 13) & 0x7;
16406 fmt
= (ctx
->opcode
>> 9) & 0x3;
16407 switch ((ctx
->opcode
>> 6) & 0x7) {
16408 case MOVF_FMT
: /* RINT_FMT */
16409 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16413 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16416 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16419 goto pool32f_invalid
;
16425 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16428 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16432 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16435 goto pool32f_invalid
;
16439 case MOVT_FMT
: /* CLASS_FMT */
16440 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16444 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16447 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16450 goto pool32f_invalid
;
16456 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16459 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16463 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16466 goto pool32f_invalid
;
16471 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16474 goto pool32f_invalid
;
16477 #define FINSN_3ARG_SDPS(prfx) \
16478 switch ((ctx->opcode >> 8) & 0x3) { \
16480 mips32_op = OPC_##prfx##_S; \
16483 mips32_op = OPC_##prfx##_D; \
16485 case FMT_SDPS_PS: \
16487 mips32_op = OPC_##prfx##_PS; \
16490 goto pool32f_invalid; \
16493 check_insn(ctx
, ISA_MIPS32R6
);
16494 switch ((ctx
->opcode
>> 9) & 0x3) {
16496 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16499 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16502 goto pool32f_invalid
;
16506 check_insn(ctx
, ISA_MIPS32R6
);
16507 switch ((ctx
->opcode
>> 9) & 0x3) {
16509 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16512 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16515 goto pool32f_invalid
;
16519 /* regular FP ops */
16520 switch ((ctx
->opcode
>> 6) & 0x3) {
16522 FINSN_3ARG_SDPS(ADD
);
16525 FINSN_3ARG_SDPS(SUB
);
16528 FINSN_3ARG_SDPS(MUL
);
16531 fmt
= (ctx
->opcode
>> 8) & 0x3;
16533 mips32_op
= OPC_DIV_D
;
16534 } else if (fmt
== 0) {
16535 mips32_op
= OPC_DIV_S
;
16537 goto pool32f_invalid
;
16541 goto pool32f_invalid
;
16546 switch ((ctx
->opcode
>> 6) & 0x7) {
16547 case MOVN_FMT
: /* SELEQZ_FMT */
16548 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16550 switch ((ctx
->opcode
>> 9) & 0x3) {
16552 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16555 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16558 goto pool32f_invalid
;
16562 FINSN_3ARG_SDPS(MOVN
);
16566 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16567 FINSN_3ARG_SDPS(MOVN
);
16569 case MOVZ_FMT
: /* SELNEZ_FMT */
16570 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16572 switch ((ctx
->opcode
>> 9) & 0x3) {
16574 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16577 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16580 goto pool32f_invalid
;
16584 FINSN_3ARG_SDPS(MOVZ
);
16588 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16589 FINSN_3ARG_SDPS(MOVZ
);
16592 check_insn(ctx
, ISA_MIPS32R6
);
16593 switch ((ctx
->opcode
>> 9) & 0x3) {
16595 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16598 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16601 goto pool32f_invalid
;
16605 check_insn(ctx
, ISA_MIPS32R6
);
16606 switch ((ctx
->opcode
>> 9) & 0x3) {
16608 mips32_op
= OPC_MADDF_S
;
16611 mips32_op
= OPC_MADDF_D
;
16614 goto pool32f_invalid
;
16618 check_insn(ctx
, ISA_MIPS32R6
);
16619 switch ((ctx
->opcode
>> 9) & 0x3) {
16621 mips32_op
= OPC_MSUBF_S
;
16624 mips32_op
= OPC_MSUBF_D
;
16627 goto pool32f_invalid
;
16631 goto pool32f_invalid
;
16635 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16639 MIPS_INVAL("pool32f");
16640 generate_exception_end(ctx
, EXCP_RI
);
16644 generate_exception_err(ctx
, EXCP_CpU
, 1);
16648 minor
= (ctx
->opcode
>> 21) & 0x1f;
16651 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16652 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16655 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16656 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16657 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16660 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16661 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16662 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16665 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16666 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16669 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16670 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16671 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16674 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16675 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16676 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16679 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16680 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16684 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16688 case TLTI
: /* BC1EQZC */
16689 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16691 check_cp1_enabled(ctx
);
16692 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16695 mips32_op
= OPC_TLTI
;
16699 case TGEI
: /* BC1NEZC */
16700 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16702 check_cp1_enabled(ctx
);
16703 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16706 mips32_op
= OPC_TGEI
;
16711 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16712 mips32_op
= OPC_TLTIU
;
16715 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16716 mips32_op
= OPC_TGEIU
;
16718 case TNEI
: /* SYNCI */
16719 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16721 /* Break the TB to be able to sync copied instructions
16723 ctx
->base
.is_jmp
= DISAS_STOP
;
16726 mips32_op
= OPC_TNEI
;
16731 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16732 mips32_op
= OPC_TEQI
;
16734 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16739 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16740 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16741 4, rs
, 0, imm
<< 1, 0);
16742 /* Compact branches don't have a delay slot, so just let
16743 the normal delay slot handling take us to the branch
16747 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16748 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16751 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16752 /* Break the TB to be able to sync copied instructions
16754 ctx
->base
.is_jmp
= DISAS_STOP
;
16758 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16759 /* COP2: Not implemented. */
16760 generate_exception_err(ctx
, EXCP_CpU
, 2);
16763 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16764 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16767 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16768 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16772 mips32_op
= OPC_BC1FANY4
;
16775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16776 mips32_op
= OPC_BC1TANY4
;
16779 check_insn(ctx
, ASE_MIPS3D
);
16782 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16783 check_cp1_enabled(ctx
);
16784 gen_compute_branch1(ctx
, mips32_op
,
16785 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16787 generate_exception_err(ctx
, EXCP_CpU
, 1);
16792 /* MIPS DSP: not implemented */
16795 MIPS_INVAL("pool32i");
16796 generate_exception_end(ctx
, EXCP_RI
);
16801 minor
= (ctx
->opcode
>> 12) & 0xf;
16802 offset
= sextract32(ctx
->opcode
, 0,
16803 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16806 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16807 mips32_op
= OPC_LWL
;
16810 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16811 mips32_op
= OPC_SWL
;
16814 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16815 mips32_op
= OPC_LWR
;
16818 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16819 mips32_op
= OPC_SWR
;
16821 #if defined(TARGET_MIPS64)
16823 check_insn(ctx
, ISA_MIPS3
);
16824 check_mips_64(ctx
);
16825 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16826 mips32_op
= OPC_LDL
;
16829 check_insn(ctx
, ISA_MIPS3
);
16830 check_mips_64(ctx
);
16831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16832 mips32_op
= OPC_SDL
;
16835 check_insn(ctx
, ISA_MIPS3
);
16836 check_mips_64(ctx
);
16837 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16838 mips32_op
= OPC_LDR
;
16841 check_insn(ctx
, ISA_MIPS3
);
16842 check_mips_64(ctx
);
16843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16844 mips32_op
= OPC_SDR
;
16847 check_insn(ctx
, ISA_MIPS3
);
16848 check_mips_64(ctx
);
16849 mips32_op
= OPC_LWU
;
16852 check_insn(ctx
, ISA_MIPS3
);
16853 check_mips_64(ctx
);
16854 mips32_op
= OPC_LLD
;
16858 mips32_op
= OPC_LL
;
16861 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16864 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16867 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16869 #if defined(TARGET_MIPS64)
16871 check_insn(ctx
, ISA_MIPS3
);
16872 check_mips_64(ctx
);
16873 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16878 MIPS_INVAL("pool32c ld-eva");
16879 generate_exception_end(ctx
, EXCP_RI
);
16882 check_cp0_enabled(ctx
);
16884 minor2
= (ctx
->opcode
>> 9) & 0x7;
16885 offset
= sextract32(ctx
->opcode
, 0, 9);
16888 mips32_op
= OPC_LBUE
;
16891 mips32_op
= OPC_LHUE
;
16894 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16895 mips32_op
= OPC_LWLE
;
16898 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16899 mips32_op
= OPC_LWRE
;
16902 mips32_op
= OPC_LBE
;
16905 mips32_op
= OPC_LHE
;
16908 mips32_op
= OPC_LLE
;
16911 mips32_op
= OPC_LWE
;
16917 MIPS_INVAL("pool32c st-eva");
16918 generate_exception_end(ctx
, EXCP_RI
);
16921 check_cp0_enabled(ctx
);
16923 minor2
= (ctx
->opcode
>> 9) & 0x7;
16924 offset
= sextract32(ctx
->opcode
, 0, 9);
16927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16928 mips32_op
= OPC_SWLE
;
16931 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16932 mips32_op
= OPC_SWRE
;
16935 /* Treat as no-op */
16936 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16937 /* hint codes 24-31 are reserved and signal RI */
16938 generate_exception(ctx
, EXCP_RI
);
16942 /* Treat as no-op */
16943 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16944 gen_cache_operation(ctx
, rt
, rs
, offset
);
16948 mips32_op
= OPC_SBE
;
16951 mips32_op
= OPC_SHE
;
16954 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16957 mips32_op
= OPC_SWE
;
16962 /* Treat as no-op */
16963 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16964 /* hint codes 24-31 are reserved and signal RI */
16965 generate_exception(ctx
, EXCP_RI
);
16969 MIPS_INVAL("pool32c");
16970 generate_exception_end(ctx
, EXCP_RI
);
16974 case ADDI32
: /* AUI, LUI */
16975 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16977 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16980 mips32_op
= OPC_ADDI
;
16985 mips32_op
= OPC_ADDIU
;
16987 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16990 /* Logical operations */
16992 mips32_op
= OPC_ORI
;
16995 mips32_op
= OPC_XORI
;
16998 mips32_op
= OPC_ANDI
;
17000 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17003 /* Set less than immediate */
17005 mips32_op
= OPC_SLTI
;
17008 mips32_op
= OPC_SLTIU
;
17010 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17013 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17014 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
17015 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
17016 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17018 case JALS32
: /* BOVC, BEQC, BEQZALC */
17019 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17022 mips32_op
= OPC_BOVC
;
17023 } else if (rs
< rt
&& rs
== 0) {
17025 mips32_op
= OPC_BEQZALC
;
17028 mips32_op
= OPC_BEQC
;
17030 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17033 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
17034 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
17035 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17038 case BEQ32
: /* BC */
17039 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17041 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17042 sextract32(ctx
->opcode
<< 1, 0, 27));
17045 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17048 case BNE32
: /* BALC */
17049 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17051 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17052 sextract32(ctx
->opcode
<< 1, 0, 27));
17055 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17058 case J32
: /* BGTZC, BLTZC, BLTC */
17059 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17060 if (rs
== 0 && rt
!= 0) {
17062 mips32_op
= OPC_BGTZC
;
17063 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17065 mips32_op
= OPC_BLTZC
;
17068 mips32_op
= OPC_BLTC
;
17070 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17073 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17074 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17077 case JAL32
: /* BLEZC, BGEZC, BGEC */
17078 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17079 if (rs
== 0 && rt
!= 0) {
17081 mips32_op
= OPC_BLEZC
;
17082 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17084 mips32_op
= OPC_BGEZC
;
17087 mips32_op
= OPC_BGEC
;
17089 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17092 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17093 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17094 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17097 /* Floating point (COP1) */
17099 mips32_op
= OPC_LWC1
;
17102 mips32_op
= OPC_LDC1
;
17105 mips32_op
= OPC_SWC1
;
17108 mips32_op
= OPC_SDC1
;
17110 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17112 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17113 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17114 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17115 switch ((ctx
->opcode
>> 16) & 0x1f) {
17124 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17127 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17130 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17140 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17143 generate_exception(ctx
, EXCP_RI
);
17148 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17149 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17151 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17154 case BNVC
: /* BNEC, BNEZALC */
17155 check_insn(ctx
, ISA_MIPS32R6
);
17158 mips32_op
= OPC_BNVC
;
17159 } else if (rs
< rt
&& rs
== 0) {
17161 mips32_op
= OPC_BNEZALC
;
17164 mips32_op
= OPC_BNEC
;
17166 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17168 case R6_BNEZC
: /* JIALC */
17169 check_insn(ctx
, ISA_MIPS32R6
);
17172 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17173 sextract32(ctx
->opcode
<< 1, 0, 22));
17176 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17179 case R6_BEQZC
: /* JIC */
17180 check_insn(ctx
, ISA_MIPS32R6
);
17183 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17184 sextract32(ctx
->opcode
<< 1, 0, 22));
17187 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17190 case BLEZALC
: /* BGEZALC, BGEUC */
17191 check_insn(ctx
, ISA_MIPS32R6
);
17192 if (rs
== 0 && rt
!= 0) {
17194 mips32_op
= OPC_BLEZALC
;
17195 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17197 mips32_op
= OPC_BGEZALC
;
17200 mips32_op
= OPC_BGEUC
;
17202 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17204 case BGTZALC
: /* BLTZALC, BLTUC */
17205 check_insn(ctx
, ISA_MIPS32R6
);
17206 if (rs
== 0 && rt
!= 0) {
17208 mips32_op
= OPC_BGTZALC
;
17209 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17211 mips32_op
= OPC_BLTZALC
;
17214 mips32_op
= OPC_BLTUC
;
17216 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17218 /* Loads and stores */
17220 mips32_op
= OPC_LB
;
17223 mips32_op
= OPC_LBU
;
17226 mips32_op
= OPC_LH
;
17229 mips32_op
= OPC_LHU
;
17232 mips32_op
= OPC_LW
;
17234 #ifdef TARGET_MIPS64
17236 check_insn(ctx
, ISA_MIPS3
);
17237 check_mips_64(ctx
);
17238 mips32_op
= OPC_LD
;
17241 check_insn(ctx
, ISA_MIPS3
);
17242 check_mips_64(ctx
);
17243 mips32_op
= OPC_SD
;
17247 mips32_op
= OPC_SB
;
17250 mips32_op
= OPC_SH
;
17253 mips32_op
= OPC_SW
;
17256 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17259 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17262 generate_exception_end(ctx
, EXCP_RI
);
17267 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
17271 /* make sure instructions are on a halfword boundary */
17272 if (ctx
->base
.pc_next
& 0x1) {
17273 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17274 generate_exception_end(ctx
, EXCP_AdEL
);
17278 op
= (ctx
->opcode
>> 10) & 0x3f;
17279 /* Enforce properly-sized instructions in a delay slot */
17280 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17281 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17283 /* POOL32A, POOL32B, POOL32I, POOL32C */
17285 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17287 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17289 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17291 /* LB32, LH32, LWC132, LDC132, LW32 */
17292 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17293 generate_exception_end(ctx
, EXCP_RI
);
17298 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17300 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17302 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17303 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17304 generate_exception_end(ctx
, EXCP_RI
);
17314 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17315 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17316 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17319 switch (ctx
->opcode
& 0x1) {
17327 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17328 /* In the Release 6 the register number location in
17329 * the instruction encoding has changed.
17331 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17333 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17339 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17340 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17341 int amount
= (ctx
->opcode
>> 1) & 0x7;
17343 amount
= amount
== 0 ? 8 : amount
;
17345 switch (ctx
->opcode
& 0x1) {
17354 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17358 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17359 gen_pool16c_r6_insn(ctx
);
17361 gen_pool16c_insn(ctx
);
17366 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17367 int rb
= 28; /* GP */
17368 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17370 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17374 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17375 if (ctx
->opcode
& 1) {
17376 generate_exception_end(ctx
, EXCP_RI
);
17379 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17380 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17381 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17382 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17387 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17388 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17389 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17390 offset
= (offset
== 0xf ? -1 : offset
);
17392 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17397 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17398 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17399 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17401 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17406 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17407 int rb
= 29; /* SP */
17408 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17410 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17415 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17416 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17417 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17419 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17424 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17425 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17426 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17428 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17433 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17434 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17435 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17437 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17442 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17443 int rb
= 29; /* SP */
17444 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17446 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17451 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17452 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17453 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17455 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17460 int rd
= uMIPS_RD5(ctx
->opcode
);
17461 int rs
= uMIPS_RS5(ctx
->opcode
);
17463 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17470 switch (ctx
->opcode
& 0x1) {
17480 switch (ctx
->opcode
& 0x1) {
17485 gen_addiur1sp(ctx
);
17489 case B16
: /* BC16 */
17490 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17491 sextract32(ctx
->opcode
, 0, 10) << 1,
17492 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17494 case BNEZ16
: /* BNEZC16 */
17495 case BEQZ16
: /* BEQZC16 */
17496 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17497 mmreg(uMIPS_RD(ctx
->opcode
)),
17498 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17499 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17504 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17505 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17507 imm
= (imm
== 0x7f ? -1 : imm
);
17508 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17514 generate_exception_end(ctx
, EXCP_RI
);
17517 decode_micromips32_opc(env
, ctx
);
17530 /* MAJOR, P16, and P32 pools opcodes */
17534 NM_MOVE_BALC
= 0x02,
17542 NM_P16_SHIFT
= 0x0c,
17560 NM_P_LS_U12
= 0x21,
17570 NM_P16_ADDU
= 0x2c,
17584 NM_MOVEPREV
= 0x3f,
17587 /* POOL32A instruction pool */
17589 NM_POOL32A0
= 0x00,
17590 NM_SPECIAL2
= 0x01,
17593 NM_POOL32A5
= 0x05,
17594 NM_POOL32A7
= 0x07,
17597 /* P.GP.W instruction pool */
17599 NM_ADDIUGP_W
= 0x00,
17604 /* P48I instruction pool */
17608 NM_ADDIUGP48
= 0x02,
17609 NM_ADDIUPC48
= 0x03,
17614 /* P.U12 instruction pool */
17623 NM_ADDIUNEG
= 0x08,
17630 /* POOL32F instruction pool */
17632 NM_POOL32F_0
= 0x00,
17633 NM_POOL32F_3
= 0x03,
17634 NM_POOL32F_5
= 0x05,
17637 /* POOL32S instruction pool */
17639 NM_POOL32S_0
= 0x00,
17640 NM_POOL32S_4
= 0x04,
17643 /* P.LUI instruction pool */
17649 /* P.GP.BH instruction pool */
17654 NM_ADDIUGP_B
= 0x03,
17657 NM_P_GP_CP1
= 0x06,
17660 /* P.LS.U12 instruction pool */
17665 NM_P_PREFU12
= 0x03,
17678 /* P.LS.S9 instruction pool */
17684 NM_P_LS_UAWM
= 0x05,
17687 /* P.BAL instruction pool */
17693 /* P.J instruction pool */
17696 NM_JALRC_HB
= 0x01,
17697 NM_P_BALRSC
= 0x08,
17700 /* P.BR1 instruction pool */
17708 /* P.BR2 instruction pool */
17715 /* P.BRI instruction pool */
17727 /* P16.SHIFT instruction pool */
17733 /* POOL16C instruction pool */
17735 NM_POOL16C_0
= 0x00,
17739 /* P16.A1 instruction pool */
17741 NM_ADDIUR1SP
= 0x01,
17744 /* P16.A2 instruction pool */
17747 NM_P_ADDIURS5
= 0x01,
17750 /* P16.ADDU instruction pool */
17756 /* P16.SR instruction pool */
17759 NM_RESTORE_JRC16
= 0x01,
17762 /* P16.4X4 instruction pool */
17768 /* P16.LB instruction pool */
17775 /* P16.LH instruction pool */
17782 /* P.RI instruction pool */
17785 NM_P_SYSCALL
= 0x01,
17790 /* POOL32A0 instruction pool */
17825 NM_D_E_MT_VPE
= 0x56,
17833 /* CRC32 instruction pool */
17843 /* POOL32A5 instruction pool */
17845 NM_CMP_EQ_PH
= 0x00,
17846 NM_CMP_LT_PH
= 0x08,
17847 NM_CMP_LE_PH
= 0x10,
17848 NM_CMPGU_EQ_QB
= 0x18,
17849 NM_CMPGU_LT_QB
= 0x20,
17850 NM_CMPGU_LE_QB
= 0x28,
17851 NM_CMPGDU_EQ_QB
= 0x30,
17852 NM_CMPGDU_LT_QB
= 0x38,
17853 NM_CMPGDU_LE_QB
= 0x40,
17854 NM_CMPU_EQ_QB
= 0x48,
17855 NM_CMPU_LT_QB
= 0x50,
17856 NM_CMPU_LE_QB
= 0x58,
17857 NM_ADDQ_S_W
= 0x60,
17858 NM_SUBQ_S_W
= 0x68,
17862 NM_ADDQ_S_PH
= 0x01,
17863 NM_ADDQH_R_PH
= 0x09,
17864 NM_ADDQH_R_W
= 0x11,
17865 NM_ADDU_S_QB
= 0x19,
17866 NM_ADDU_S_PH
= 0x21,
17867 NM_ADDUH_R_QB
= 0x29,
17868 NM_SHRAV_R_PH
= 0x31,
17869 NM_SHRAV_R_QB
= 0x39,
17870 NM_SUBQ_S_PH
= 0x41,
17871 NM_SUBQH_R_PH
= 0x49,
17872 NM_SUBQH_R_W
= 0x51,
17873 NM_SUBU_S_QB
= 0x59,
17874 NM_SUBU_S_PH
= 0x61,
17875 NM_SUBUH_R_QB
= 0x69,
17876 NM_SHLLV_S_PH
= 0x71,
17877 NM_PRECR_SRA_R_PH_W
= 0x79,
17879 NM_MULEU_S_PH_QBL
= 0x12,
17880 NM_MULEU_S_PH_QBR
= 0x1a,
17881 NM_MULQ_RS_PH
= 0x22,
17882 NM_MULQ_S_PH
= 0x2a,
17883 NM_MULQ_RS_W
= 0x32,
17884 NM_MULQ_S_W
= 0x3a,
17887 NM_SHRAV_R_W
= 0x5a,
17888 NM_SHRLV_PH
= 0x62,
17889 NM_SHRLV_QB
= 0x6a,
17890 NM_SHLLV_QB
= 0x72,
17891 NM_SHLLV_S_W
= 0x7a,
17895 NM_MULEQ_S_W_PHL
= 0x04,
17896 NM_MULEQ_S_W_PHR
= 0x0c,
17898 NM_MUL_S_PH
= 0x05,
17899 NM_PRECR_QB_PH
= 0x0d,
17900 NM_PRECRQ_QB_PH
= 0x15,
17901 NM_PRECRQ_PH_W
= 0x1d,
17902 NM_PRECRQ_RS_PH_W
= 0x25,
17903 NM_PRECRQU_S_QB_PH
= 0x2d,
17904 NM_PACKRL_PH
= 0x35,
17908 NM_SHRA_R_W
= 0x5e,
17909 NM_SHRA_R_PH
= 0x66,
17910 NM_SHLL_S_PH
= 0x76,
17911 NM_SHLL_S_W
= 0x7e,
17916 /* POOL32A7 instruction pool */
17921 NM_POOL32AXF
= 0x07,
17924 /* P.SR instruction pool */
17930 /* P.SHIFT instruction pool */
17938 /* P.ROTX instruction pool */
17943 /* P.INS instruction pool */
17948 /* P.EXT instruction pool */
17953 /* POOL32F_0 (fmt) instruction pool */
17958 NM_SELEQZ_S
= 0x07,
17959 NM_SELEQZ_D
= 0x47,
17963 NM_SELNEZ_S
= 0x0f,
17964 NM_SELNEZ_D
= 0x4f,
17979 /* POOL32F_3 instruction pool */
17983 NM_MINA_FMT
= 0x04,
17984 NM_MAXA_FMT
= 0x05,
17985 NM_POOL32FXF
= 0x07,
17988 /* POOL32F_5 instruction pool */
17990 NM_CMP_CONDN_S
= 0x00,
17991 NM_CMP_CONDN_D
= 0x02,
17994 /* P.GP.LH instruction pool */
18000 /* P.GP.SH instruction pool */
18005 /* P.GP.CP1 instruction pool */
18013 /* P.LS.S0 instruction pool */
18030 NM_P_PREFS9
= 0x03,
18036 /* P.LS.S1 instruction pool */
18038 NM_ASET_ACLR
= 0x02,
18046 /* P.LS.E0 instruction pool */
18062 /* P.PREFE instruction pool */
18068 /* P.LLE instruction pool */
18074 /* P.SCE instruction pool */
18080 /* P.LS.WM instruction pool */
18086 /* P.LS.UAWM instruction pool */
18092 /* P.BR3A instruction pool */
18098 NM_BPOSGE32C
= 0x04,
18101 /* P16.RI instruction pool */
18103 NM_P16_SYSCALL
= 0x01,
18108 /* POOL16C_0 instruction pool */
18110 NM_POOL16C_00
= 0x00,
18113 /* P16.JRC instruction pool */
18119 /* P.SYSCALL instruction pool */
18125 /* P.TRAP instruction pool */
18131 /* P.CMOVE instruction pool */
18137 /* POOL32Axf instruction pool */
18139 NM_POOL32AXF_1
= 0x01,
18140 NM_POOL32AXF_2
= 0x02,
18141 NM_POOL32AXF_4
= 0x04,
18142 NM_POOL32AXF_5
= 0x05,
18143 NM_POOL32AXF_7
= 0x07,
18146 /* POOL32Axf_1 instruction pool */
18148 NM_POOL32AXF_1_0
= 0x00,
18149 NM_POOL32AXF_1_1
= 0x01,
18150 NM_POOL32AXF_1_3
= 0x03,
18151 NM_POOL32AXF_1_4
= 0x04,
18152 NM_POOL32AXF_1_5
= 0x05,
18153 NM_POOL32AXF_1_7
= 0x07,
18156 /* POOL32Axf_2 instruction pool */
18158 NM_POOL32AXF_2_0_7
= 0x00,
18159 NM_POOL32AXF_2_8_15
= 0x01,
18160 NM_POOL32AXF_2_16_23
= 0x02,
18161 NM_POOL32AXF_2_24_31
= 0x03,
18164 /* POOL32Axf_7 instruction pool */
18166 NM_SHRA_R_QB
= 0x0,
18171 /* POOL32Axf_1_0 instruction pool */
18179 /* POOL32Axf_1_1 instruction pool */
18185 /* POOL32Axf_1_3 instruction pool */
18193 /* POOL32Axf_1_4 instruction pool */
18199 /* POOL32Axf_1_5 instruction pool */
18201 NM_MAQ_S_W_PHR
= 0x0,
18202 NM_MAQ_S_W_PHL
= 0x1,
18203 NM_MAQ_SA_W_PHR
= 0x2,
18204 NM_MAQ_SA_W_PHL
= 0x3,
18207 /* POOL32Axf_1_7 instruction pool */
18211 NM_EXTR_RS_W
= 0x2,
18215 /* POOL32Axf_2_0_7 instruction pool */
18218 NM_DPAQ_S_W_PH
= 0x1,
18220 NM_DPSQ_S_W_PH
= 0x3,
18227 /* POOL32Axf_2_8_15 instruction pool */
18229 NM_DPAX_W_PH
= 0x0,
18230 NM_DPAQ_SA_L_W
= 0x1,
18231 NM_DPSX_W_PH
= 0x2,
18232 NM_DPSQ_SA_L_W
= 0x3,
18235 NM_EXTRV_R_W
= 0x7,
18238 /* POOL32Axf_2_16_23 instruction pool */
18240 NM_DPAU_H_QBL
= 0x0,
18241 NM_DPAQX_S_W_PH
= 0x1,
18242 NM_DPSU_H_QBL
= 0x2,
18243 NM_DPSQX_S_W_PH
= 0x3,
18246 NM_MULSA_W_PH
= 0x6,
18247 NM_EXTRV_RS_W
= 0x7,
18250 /* POOL32Axf_2_24_31 instruction pool */
18252 NM_DPAU_H_QBR
= 0x0,
18253 NM_DPAQX_SA_W_PH
= 0x1,
18254 NM_DPSU_H_QBR
= 0x2,
18255 NM_DPSQX_SA_W_PH
= 0x3,
18258 NM_MULSAQ_S_W_PH
= 0x6,
18259 NM_EXTRV_S_H
= 0x7,
18262 /* POOL32Axf_{4, 5} instruction pool */
18281 /* nanoMIPS DSP instructions */
18282 NM_ABSQ_S_QB
= 0x00,
18283 NM_ABSQ_S_PH
= 0x08,
18284 NM_ABSQ_S_W
= 0x10,
18285 NM_PRECEQ_W_PHL
= 0x28,
18286 NM_PRECEQ_W_PHR
= 0x30,
18287 NM_PRECEQU_PH_QBL
= 0x38,
18288 NM_PRECEQU_PH_QBR
= 0x48,
18289 NM_PRECEU_PH_QBL
= 0x58,
18290 NM_PRECEU_PH_QBR
= 0x68,
18291 NM_PRECEQU_PH_QBLA
= 0x39,
18292 NM_PRECEQU_PH_QBRA
= 0x49,
18293 NM_PRECEU_PH_QBLA
= 0x59,
18294 NM_PRECEU_PH_QBRA
= 0x69,
18295 NM_REPLV_PH
= 0x01,
18296 NM_REPLV_QB
= 0x09,
18299 NM_RADDU_W_QB
= 0x78,
18305 /* PP.SR instruction pool */
18309 NM_RESTORE_JRC
= 0x03,
18312 /* P.SR.F instruction pool */
18315 NM_RESTOREF
= 0x01,
18318 /* P16.SYSCALL instruction pool */
18320 NM_SYSCALL16
= 0x00,
18321 NM_HYPCALL16
= 0x01,
18324 /* POOL16C_00 instruction pool */
18332 /* PP.LSX and PP.LSXS instruction pool */
18370 /* ERETx instruction pool */
18376 /* POOL32FxF_{0, 1} insturction pool */
18385 NM_CVT_S_PL
= 0x84,
18386 NM_CVT_S_PU
= 0xa4,
18388 NM_CVT_L_S
= 0x004,
18389 NM_CVT_L_D
= 0x104,
18390 NM_CVT_W_S
= 0x024,
18391 NM_CVT_W_D
= 0x124,
18393 NM_RSQRT_S
= 0x008,
18394 NM_RSQRT_D
= 0x108,
18399 NM_RECIP_S
= 0x048,
18400 NM_RECIP_D
= 0x148,
18402 NM_FLOOR_L_S
= 0x00c,
18403 NM_FLOOR_L_D
= 0x10c,
18405 NM_FLOOR_W_S
= 0x02c,
18406 NM_FLOOR_W_D
= 0x12c,
18408 NM_CEIL_L_S
= 0x04c,
18409 NM_CEIL_L_D
= 0x14c,
18410 NM_CEIL_W_S
= 0x06c,
18411 NM_CEIL_W_D
= 0x16c,
18412 NM_TRUNC_L_S
= 0x08c,
18413 NM_TRUNC_L_D
= 0x18c,
18414 NM_TRUNC_W_S
= 0x0ac,
18415 NM_TRUNC_W_D
= 0x1ac,
18416 NM_ROUND_L_S
= 0x0cc,
18417 NM_ROUND_L_D
= 0x1cc,
18418 NM_ROUND_W_S
= 0x0ec,
18419 NM_ROUND_W_D
= 0x1ec,
18427 NM_CVT_D_S
= 0x04d,
18428 NM_CVT_D_W
= 0x0cd,
18429 NM_CVT_D_L
= 0x14d,
18430 NM_CVT_S_D
= 0x06d,
18431 NM_CVT_S_W
= 0x0ed,
18432 NM_CVT_S_L
= 0x16d,
18435 /* P.LL instruction pool */
18441 /* P.SC instruction pool */
18447 /* P.DVP instruction pool */
18456 * nanoMIPS decoding engine
18461 /* extraction utilities */
18463 #define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
18464 #define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
18465 #define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
18466 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18467 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18469 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
/*
 * Map a 3-bit 'gpr3' register encoding to the architectural GPR number.
 * Encodings 0..3 select $16..$19 (s0..s3); 4..7 select $4..$7 (a0..a3).
 */
static inline int decode_gpr_gpr3(int r)
{
    static const int gpr3_regs[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_regs[r & 0x7];
}
18477 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
/*
 * Map a 3-bit 'gpr3.src.store' register encoding to the GPR number.
 * Identical to the plain gpr3 mapping except that encoding 0 selects
 * $0 (the hard-wired zero register) instead of $16.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int gpr3_store_regs[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_store_regs[r & 0x7];
}
18485 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
/*
 * Map a 4-bit 'gpr4' register encoding to the architectural GPR number.
 * Encodings 0..7 select $8..$11 then $4..$7; 8..15 select $16..$23.
 */
static inline int decode_gpr_gpr4(int r)
{
    static const int gpr4_regs[] = {
        8, 9, 10, 11, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_regs[r & 0xf];
}
18494 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
/*
 * Map a 4-bit 'gpr4.zero' register encoding to the GPR number.
 * Identical to the gpr4 mapping except that encoding 3 selects $0
 * (the hard-wired zero register) instead of $11.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int gpr4_zero_regs[] = {
        8, 9, 10, 0, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_zero_regs[r & 0xf];
}
18504 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18506 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
/*
 * gen_save: emit TCG ops for the nanoMIPS SAVE instruction -- store a
 * run of GPRs to consecutive words below sp, then decrement sp by u.
 *   rt, count : first register in the run and how many to save
 *   gp        : nonzero -> the final slot saves $28 (gp) instead
 *   u         : frame size subtracted from sp afterwards
 *
 * NOTE(review): this is an extraction-mangled view; the 'counter'
 * declaration/increment, closing braces, and temp-free lines were
 * dropped by the extraction and are not shown -- verify against the
 * original file before editing.
 */
18509 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18510 uint8_t gp
, uint16_t u
)
/* TCG temporaries: effective address (va) and value to store (t0). */
18513 TCGv va
= tcg_temp_new();
18514 TCGv t0
= tcg_temp_new();
/* One store per saved register. */
18516 while (counter
!= count
) {
/* When gp is set, the last iteration stores $28 instead of rt+counter. */
18517 bool use_gp
= gp
&& (counter
== count
- 1);
/* Register numbers wrap within the same 16-register bank as rt. */
18518 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
/* Slots are laid out downward from sp, one 32-bit word each. */
18519 int this_offset
= -((counter
+ 1) << 2);
18520 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18521 gen_load_gpr(t0
, this_rt
);
/* 32-bit store using the CPU's default memop flags. */
18522 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18523 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18527 /* adjust stack pointer */
18528 gen_adjust_sp(ctx
, -u
);
/*
 * gen_restore: emit TCG ops for the nanoMIPS RESTORE instruction --
 * reload a run of GPRs from the stack frame, then increment sp by u.
 * Mirror image of gen_save: same register-run encoding, loads instead
 * of stores, and the frame offset is computed from the top (u) down.
 *
 * NOTE(review): extraction-mangled view; the 'counter' declaration/
 * increment, closing braces, and temp-free lines were dropped by the
 * extraction -- verify against the original file before editing.
 */
18534 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18535 uint8_t gp
, uint16_t u
)
/* TCG temporaries: effective address (va) and loaded value (t0). */
18538 TCGv va
= tcg_temp_new();
18539 TCGv t0
= tcg_temp_new();
/* One load per restored register. */
18541 while (counter
!= count
) {
/* When gp is set, the last iteration restores $28 (gp). */
18542 bool use_gp
= gp
&& (counter
== count
- 1);
18543 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
/* Offsets run downward from the top of the u-byte frame. */
18544 int this_offset
= u
- ((counter
+ 1) << 2);
18545 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
/* 32-bit signed load using the CPU's default memop flags. */
18546 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18547 ctx
->default_tcg_memop_mask
);
/* Sign-extend to target width before writing back. */
18548 tcg_gen_ext32s_tl(t0
, t0
);
18549 gen_store_gpr(t0
, this_rt
);
18553 /* adjust stack pointer */
18554 gen_adjust_sp(ctx
, u
);
/*
 * gen_pool16c_nanomips_insn: decode a 16-bit POOL16C logical
 * instruction.  A 2-bit field (opcode bits 2..3) selects one of four
 * gen_logic operations on the gpr3-encoded rt/rs registers:
 * NOR (rt = ~(rs|0)), AND, XOR, OR.
 *
 * NOTE(review): extraction-mangled view; the four 'case' labels,
 * 'break' statements, and closing braces were dropped by the
 * extraction and are not shown -- verify against the original file.
 */
18560 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
/* rt/rs come from 3-bit fields via the gpr3 register mapping. */
18562 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
18563 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
/* Dispatch on the 2-bit minor opcode (bits 2..3). */
18565 switch (extract32(ctx
->opcode
, 2, 2)) {
/* NOT16: rt = ~rs, expressed as NOR with $0. */
18567 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
/* AND16: rt &= rs */
18570 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
/* XOR16: rt ^= rs */
18573 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
/* OR16: rt |= rs */
18576 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18581 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18583 int rt
= extract32(ctx
->opcode
, 21, 5);
18584 int rs
= extract32(ctx
->opcode
, 16, 5);
18585 int rd
= extract32(ctx
->opcode
, 11, 5);
18587 switch (extract32(ctx
->opcode
, 3, 7)) {
18589 switch (extract32(ctx
->opcode
, 10, 1)) {
18592 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18596 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18602 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18606 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18609 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18612 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18615 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18618 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18621 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18624 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18627 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18631 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18634 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18637 switch (extract32(ctx
->opcode
, 10, 1)) {
18639 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18642 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18647 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18650 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18653 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18656 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18659 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18664 #ifndef CONFIG_USER_ONLY
18665 TCGv t0
= tcg_temp_new();
18666 switch (extract32(ctx
->opcode
, 10, 1)) {
18669 check_cp0_enabled(ctx
);
18670 gen_helper_dvp(t0
, cpu_env
);
18671 gen_store_gpr(t0
, rt
);
18676 check_cp0_enabled(ctx
);
18677 gen_helper_evp(t0
, cpu_env
);
18678 gen_store_gpr(t0
, rt
);
18685 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18690 TCGv t0
= tcg_temp_new();
18691 TCGv t1
= tcg_temp_new();
18692 TCGv t2
= tcg_temp_new();
18694 gen_load_gpr(t1
, rs
);
18695 gen_load_gpr(t2
, rt
);
18696 tcg_gen_add_tl(t0
, t1
, t2
);
18697 tcg_gen_ext32s_tl(t0
, t0
);
18698 tcg_gen_xor_tl(t1
, t1
, t2
);
18699 tcg_gen_xor_tl(t2
, t0
, t2
);
18700 tcg_gen_andc_tl(t1
, t2
, t1
);
18702 /* operands of same sign, result different sign */
18703 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18704 gen_store_gpr(t0
, rd
);
18712 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18715 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18718 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18721 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18724 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18727 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18730 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18733 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18735 #ifndef CONFIG_USER_ONLY
18737 check_cp0_enabled(ctx
);
18739 /* Treat as NOP. */
18742 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18745 check_cp0_enabled(ctx
);
18747 TCGv t0
= tcg_temp_new();
18749 gen_load_gpr(t0
, rt
);
18750 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18754 case NM_D_E_MT_VPE
:
18756 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18757 TCGv t0
= tcg_temp_new();
18764 gen_helper_dmt(t0
);
18765 gen_store_gpr(t0
, rt
);
18766 } else if (rs
== 0) {
18769 gen_helper_dvpe(t0
, cpu_env
);
18770 gen_store_gpr(t0
, rt
);
18772 generate_exception_end(ctx
, EXCP_RI
);
18779 gen_helper_emt(t0
);
18780 gen_store_gpr(t0
, rt
);
18781 } else if (rs
== 0) {
18784 gen_helper_evpe(t0
, cpu_env
);
18785 gen_store_gpr(t0
, rt
);
18787 generate_exception_end(ctx
, EXCP_RI
);
18798 TCGv t0
= tcg_temp_new();
18799 TCGv t1
= tcg_temp_new();
18801 gen_load_gpr(t0
, rt
);
18802 gen_load_gpr(t1
, rs
);
18803 gen_helper_fork(t0
, t1
);
18810 check_cp0_enabled(ctx
);
18812 /* Treat as NOP. */
18815 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18816 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18820 check_cp0_enabled(ctx
);
18821 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18822 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18827 TCGv t0
= tcg_temp_new();
18829 gen_load_gpr(t0
, rs
);
18830 gen_helper_yield(t0
, cpu_env
, t0
);
18831 gen_store_gpr(t0
, rt
);
18837 generate_exception_end(ctx
, EXCP_RI
);
18843 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18844 int ret
, int v1
, int v2
)
18850 t0
= tcg_temp_new_i32();
18852 v0_t
= tcg_temp_new();
18853 v1_t
= tcg_temp_new();
18855 tcg_gen_movi_i32(t0
, v2
>> 3);
18857 gen_load_gpr(v0_t
, ret
);
18858 gen_load_gpr(v1_t
, v1
);
18861 case NM_MAQ_S_W_PHR
:
18863 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18865 case NM_MAQ_S_W_PHL
:
18867 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18869 case NM_MAQ_SA_W_PHR
:
18871 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18873 case NM_MAQ_SA_W_PHL
:
18875 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18878 generate_exception_end(ctx
, EXCP_RI
);
18882 tcg_temp_free_i32(t0
);
18884 tcg_temp_free(v0_t
);
18885 tcg_temp_free(v1_t
);
18889 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18890 int ret
, int v1
, int v2
)
18893 TCGv t0
= tcg_temp_new();
18894 TCGv t1
= tcg_temp_new();
18895 TCGv v0_t
= tcg_temp_new();
18897 gen_load_gpr(v0_t
, v1
);
18900 case NM_POOL32AXF_1_0
:
18902 switch (extract32(ctx
->opcode
, 12, 2)) {
18904 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18907 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18910 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18913 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18917 case NM_POOL32AXF_1_1
:
18919 switch (extract32(ctx
->opcode
, 12, 2)) {
18921 tcg_gen_movi_tl(t0
, v2
);
18922 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18925 tcg_gen_movi_tl(t0
, v2
>> 3);
18926 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18929 generate_exception_end(ctx
, EXCP_RI
);
18933 case NM_POOL32AXF_1_3
:
18935 imm
= extract32(ctx
->opcode
, 14, 7);
18936 switch (extract32(ctx
->opcode
, 12, 2)) {
18938 tcg_gen_movi_tl(t0
, imm
);
18939 gen_helper_rddsp(t0
, t0
, cpu_env
);
18940 gen_store_gpr(t0
, ret
);
18943 gen_load_gpr(t0
, ret
);
18944 tcg_gen_movi_tl(t1
, imm
);
18945 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18948 tcg_gen_movi_tl(t0
, v2
>> 3);
18949 tcg_gen_movi_tl(t1
, v1
);
18950 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18951 gen_store_gpr(t0
, ret
);
18954 tcg_gen_movi_tl(t0
, v2
>> 3);
18955 tcg_gen_movi_tl(t1
, v1
);
18956 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18957 gen_store_gpr(t0
, ret
);
18961 case NM_POOL32AXF_1_4
:
18963 tcg_gen_movi_tl(t0
, v2
>> 2);
18964 switch (extract32(ctx
->opcode
, 12, 1)) {
18966 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18967 gen_store_gpr(t0
, ret
);
18970 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18971 gen_store_gpr(t0
, ret
);
18975 case NM_POOL32AXF_1_5
:
18976 opc
= extract32(ctx
->opcode
, 12, 2);
18977 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18979 case NM_POOL32AXF_1_7
:
18981 tcg_gen_movi_tl(t0
, v2
>> 3);
18982 tcg_gen_movi_tl(t1
, v1
);
18983 switch (extract32(ctx
->opcode
, 12, 2)) {
18985 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18986 gen_store_gpr(t0
, ret
);
18989 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18990 gen_store_gpr(t0
, ret
);
18993 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18994 gen_store_gpr(t0
, ret
);
18997 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18998 gen_store_gpr(t0
, ret
);
19003 generate_exception_end(ctx
, EXCP_RI
);
19009 tcg_temp_free(v0_t
);
19012 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19013 TCGv v0
, TCGv v1
, int rd
)
19017 t0
= tcg_temp_new_i32();
19019 tcg_gen_movi_i32(t0
, rd
>> 3);
19022 case NM_POOL32AXF_2_0_7
:
19023 switch (extract32(ctx
->opcode
, 9, 3)) {
19026 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19028 case NM_DPAQ_S_W_PH
:
19030 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19034 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19036 case NM_DPSQ_S_W_PH
:
19038 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19041 generate_exception_end(ctx
, EXCP_RI
);
19045 case NM_POOL32AXF_2_8_15
:
19046 switch (extract32(ctx
->opcode
, 9, 3)) {
19049 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19051 case NM_DPAQ_SA_L_W
:
19053 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19057 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19059 case NM_DPSQ_SA_L_W
:
19061 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19064 generate_exception_end(ctx
, EXCP_RI
);
19068 case NM_POOL32AXF_2_16_23
:
19069 switch (extract32(ctx
->opcode
, 9, 3)) {
19070 case NM_DPAU_H_QBL
:
19072 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19074 case NM_DPAQX_S_W_PH
:
19076 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19078 case NM_DPSU_H_QBL
:
19080 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19082 case NM_DPSQX_S_W_PH
:
19084 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19086 case NM_MULSA_W_PH
:
19088 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19091 generate_exception_end(ctx
, EXCP_RI
);
19095 case NM_POOL32AXF_2_24_31
:
19096 switch (extract32(ctx
->opcode
, 9, 3)) {
19097 case NM_DPAU_H_QBR
:
19099 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19101 case NM_DPAQX_SA_W_PH
:
19103 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19105 case NM_DPSU_H_QBR
:
19107 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19109 case NM_DPSQX_SA_W_PH
:
19111 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19113 case NM_MULSAQ_S_W_PH
:
19115 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19118 generate_exception_end(ctx
, EXCP_RI
);
19123 generate_exception_end(ctx
, EXCP_RI
);
19127 tcg_temp_free_i32(t0
);
19130 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19131 int rt
, int rs
, int rd
)
19134 TCGv t0
= tcg_temp_new();
19135 TCGv t1
= tcg_temp_new();
19136 TCGv v0_t
= tcg_temp_new();
19137 TCGv v1_t
= tcg_temp_new();
19139 gen_load_gpr(v0_t
, rt
);
19140 gen_load_gpr(v1_t
, rs
);
19143 case NM_POOL32AXF_2_0_7
:
19144 switch (extract32(ctx
->opcode
, 9, 3)) {
19146 case NM_DPAQ_S_W_PH
:
19148 case NM_DPSQ_S_W_PH
:
19149 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19154 gen_load_gpr(t0
, rs
);
19156 if (rd
!= 0 && rd
!= 2) {
19157 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19158 tcg_gen_ext32u_tl(t0
, t0
);
19159 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19160 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19162 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19168 int acc
= extract32(ctx
->opcode
, 14, 2);
19169 TCGv_i64 t2
= tcg_temp_new_i64();
19170 TCGv_i64 t3
= tcg_temp_new_i64();
19172 gen_load_gpr(t0
, rt
);
19173 gen_load_gpr(t1
, rs
);
19174 tcg_gen_ext_tl_i64(t2
, t0
);
19175 tcg_gen_ext_tl_i64(t3
, t1
);
19176 tcg_gen_mul_i64(t2
, t2
, t3
);
19177 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19178 tcg_gen_add_i64(t2
, t2
, t3
);
19179 tcg_temp_free_i64(t3
);
19180 gen_move_low32(cpu_LO
[acc
], t2
);
19181 gen_move_high32(cpu_HI
[acc
], t2
);
19182 tcg_temp_free_i64(t2
);
19188 int acc
= extract32(ctx
->opcode
, 14, 2);
19189 TCGv_i32 t2
= tcg_temp_new_i32();
19190 TCGv_i32 t3
= tcg_temp_new_i32();
19192 gen_load_gpr(t0
, rs
);
19193 gen_load_gpr(t1
, rt
);
19194 tcg_gen_trunc_tl_i32(t2
, t0
);
19195 tcg_gen_trunc_tl_i32(t3
, t1
);
19196 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19197 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19198 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19199 tcg_temp_free_i32(t2
);
19200 tcg_temp_free_i32(t3
);
19205 gen_load_gpr(v1_t
, rs
);
19206 tcg_gen_movi_tl(t0
, rd
>> 3);
19207 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19208 gen_store_gpr(t0
, ret
);
19212 case NM_POOL32AXF_2_8_15
:
19213 switch (extract32(ctx
->opcode
, 9, 3)) {
19215 case NM_DPAQ_SA_L_W
:
19217 case NM_DPSQ_SA_L_W
:
19218 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19223 int acc
= extract32(ctx
->opcode
, 14, 2);
19224 TCGv_i64 t2
= tcg_temp_new_i64();
19225 TCGv_i64 t3
= tcg_temp_new_i64();
19227 gen_load_gpr(t0
, rs
);
19228 gen_load_gpr(t1
, rt
);
19229 tcg_gen_ext32u_tl(t0
, t0
);
19230 tcg_gen_ext32u_tl(t1
, t1
);
19231 tcg_gen_extu_tl_i64(t2
, t0
);
19232 tcg_gen_extu_tl_i64(t3
, t1
);
19233 tcg_gen_mul_i64(t2
, t2
, t3
);
19234 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19235 tcg_gen_add_i64(t2
, t2
, t3
);
19236 tcg_temp_free_i64(t3
);
19237 gen_move_low32(cpu_LO
[acc
], t2
);
19238 gen_move_high32(cpu_HI
[acc
], t2
);
19239 tcg_temp_free_i64(t2
);
19245 int acc
= extract32(ctx
->opcode
, 14, 2);
19246 TCGv_i32 t2
= tcg_temp_new_i32();
19247 TCGv_i32 t3
= tcg_temp_new_i32();
19249 gen_load_gpr(t0
, rs
);
19250 gen_load_gpr(t1
, rt
);
19251 tcg_gen_trunc_tl_i32(t2
, t0
);
19252 tcg_gen_trunc_tl_i32(t3
, t1
);
19253 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19254 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19255 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19256 tcg_temp_free_i32(t2
);
19257 tcg_temp_free_i32(t3
);
19262 tcg_gen_movi_tl(t0
, rd
>> 3);
19263 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19264 gen_store_gpr(t0
, ret
);
19267 generate_exception_end(ctx
, EXCP_RI
);
19271 case NM_POOL32AXF_2_16_23
:
19272 switch (extract32(ctx
->opcode
, 9, 3)) {
19273 case NM_DPAU_H_QBL
:
19274 case NM_DPAQX_S_W_PH
:
19275 case NM_DPSU_H_QBL
:
19276 case NM_DPSQX_S_W_PH
:
19277 case NM_MULSA_W_PH
:
19278 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19282 tcg_gen_movi_tl(t0
, rd
>> 3);
19283 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19284 gen_store_gpr(t0
, ret
);
19289 int acc
= extract32(ctx
->opcode
, 14, 2);
19290 TCGv_i64 t2
= tcg_temp_new_i64();
19291 TCGv_i64 t3
= tcg_temp_new_i64();
19293 gen_load_gpr(t0
, rs
);
19294 gen_load_gpr(t1
, rt
);
19295 tcg_gen_ext_tl_i64(t2
, t0
);
19296 tcg_gen_ext_tl_i64(t3
, t1
);
19297 tcg_gen_mul_i64(t2
, t2
, t3
);
19298 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19299 tcg_gen_sub_i64(t2
, t3
, t2
);
19300 tcg_temp_free_i64(t3
);
19301 gen_move_low32(cpu_LO
[acc
], t2
);
19302 gen_move_high32(cpu_HI
[acc
], t2
);
19303 tcg_temp_free_i64(t2
);
19306 case NM_EXTRV_RS_W
:
19308 tcg_gen_movi_tl(t0
, rd
>> 3);
19309 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19310 gen_store_gpr(t0
, ret
);
19314 case NM_POOL32AXF_2_24_31
:
19315 switch (extract32(ctx
->opcode
, 9, 3)) {
19316 case NM_DPAU_H_QBR
:
19317 case NM_DPAQX_SA_W_PH
:
19318 case NM_DPSU_H_QBR
:
19319 case NM_DPSQX_SA_W_PH
:
19320 case NM_MULSAQ_S_W_PH
:
19321 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19325 tcg_gen_movi_tl(t0
, rd
>> 3);
19326 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19327 gen_store_gpr(t0
, ret
);
19332 int acc
= extract32(ctx
->opcode
, 14, 2);
19333 TCGv_i64 t2
= tcg_temp_new_i64();
19334 TCGv_i64 t3
= tcg_temp_new_i64();
19336 gen_load_gpr(t0
, rs
);
19337 gen_load_gpr(t1
, rt
);
19338 tcg_gen_ext32u_tl(t0
, t0
);
19339 tcg_gen_ext32u_tl(t1
, t1
);
19340 tcg_gen_extu_tl_i64(t2
, t0
);
19341 tcg_gen_extu_tl_i64(t3
, t1
);
19342 tcg_gen_mul_i64(t2
, t2
, t3
);
19343 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19344 tcg_gen_sub_i64(t2
, t3
, t2
);
19345 tcg_temp_free_i64(t3
);
19346 gen_move_low32(cpu_LO
[acc
], t2
);
19347 gen_move_high32(cpu_HI
[acc
], t2
);
19348 tcg_temp_free_i64(t2
);
19353 tcg_gen_movi_tl(t0
, rd
>> 3);
19354 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19355 gen_store_gpr(t0
, ret
);
19360 generate_exception_end(ctx
, EXCP_RI
);
19367 tcg_temp_free(v0_t
);
19368 tcg_temp_free(v1_t
);
19371 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19375 TCGv t0
= tcg_temp_new();
19376 TCGv v0_t
= tcg_temp_new();
19378 gen_load_gpr(v0_t
, rs
);
19383 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19384 gen_store_gpr(v0_t
, ret
);
19388 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19389 gen_store_gpr(v0_t
, ret
);
19393 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19394 gen_store_gpr(v0_t
, ret
);
19396 case NM_PRECEQ_W_PHL
:
19398 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19399 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19400 gen_store_gpr(v0_t
, ret
);
19402 case NM_PRECEQ_W_PHR
:
19404 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19405 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19406 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19407 gen_store_gpr(v0_t
, ret
);
19409 case NM_PRECEQU_PH_QBL
:
19411 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19412 gen_store_gpr(v0_t
, ret
);
19414 case NM_PRECEQU_PH_QBR
:
19416 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19417 gen_store_gpr(v0_t
, ret
);
19419 case NM_PRECEQU_PH_QBLA
:
19421 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19422 gen_store_gpr(v0_t
, ret
);
19424 case NM_PRECEQU_PH_QBRA
:
19426 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19427 gen_store_gpr(v0_t
, ret
);
19429 case NM_PRECEU_PH_QBL
:
19431 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19432 gen_store_gpr(v0_t
, ret
);
19434 case NM_PRECEU_PH_QBR
:
19436 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19437 gen_store_gpr(v0_t
, ret
);
19439 case NM_PRECEU_PH_QBLA
:
19441 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19442 gen_store_gpr(v0_t
, ret
);
19444 case NM_PRECEU_PH_QBRA
:
19446 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19447 gen_store_gpr(v0_t
, ret
);
19451 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19452 tcg_gen_shli_tl(t0
, v0_t
, 16);
19453 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19454 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19455 gen_store_gpr(v0_t
, ret
);
19459 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19460 tcg_gen_shli_tl(t0
, v0_t
, 8);
19461 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19462 tcg_gen_shli_tl(t0
, v0_t
, 16);
19463 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19464 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19465 gen_store_gpr(v0_t
, ret
);
19469 gen_helper_bitrev(v0_t
, v0_t
);
19470 gen_store_gpr(v0_t
, ret
);
19475 TCGv tv0
= tcg_temp_new();
19477 gen_load_gpr(tv0
, rt
);
19478 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19479 gen_store_gpr(v0_t
, ret
);
19480 tcg_temp_free(tv0
);
19483 case NM_RADDU_W_QB
:
19485 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19486 gen_store_gpr(v0_t
, ret
);
19489 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19493 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19497 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19500 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19503 generate_exception_end(ctx
, EXCP_RI
);
19507 tcg_temp_free(v0_t
);
19511 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19512 int rt
, int rs
, int rd
)
19514 TCGv t0
= tcg_temp_new();
19515 TCGv rs_t
= tcg_temp_new();
19517 gen_load_gpr(rs_t
, rs
);
19522 tcg_gen_movi_tl(t0
, rd
>> 2);
19523 switch (extract32(ctx
->opcode
, 12, 1)) {
19526 gen_helper_shra_qb(t0
, t0
, rs_t
);
19527 gen_store_gpr(t0
, rt
);
19531 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19532 gen_store_gpr(t0
, rt
);
19538 tcg_gen_movi_tl(t0
, rd
>> 1);
19539 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19540 gen_store_gpr(t0
, rt
);
19546 target_long result
;
19547 imm
= extract32(ctx
->opcode
, 13, 8);
19548 result
= (uint32_t)imm
<< 24 |
19549 (uint32_t)imm
<< 16 |
19550 (uint32_t)imm
<< 8 |
19552 result
= (int32_t)result
;
19553 tcg_gen_movi_tl(t0
, result
);
19554 gen_store_gpr(t0
, rt
);
19558 generate_exception_end(ctx
, EXCP_RI
);
19562 tcg_temp_free(rs_t
);
19566 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19568 int rt
= extract32(ctx
->opcode
, 21, 5);
19569 int rs
= extract32(ctx
->opcode
, 16, 5);
19570 int rd
= extract32(ctx
->opcode
, 11, 5);
19572 switch (extract32(ctx
->opcode
, 6, 3)) {
19573 case NM_POOL32AXF_1
:
19575 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19576 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19579 case NM_POOL32AXF_2
:
19581 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19582 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19585 case NM_POOL32AXF_4
:
19587 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19588 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19591 case NM_POOL32AXF_5
:
19592 switch (extract32(ctx
->opcode
, 9, 7)) {
19593 #ifndef CONFIG_USER_ONLY
19595 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19598 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19601 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19604 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19607 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19610 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19613 check_cp0_enabled(ctx
);
19615 TCGv t0
= tcg_temp_new();
19617 save_cpu_state(ctx
, 1);
19618 gen_helper_di(t0
, cpu_env
);
19619 gen_store_gpr(t0
, rt
);
19620 /* Stop translation as we may have switched the execution mode */
19621 ctx
->base
.is_jmp
= DISAS_STOP
;
19626 check_cp0_enabled(ctx
);
19628 TCGv t0
= tcg_temp_new();
19630 save_cpu_state(ctx
, 1);
19631 gen_helper_ei(t0
, cpu_env
);
19632 gen_store_gpr(t0
, rt
);
19633 /* Stop translation as we may have switched the execution mode */
19634 ctx
->base
.is_jmp
= DISAS_STOP
;
19639 gen_load_srsgpr(rs
, rt
);
19642 gen_store_srsgpr(rs
, rt
);
19645 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19648 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19651 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19655 generate_exception_end(ctx
, EXCP_RI
);
19659 case NM_POOL32AXF_7
:
19661 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19662 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19666 generate_exception_end(ctx
, EXCP_RI
);
19671 /* Immediate Value Compact Branches */
19672 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19673 int rt
, int32_t imm
, int32_t offset
)
19676 int bcond_compute
= 0;
19677 TCGv t0
= tcg_temp_new();
19678 TCGv t1
= tcg_temp_new();
19680 gen_load_gpr(t0
, rt
);
19681 tcg_gen_movi_tl(t1
, imm
);
19682 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19684 /* Load needed operands and calculate btarget */
19687 if (rt
== 0 && imm
== 0) {
19688 /* Unconditional branch */
19689 } else if (rt
== 0 && imm
!= 0) {
19694 cond
= TCG_COND_EQ
;
19700 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19701 generate_exception_end(ctx
, EXCP_RI
);
19703 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19704 /* Unconditional branch */
19705 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19709 tcg_gen_shri_tl(t0
, t0
, imm
);
19710 tcg_gen_andi_tl(t0
, t0
, 1);
19711 tcg_gen_movi_tl(t1
, 0);
19713 if (opc
== NM_BBEQZC
) {
19714 cond
= TCG_COND_EQ
;
19716 cond
= TCG_COND_NE
;
19721 if (rt
== 0 && imm
== 0) {
19724 } else if (rt
== 0 && imm
!= 0) {
19725 /* Unconditional branch */
19728 cond
= TCG_COND_NE
;
19732 if (rt
== 0 && imm
== 0) {
19733 /* Unconditional branch */
19736 cond
= TCG_COND_GE
;
19741 cond
= TCG_COND_LT
;
19744 if (rt
== 0 && imm
== 0) {
19745 /* Unconditional branch */
19748 cond
= TCG_COND_GEU
;
19753 cond
= TCG_COND_LTU
;
19756 MIPS_INVAL("Immediate Value Compact branch");
19757 generate_exception_end(ctx
, EXCP_RI
);
19761 /* branch completion */
19762 clear_branch_hflags(ctx
);
19763 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19765 if (bcond_compute
== 0) {
19766 /* Uncoditional compact branch */
19767 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19769 /* Conditional compact branch */
19770 TCGLabel
*fs
= gen_new_label();
19772 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19774 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19777 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19785 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19786 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19789 TCGv t0
= tcg_temp_new();
19790 TCGv t1
= tcg_temp_new();
19793 gen_load_gpr(t0
, rs
);
19797 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19800 /* calculate btarget */
19801 tcg_gen_shli_tl(t0
, t0
, 1);
19802 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19803 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19805 /* branch completion */
19806 clear_branch_hflags(ctx
);
19807 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19809 /* unconditional branch to register */
19810 tcg_gen_mov_tl(cpu_PC
, btarget
);
19811 tcg_gen_lookup_and_goto_ptr();
19817 /* nanoMIPS Branches */
19818 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19819 int rs
, int rt
, int32_t offset
)
19821 int bcond_compute
= 0;
19822 TCGv t0
= tcg_temp_new();
19823 TCGv t1
= tcg_temp_new();
19825 /* Load needed operands and calculate btarget */
19827 /* compact branch */
19830 gen_load_gpr(t0
, rs
);
19831 gen_load_gpr(t1
, rt
);
19833 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19837 if (rs
== 0 || rs
== rt
) {
19838 /* OPC_BLEZALC, OPC_BGEZALC */
19839 /* OPC_BGTZALC, OPC_BLTZALC */
19840 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19842 gen_load_gpr(t0
, rs
);
19843 gen_load_gpr(t1
, rt
);
19845 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19848 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19852 /* OPC_BEQZC, OPC_BNEZC */
19853 gen_load_gpr(t0
, rs
);
19855 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19857 /* OPC_JIC, OPC_JIALC */
19858 TCGv tbase
= tcg_temp_new();
19859 TCGv toffset
= tcg_temp_new();
19861 gen_load_gpr(tbase
, rt
);
19862 tcg_gen_movi_tl(toffset
, offset
);
19863 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19864 tcg_temp_free(tbase
);
19865 tcg_temp_free(toffset
);
19869 MIPS_INVAL("Compact branch/jump");
19870 generate_exception_end(ctx
, EXCP_RI
);
19874 if (bcond_compute
== 0) {
19875 /* Uncoditional compact branch */
19878 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19881 MIPS_INVAL("Compact branch/jump");
19882 generate_exception_end(ctx
, EXCP_RI
);
19886 /* Conditional compact branch */
19887 TCGLabel
*fs
= gen_new_label();
19891 if (rs
== 0 && rt
!= 0) {
19893 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19894 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19896 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19899 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19903 if (rs
== 0 && rt
!= 0) {
19905 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19906 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19908 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19911 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19915 if (rs
== 0 && rt
!= 0) {
19917 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19918 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19920 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19923 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19927 if (rs
== 0 && rt
!= 0) {
19929 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19930 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19932 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19935 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19939 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19942 MIPS_INVAL("Compact conditional branch/jump");
19943 generate_exception_end(ctx
, EXCP_RI
);
19947 /* branch completion */
19948 clear_branch_hflags(ctx
);
19949 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19951 /* Generating branch here as compact branches don't have delay slot */
19952 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19955 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19964 /* nanoMIPS CP1 Branches */
19965 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19966 int32_t ft
, int32_t offset
)
19968 target_ulong btarget
;
19969 TCGv_i64 t0
= tcg_temp_new_i64();
19971 gen_load_fpr64(ctx
, t0
, ft
);
19972 tcg_gen_andi_i64(t0
, t0
, 1);
19974 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19978 tcg_gen_xori_i64(t0
, t0
, 1);
19979 ctx
->hflags
|= MIPS_HFLAG_BC
;
19982 /* t0 already set */
19983 ctx
->hflags
|= MIPS_HFLAG_BC
;
19986 MIPS_INVAL("cp1 cond branch");
19987 generate_exception_end(ctx
, EXCP_RI
);
19991 tcg_gen_trunc_i64_tl(bcond
, t0
);
19993 ctx
->btarget
= btarget
;
19996 tcg_temp_free_i64(t0
);
20000 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
20003 t0
= tcg_temp_new();
20004 t1
= tcg_temp_new();
20006 gen_load_gpr(t0
, rs
);
20007 gen_load_gpr(t1
, rt
);
20009 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
20010 /* PP.LSXS instructions require shifting */
20011 switch (extract32(ctx
->opcode
, 7, 4)) {
20016 tcg_gen_shli_tl(t0
, t0
, 1);
20023 tcg_gen_shli_tl(t0
, t0
, 2);
20027 tcg_gen_shli_tl(t0
, t0
, 3);
20031 gen_op_addr_add(ctx
, t0
, t0
, t1
);
20033 switch (extract32(ctx
->opcode
, 7, 4)) {
20035 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20037 gen_store_gpr(t0
, rd
);
20041 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20043 gen_store_gpr(t0
, rd
);
20047 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20049 gen_store_gpr(t0
, rd
);
20052 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20054 gen_store_gpr(t0
, rd
);
20058 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20060 gen_store_gpr(t0
, rd
);
20064 gen_load_gpr(t1
, rd
);
20065 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20071 gen_load_gpr(t1
, rd
);
20072 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20078 gen_load_gpr(t1
, rd
);
20079 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20083 /*case NM_LWC1XS:*/
20085 /*case NM_LDC1XS:*/
20087 /*case NM_SWC1XS:*/
20089 /*case NM_SDC1XS:*/
20090 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20091 check_cp1_enabled(ctx
);
20092 switch (extract32(ctx
->opcode
, 7, 4)) {
20094 /*case NM_LWC1XS:*/
20095 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20098 /*case NM_LDC1XS:*/
20099 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20102 /*case NM_SWC1XS:*/
20103 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20106 /*case NM_SDC1XS:*/
20107 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20111 generate_exception_err(ctx
, EXCP_CpU
, 1);
20115 generate_exception_end(ctx
, EXCP_RI
);
20123 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20127 rt
= extract32(ctx
->opcode
, 21, 5);
20128 rs
= extract32(ctx
->opcode
, 16, 5);
20129 rd
= extract32(ctx
->opcode
, 11, 5);
20131 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20132 generate_exception_end(ctx
, EXCP_RI
);
20135 check_cp1_enabled(ctx
);
20136 switch (extract32(ctx
->opcode
, 0, 3)) {
20138 switch (extract32(ctx
->opcode
, 3, 7)) {
20140 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20143 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20146 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20149 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20152 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20155 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20158 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20161 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20164 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20167 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20170 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20173 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20176 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20179 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20182 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20185 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20188 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20191 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20194 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20197 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20200 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20203 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20206 generate_exception_end(ctx
, EXCP_RI
);
20211 switch (extract32(ctx
->opcode
, 3, 3)) {
20213 switch (extract32(ctx
->opcode
, 9, 1)) {
20215 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20218 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20223 switch (extract32(ctx
->opcode
, 9, 1)) {
20225 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20228 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20233 switch (extract32(ctx
->opcode
, 9, 1)) {
20235 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20238 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20243 switch (extract32(ctx
->opcode
, 9, 1)) {
20245 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20248 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20253 switch (extract32(ctx
->opcode
, 6, 8)) {
20255 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20258 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20261 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20264 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20267 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20270 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20273 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20276 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20279 switch (extract32(ctx
->opcode
, 6, 9)) {
20281 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20284 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20287 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20290 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20293 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20296 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20299 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20302 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20305 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20308 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20311 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20314 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20317 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20320 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20323 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20326 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20329 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20332 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20335 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20338 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20341 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20344 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20347 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20350 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20353 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20356 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20359 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20362 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20365 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20368 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20371 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20374 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20377 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20380 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20383 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20386 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20389 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20392 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20395 generate_exception_end(ctx
, EXCP_RI
);
20404 switch (extract32(ctx
->opcode
, 3, 3)) {
20405 case NM_CMP_CONDN_S
:
20406 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20408 case NM_CMP_CONDN_D
:
20409 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20412 generate_exception_end(ctx
, EXCP_RI
);
20417 generate_exception_end(ctx
, EXCP_RI
);
20422 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20423 int rd
, int rs
, int rt
)
20426 TCGv t0
= tcg_temp_new();
20427 TCGv v1_t
= tcg_temp_new();
20428 TCGv v2_t
= tcg_temp_new();
20430 gen_load_gpr(v1_t
, rs
);
20431 gen_load_gpr(v2_t
, rt
);
20436 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20440 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20444 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20446 case NM_CMPU_EQ_QB
:
20448 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20450 case NM_CMPU_LT_QB
:
20452 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20454 case NM_CMPU_LE_QB
:
20456 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20458 case NM_CMPGU_EQ_QB
:
20460 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20461 gen_store_gpr(v1_t
, ret
);
20463 case NM_CMPGU_LT_QB
:
20465 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20466 gen_store_gpr(v1_t
, ret
);
20468 case NM_CMPGU_LE_QB
:
20470 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20471 gen_store_gpr(v1_t
, ret
);
20473 case NM_CMPGDU_EQ_QB
:
20475 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20476 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20477 gen_store_gpr(v1_t
, ret
);
20479 case NM_CMPGDU_LT_QB
:
20481 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20482 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20483 gen_store_gpr(v1_t
, ret
);
20485 case NM_CMPGDU_LE_QB
:
20487 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20488 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20489 gen_store_gpr(v1_t
, ret
);
20493 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20494 gen_store_gpr(v1_t
, ret
);
20498 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20499 gen_store_gpr(v1_t
, ret
);
20503 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20504 gen_store_gpr(v1_t
, ret
);
20508 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20509 gen_store_gpr(v1_t
, ret
);
20513 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20514 gen_store_gpr(v1_t
, ret
);
20518 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20519 gen_store_gpr(v1_t
, ret
);
20523 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20524 gen_store_gpr(v1_t
, ret
);
20528 switch (extract32(ctx
->opcode
, 10, 1)) {
20531 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20532 gen_store_gpr(v1_t
, ret
);
20536 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20537 gen_store_gpr(v1_t
, ret
);
20541 case NM_ADDQH_R_PH
:
20543 switch (extract32(ctx
->opcode
, 10, 1)) {
20546 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20547 gen_store_gpr(v1_t
, ret
);
20551 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20552 gen_store_gpr(v1_t
, ret
);
20558 switch (extract32(ctx
->opcode
, 10, 1)) {
20561 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20562 gen_store_gpr(v1_t
, ret
);
20566 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20567 gen_store_gpr(v1_t
, ret
);
20573 switch (extract32(ctx
->opcode
, 10, 1)) {
20576 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20577 gen_store_gpr(v1_t
, ret
);
20581 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20582 gen_store_gpr(v1_t
, ret
);
20588 switch (extract32(ctx
->opcode
, 10, 1)) {
20591 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20592 gen_store_gpr(v1_t
, ret
);
20596 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20597 gen_store_gpr(v1_t
, ret
);
20601 case NM_ADDUH_R_QB
:
20603 switch (extract32(ctx
->opcode
, 10, 1)) {
20606 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20607 gen_store_gpr(v1_t
, ret
);
20611 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20612 gen_store_gpr(v1_t
, ret
);
20616 case NM_SHRAV_R_PH
:
20618 switch (extract32(ctx
->opcode
, 10, 1)) {
20621 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20622 gen_store_gpr(v1_t
, ret
);
20626 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20627 gen_store_gpr(v1_t
, ret
);
20631 case NM_SHRAV_R_QB
:
20633 switch (extract32(ctx
->opcode
, 10, 1)) {
20636 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20637 gen_store_gpr(v1_t
, ret
);
20641 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20642 gen_store_gpr(v1_t
, ret
);
20648 switch (extract32(ctx
->opcode
, 10, 1)) {
20651 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20652 gen_store_gpr(v1_t
, ret
);
20656 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20657 gen_store_gpr(v1_t
, ret
);
20661 case NM_SUBQH_R_PH
:
20663 switch (extract32(ctx
->opcode
, 10, 1)) {
20666 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20667 gen_store_gpr(v1_t
, ret
);
20671 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20672 gen_store_gpr(v1_t
, ret
);
20678 switch (extract32(ctx
->opcode
, 10, 1)) {
20681 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20682 gen_store_gpr(v1_t
, ret
);
20686 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20687 gen_store_gpr(v1_t
, ret
);
20693 switch (extract32(ctx
->opcode
, 10, 1)) {
20696 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20697 gen_store_gpr(v1_t
, ret
);
20701 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20702 gen_store_gpr(v1_t
, ret
);
20708 switch (extract32(ctx
->opcode
, 10, 1)) {
20711 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20712 gen_store_gpr(v1_t
, ret
);
20716 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20717 gen_store_gpr(v1_t
, ret
);
20721 case NM_SUBUH_R_QB
:
20723 switch (extract32(ctx
->opcode
, 10, 1)) {
20726 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20727 gen_store_gpr(v1_t
, ret
);
20731 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20732 gen_store_gpr(v1_t
, ret
);
20736 case NM_SHLLV_S_PH
:
20738 switch (extract32(ctx
->opcode
, 10, 1)) {
20741 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20742 gen_store_gpr(v1_t
, ret
);
20746 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20747 gen_store_gpr(v1_t
, ret
);
20751 case NM_PRECR_SRA_R_PH_W
:
20753 switch (extract32(ctx
->opcode
, 10, 1)) {
20755 /* PRECR_SRA_PH_W */
20757 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20758 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20760 gen_store_gpr(v1_t
, rt
);
20761 tcg_temp_free_i32(sa_t
);
20765 /* PRECR_SRA_R_PH_W */
20767 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20768 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20770 gen_store_gpr(v1_t
, rt
);
20771 tcg_temp_free_i32(sa_t
);
20776 case NM_MULEU_S_PH_QBL
:
20778 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20779 gen_store_gpr(v1_t
, ret
);
20781 case NM_MULEU_S_PH_QBR
:
20783 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20784 gen_store_gpr(v1_t
, ret
);
20786 case NM_MULQ_RS_PH
:
20788 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20789 gen_store_gpr(v1_t
, ret
);
20793 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20794 gen_store_gpr(v1_t
, ret
);
20798 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20799 gen_store_gpr(v1_t
, ret
);
20803 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20804 gen_store_gpr(v1_t
, ret
);
20808 gen_load_gpr(t0
, rs
);
20810 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20812 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20816 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20817 gen_store_gpr(v1_t
, ret
);
20821 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20822 gen_store_gpr(v1_t
, ret
);
20826 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20827 gen_store_gpr(v1_t
, ret
);
20831 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20832 gen_store_gpr(v1_t
, ret
);
20836 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20837 gen_store_gpr(v1_t
, ret
);
20841 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20842 gen_store_gpr(v1_t
, ret
);
20847 TCGv tv0
= tcg_temp_new();
20848 TCGv tv1
= tcg_temp_new();
20849 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20851 tcg_gen_movi_tl(tv0
, rd
>> 3);
20852 tcg_gen_movi_tl(tv1
, imm
);
20853 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20856 case NM_MULEQ_S_W_PHL
:
20858 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20859 gen_store_gpr(v1_t
, ret
);
20861 case NM_MULEQ_S_W_PHR
:
20863 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20864 gen_store_gpr(v1_t
, ret
);
20868 switch (extract32(ctx
->opcode
, 10, 1)) {
20871 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20872 gen_store_gpr(v1_t
, ret
);
20876 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20877 gen_store_gpr(v1_t
, ret
);
20881 case NM_PRECR_QB_PH
:
20883 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20884 gen_store_gpr(v1_t
, ret
);
20886 case NM_PRECRQ_QB_PH
:
20888 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20889 gen_store_gpr(v1_t
, ret
);
20891 case NM_PRECRQ_PH_W
:
20893 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20894 gen_store_gpr(v1_t
, ret
);
20896 case NM_PRECRQ_RS_PH_W
:
20898 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20899 gen_store_gpr(v1_t
, ret
);
20901 case NM_PRECRQU_S_QB_PH
:
20903 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20904 gen_store_gpr(v1_t
, ret
);
20908 tcg_gen_movi_tl(t0
, rd
);
20909 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20910 gen_store_gpr(v1_t
, rt
);
20914 tcg_gen_movi_tl(t0
, rd
>> 1);
20915 switch (extract32(ctx
->opcode
, 10, 1)) {
20918 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20919 gen_store_gpr(v1_t
, rt
);
20923 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20924 gen_store_gpr(v1_t
, rt
);
20930 tcg_gen_movi_tl(t0
, rd
>> 1);
20931 switch (extract32(ctx
->opcode
, 10, 2)) {
20934 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20935 gen_store_gpr(v1_t
, rt
);
20939 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20940 gen_store_gpr(v1_t
, rt
);
20943 generate_exception_end(ctx
, EXCP_RI
);
20949 tcg_gen_movi_tl(t0
, rd
);
20950 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20951 gen_store_gpr(v1_t
, rt
);
20957 imm
= sextract32(ctx
->opcode
, 11, 11);
20958 imm
= (int16_t)(imm
<< 6) >> 6;
20960 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20965 generate_exception_end(ctx
, EXCP_RI
);
20970 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20978 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20979 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20981 rt
= extract32(ctx
->opcode
, 21, 5);
20982 rs
= extract32(ctx
->opcode
, 16, 5);
20983 rd
= extract32(ctx
->opcode
, 11, 5);
20985 op
= extract32(ctx
->opcode
, 26, 6);
20990 switch (extract32(ctx
->opcode
, 19, 2)) {
20993 generate_exception_end(ctx
, EXCP_RI
);
20996 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20997 generate_exception_end(ctx
, EXCP_SYSCALL
);
20999 generate_exception_end(ctx
, EXCP_RI
);
21003 generate_exception_end(ctx
, EXCP_BREAK
);
21006 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
21007 gen_helper_do_semihosting(cpu_env
);
21009 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21010 generate_exception_end(ctx
, EXCP_RI
);
21012 generate_exception_end(ctx
, EXCP_DBp
);
21019 imm
= extract32(ctx
->opcode
, 0, 16);
21021 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
21023 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21025 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21030 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21031 extract32(ctx
->opcode
, 1, 20) << 1;
21032 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21033 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21037 switch (ctx
->opcode
& 0x07) {
21039 gen_pool32a0_nanomips_insn(env
, ctx
);
21043 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21044 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21048 switch (extract32(ctx
->opcode
, 3, 3)) {
21050 gen_p_lsx(ctx
, rd
, rs
, rt
);
21053 /* In nanoMIPS, the shift field directly encodes the shift
21054 * amount, meaning that the supported shift values are in
21055 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
21056 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
21057 extract32(ctx
->opcode
, 9, 2) - 1);
21060 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21063 gen_pool32axf_nanomips_insn(env
, ctx
);
21066 generate_exception_end(ctx
, EXCP_RI
);
21071 generate_exception_end(ctx
, EXCP_RI
);
21076 switch (ctx
->opcode
& 0x03) {
21079 offset
= extract32(ctx
->opcode
, 0, 21);
21080 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21084 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21087 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21090 generate_exception_end(ctx
, EXCP_RI
);
21096 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21097 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21098 switch (extract32(ctx
->opcode
, 16, 5)) {
21102 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21108 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21109 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21115 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21121 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21124 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21131 t0
= tcg_temp_new();
21133 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21136 tcg_gen_movi_tl(t0
, addr
);
21137 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21145 t0
= tcg_temp_new();
21146 t1
= tcg_temp_new();
21148 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21151 tcg_gen_movi_tl(t0
, addr
);
21152 gen_load_gpr(t1
, rt
);
21154 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21161 generate_exception_end(ctx
, EXCP_RI
);
21167 switch (extract32(ctx
->opcode
, 12, 4)) {
21169 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21172 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21175 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21178 switch (extract32(ctx
->opcode
, 20, 1)) {
21180 switch (ctx
->opcode
& 3) {
21182 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21183 extract32(ctx
->opcode
, 2, 1),
21184 extract32(ctx
->opcode
, 3, 9) << 3);
21187 case NM_RESTORE_JRC
:
21188 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21189 extract32(ctx
->opcode
, 2, 1),
21190 extract32(ctx
->opcode
, 3, 9) << 3);
21191 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21192 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21196 generate_exception_end(ctx
, EXCP_RI
);
21201 generate_exception_end(ctx
, EXCP_RI
);
21206 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21209 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21213 TCGv t0
= tcg_temp_new();
21215 imm
= extract32(ctx
->opcode
, 0, 12);
21216 gen_load_gpr(t0
, rs
);
21217 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21218 gen_store_gpr(t0
, rt
);
21224 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21225 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21229 int shift
= extract32(ctx
->opcode
, 0, 5);
21230 switch (extract32(ctx
->opcode
, 5, 4)) {
21232 if (rt
== 0 && shift
== 0) {
21234 } else if (rt
== 0 && shift
== 3) {
21235 /* EHB - treat as NOP */
21236 } else if (rt
== 0 && shift
== 5) {
21237 /* PAUSE - treat as NOP */
21238 } else if (rt
== 0 && shift
== 6) {
21240 gen_sync(extract32(ctx
->opcode
, 16, 5));
21243 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21244 extract32(ctx
->opcode
, 0, 5));
21248 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21249 extract32(ctx
->opcode
, 0, 5));
21252 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21253 extract32(ctx
->opcode
, 0, 5));
21256 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21257 extract32(ctx
->opcode
, 0, 5));
21265 TCGv t0
= tcg_temp_new();
21266 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21267 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21269 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21271 gen_load_gpr(t0
, rs
);
21272 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21275 tcg_temp_free_i32(shift
);
21276 tcg_temp_free_i32(shiftx
);
21277 tcg_temp_free_i32(stripe
);
21281 switch (((ctx
->opcode
>> 10) & 2) |
21282 (extract32(ctx
->opcode
, 5, 1))) {
21285 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21286 extract32(ctx
->opcode
, 6, 5));
21289 generate_exception_end(ctx
, EXCP_RI
);
21294 switch (((ctx
->opcode
>> 10) & 2) |
21295 (extract32(ctx
->opcode
, 5, 1))) {
21298 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21299 extract32(ctx
->opcode
, 6, 5));
21302 generate_exception_end(ctx
, EXCP_RI
);
21307 generate_exception_end(ctx
, EXCP_RI
);
21312 gen_pool32f_nanomips_insn(ctx
);
21317 switch (extract32(ctx
->opcode
, 1, 1)) {
21320 tcg_gen_movi_tl(cpu_gpr
[rt
],
21321 sextract32(ctx
->opcode
, 0, 1) << 31 |
21322 extract32(ctx
->opcode
, 2, 10) << 21 |
21323 extract32(ctx
->opcode
, 12, 9) << 12);
21328 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21329 extract32(ctx
->opcode
, 2, 10) << 21 |
21330 extract32(ctx
->opcode
, 12, 9) << 12;
21332 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21333 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21340 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21342 switch (extract32(ctx
->opcode
, 18, 3)) {
21344 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21347 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21350 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21354 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21359 switch (ctx
->opcode
& 1) {
21361 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21364 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21370 switch (ctx
->opcode
& 1) {
21372 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21375 generate_exception_end(ctx
, EXCP_RI
);
21381 switch (ctx
->opcode
& 0x3) {
21383 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21386 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21389 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21392 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21397 generate_exception_end(ctx
, EXCP_RI
);
21404 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21406 switch (extract32(ctx
->opcode
, 12, 4)) {
21410 /* Break the TB to be able to sync copied instructions
21412 ctx
->base
.is_jmp
= DISAS_STOP
;
21415 /* Treat as NOP. */
21419 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21422 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21425 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21428 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21431 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21434 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21437 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21440 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21443 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21446 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21449 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21452 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21455 generate_exception_end(ctx
, EXCP_RI
);
21462 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21463 extract32(ctx
->opcode
, 0, 8);
21465 switch (extract32(ctx
->opcode
, 8, 3)) {
21467 switch (extract32(ctx
->opcode
, 11, 4)) {
21469 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21472 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21475 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21478 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21481 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21484 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21487 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21490 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21493 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21496 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21499 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21502 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21507 /* Break the TB to be able to sync copied instructions
21509 ctx
->base
.is_jmp
= DISAS_STOP
;
21512 /* Treat as NOP. */
21516 generate_exception_end(ctx
, EXCP_RI
);
21521 switch (extract32(ctx
->opcode
, 11, 4)) {
21526 TCGv t0
= tcg_temp_new();
21527 TCGv t1
= tcg_temp_new();
21529 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21531 switch (extract32(ctx
->opcode
, 11, 4)) {
21533 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21535 gen_store_gpr(t0
, rt
);
21538 gen_load_gpr(t1
, rt
);
21539 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21548 switch (ctx
->opcode
& 0x03) {
21550 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21554 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21559 switch (ctx
->opcode
& 0x03) {
21561 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21565 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21571 check_cp0_enabled(ctx
);
21572 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21573 gen_cache_operation(ctx
, rt
, rs
, s
);
21579 switch (extract32(ctx
->opcode
, 11, 4)) {
21582 check_cp0_enabled(ctx
);
21583 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21587 check_cp0_enabled(ctx
);
21588 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21592 check_cp0_enabled(ctx
);
21593 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21597 /* case NM_SYNCIE */
21599 check_cp0_enabled(ctx
);
21600 /* Break the TB to be able to sync copied instructions
21602 ctx
->base
.is_jmp
= DISAS_STOP
;
21604 /* case NM_PREFE */
21606 check_cp0_enabled(ctx
);
21607 /* Treat as NOP. */
21612 check_cp0_enabled(ctx
);
21613 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21617 check_cp0_enabled(ctx
);
21618 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21622 check_cp0_enabled(ctx
);
21623 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21626 check_nms_dl_il_sl_tl_l2c(ctx
);
21627 gen_cache_operation(ctx
, rt
, rs
, s
);
21631 check_cp0_enabled(ctx
);
21632 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21636 check_cp0_enabled(ctx
);
21637 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21640 switch (extract32(ctx
->opcode
, 2, 2)) {
21644 check_cp0_enabled(ctx
);
21645 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21650 check_cp0_enabled(ctx
);
21651 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21654 generate_exception_end(ctx
, EXCP_RI
);
21659 switch (extract32(ctx
->opcode
, 2, 2)) {
21663 check_cp0_enabled(ctx
);
21664 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21669 check_cp0_enabled(ctx
);
21670 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21674 generate_exception_end(ctx
, EXCP_RI
);
21684 int count
= extract32(ctx
->opcode
, 12, 3);
21687 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21688 extract32(ctx
->opcode
, 0, 8);
21689 TCGv va
= tcg_temp_new();
21690 TCGv t1
= tcg_temp_new();
21691 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21692 NM_P_LS_UAWM
? MO_UNALN
: 0;
21694 count
= (count
== 0) ? 8 : count
;
21695 while (counter
!= count
) {
21696 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21697 int this_offset
= offset
+ (counter
<< 2);
21699 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21701 switch (extract32(ctx
->opcode
, 11, 1)) {
21703 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21705 gen_store_gpr(t1
, this_rt
);
21706 if ((this_rt
== rs
) &&
21707 (counter
!= (count
- 1))) {
21708 /* UNPREDICTABLE */
21712 this_rt
= (rt
== 0) ? 0 : this_rt
;
21713 gen_load_gpr(t1
, this_rt
);
21714 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21725 generate_exception_end(ctx
, EXCP_RI
);
21733 TCGv t0
= tcg_temp_new();
21734 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21735 extract32(ctx
->opcode
, 1, 20) << 1;
21736 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21737 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21738 extract32(ctx
->opcode
, 21, 3));
21739 gen_load_gpr(t0
, rt
);
21740 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21741 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21747 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21748 extract32(ctx
->opcode
, 1, 24) << 1;
21750 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21752 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21755 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21760 switch (extract32(ctx
->opcode
, 12, 4)) {
21763 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21766 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21769 generate_exception_end(ctx
, EXCP_RI
);
21775 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21776 extract32(ctx
->opcode
, 1, 13) << 1;
21777 switch (extract32(ctx
->opcode
, 14, 2)) {
21780 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21783 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21784 extract32(ctx
->opcode
, 1, 13) << 1;
21785 check_cp1_enabled(ctx
);
21786 switch (extract32(ctx
->opcode
, 16, 5)) {
21788 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21791 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21796 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21797 extract32(ctx
->opcode
, 0, 1) << 13;
21799 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21804 generate_exception_end(ctx
, EXCP_RI
);
21810 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21812 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21816 if (rs
== rt
|| rt
== 0) {
21817 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21818 } else if (rs
== 0) {
21819 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21821 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21829 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21830 extract32(ctx
->opcode
, 1, 13) << 1;
21831 switch (extract32(ctx
->opcode
, 14, 2)) {
21834 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21837 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21839 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21841 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21845 if (rs
== 0 || rs
== rt
) {
21847 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21849 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21853 generate_exception_end(ctx
, EXCP_RI
);
21860 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21861 extract32(ctx
->opcode
, 1, 10) << 1;
21862 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21864 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21869 generate_exception_end(ctx
, EXCP_RI
);
21875 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21878 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
21879 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
21880 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx
->opcode
));
21884 /* make sure instructions are on a halfword boundary */
21885 if (ctx
->base
.pc_next
& 0x1) {
21886 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21887 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21888 tcg_temp_free(tmp
);
21889 generate_exception_end(ctx
, EXCP_AdEL
);
21893 op
= extract32(ctx
->opcode
, 10, 6);
21896 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21899 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21900 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21903 switch (extract32(ctx
->opcode
, 3, 2)) {
21904 case NM_P16_SYSCALL
:
21905 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21906 generate_exception_end(ctx
, EXCP_SYSCALL
);
21908 generate_exception_end(ctx
, EXCP_RI
);
21912 generate_exception_end(ctx
, EXCP_BREAK
);
21915 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21916 gen_helper_do_semihosting(cpu_env
);
21918 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21919 generate_exception_end(ctx
, EXCP_RI
);
21921 generate_exception_end(ctx
, EXCP_DBp
);
21926 generate_exception_end(ctx
, EXCP_RI
);
21933 int shift
= extract32(ctx
->opcode
, 0, 3);
21935 shift
= (shift
== 0) ? 8 : shift
;
21937 switch (extract32(ctx
->opcode
, 3, 1)) {
21945 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21949 switch (ctx
->opcode
& 1) {
21951 gen_pool16c_nanomips_insn(ctx
);
21954 gen_ldxs(ctx
, rt
, rs
, rd
);
21959 switch (extract32(ctx
->opcode
, 6, 1)) {
21961 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21962 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21965 generate_exception_end(ctx
, EXCP_RI
);
21970 switch (extract32(ctx
->opcode
, 3, 1)) {
21972 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21973 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21975 case NM_P_ADDIURS5
:
21976 rt
= extract32(ctx
->opcode
, 5, 5);
21978 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21979 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21980 (extract32(ctx
->opcode
, 0, 3));
21981 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21987 switch (ctx
->opcode
& 0x1) {
21989 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21992 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21997 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21998 extract32(ctx
->opcode
, 5, 3);
21999 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22000 extract32(ctx
->opcode
, 0, 3);
22001 rt
= decode_gpr_gpr4(rt
);
22002 rs
= decode_gpr_gpr4(rs
);
22003 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22004 (extract32(ctx
->opcode
, 3, 1))) {
22007 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22011 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22014 generate_exception_end(ctx
, EXCP_RI
);
22020 int imm
= extract32(ctx
->opcode
, 0, 7);
22021 imm
= (imm
== 0x7f ? -1 : imm
);
22023 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22029 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22030 u
= (u
== 12) ? 0xff :
22031 (u
== 13) ? 0xffff : u
;
22032 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22036 offset
= extract32(ctx
->opcode
, 0, 2);
22037 switch (extract32(ctx
->opcode
, 2, 2)) {
22039 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22042 rt
= decode_gpr_gpr3_src_store(
22043 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22044 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22047 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22050 generate_exception_end(ctx
, EXCP_RI
);
22055 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22056 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22058 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22061 rt
= decode_gpr_gpr3_src_store(
22062 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22063 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22066 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22069 generate_exception_end(ctx
, EXCP_RI
);
22074 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22075 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22078 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22079 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22080 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22084 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22085 extract32(ctx
->opcode
, 5, 3);
22086 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22087 extract32(ctx
->opcode
, 0, 3);
22088 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22089 (extract32(ctx
->opcode
, 8, 1) << 2);
22090 rt
= decode_gpr_gpr4(rt
);
22091 rs
= decode_gpr_gpr4(rs
);
22092 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22096 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22097 extract32(ctx
->opcode
, 5, 3);
22098 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22099 extract32(ctx
->opcode
, 0, 3);
22100 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22101 (extract32(ctx
->opcode
, 8, 1) << 2);
22102 rt
= decode_gpr_gpr4_zero(rt
);
22103 rs
= decode_gpr_gpr4(rs
);
22104 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22107 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22108 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22111 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22112 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22113 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22116 rt
= decode_gpr_gpr3_src_store(
22117 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22118 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22119 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22120 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22123 rt
= decode_gpr_gpr3_src_store(
22124 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22125 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22126 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22129 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22130 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22131 (extract32(ctx
->opcode
, 1, 9) << 1));
22134 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22135 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22136 (extract32(ctx
->opcode
, 1, 9) << 1));
22139 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22140 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22141 (extract32(ctx
->opcode
, 1, 6) << 1));
22144 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22145 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22146 (extract32(ctx
->opcode
, 1, 6) << 1));
22149 switch (ctx
->opcode
& 0xf) {
22152 switch (extract32(ctx
->opcode
, 4, 1)) {
22154 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22155 extract32(ctx
->opcode
, 5, 5), 0, 0);
22158 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22159 extract32(ctx
->opcode
, 5, 5), 31, 0);
22166 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22167 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22168 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22169 extract32(ctx
->opcode
, 0, 4) << 1);
22176 int count
= extract32(ctx
->opcode
, 0, 4);
22177 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22179 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22180 switch (extract32(ctx
->opcode
, 8, 1)) {
22182 gen_save(ctx
, rt
, count
, 0, u
);
22184 case NM_RESTORE_JRC16
:
22185 gen_restore(ctx
, rt
, count
, 0, u
);
22186 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22195 static const int gpr2reg1
[] = {4, 5, 6, 7};
22196 static const int gpr2reg2
[] = {5, 6, 7, 8};
22198 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22199 extract32(ctx
->opcode
, 8, 1);
22200 int r1
= gpr2reg1
[rd2
];
22201 int r2
= gpr2reg2
[rd2
];
22202 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22203 extract32(ctx
->opcode
, 0, 3);
22204 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22205 extract32(ctx
->opcode
, 5, 3);
22206 TCGv t0
= tcg_temp_new();
22207 TCGv t1
= tcg_temp_new();
22208 if (op
== NM_MOVEP
) {
22211 rs
= decode_gpr_gpr4_zero(r3
);
22212 rt
= decode_gpr_gpr4_zero(r4
);
22214 rd
= decode_gpr_gpr4(r3
);
22215 re
= decode_gpr_gpr4(r4
);
22219 gen_load_gpr(t0
, rs
);
22220 gen_load_gpr(t1
, rt
);
22221 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22222 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22228 return decode_nanomips_32_48_opc(env
, ctx
);
22235 /* SmartMIPS extension to MIPS32 */
22237 #if defined(TARGET_MIPS64)
22239 /* MDMX extension to MIPS64 */
22243 /* MIPSDSP functions. */
22244 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22245 int rd
, int base
, int offset
)
22250 t0
= tcg_temp_new();
22253 gen_load_gpr(t0
, offset
);
22254 } else if (offset
== 0) {
22255 gen_load_gpr(t0
, base
);
22257 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22262 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22263 gen_store_gpr(t0
, rd
);
22266 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22267 gen_store_gpr(t0
, rd
);
22270 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22271 gen_store_gpr(t0
, rd
);
22273 #if defined(TARGET_MIPS64)
22275 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22276 gen_store_gpr(t0
, rd
);
22283 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22284 int ret
, int v1
, int v2
)
22290 /* Treat as NOP. */
22294 v1_t
= tcg_temp_new();
22295 v2_t
= tcg_temp_new();
22297 gen_load_gpr(v1_t
, v1
);
22298 gen_load_gpr(v2_t
, v2
);
22301 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22302 case OPC_MULT_G_2E
:
22306 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22308 case OPC_ADDUH_R_QB
:
22309 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22312 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22314 case OPC_ADDQH_R_PH
:
22315 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22318 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22320 case OPC_ADDQH_R_W
:
22321 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22324 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22326 case OPC_SUBUH_R_QB
:
22327 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22330 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22332 case OPC_SUBQH_R_PH
:
22333 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22336 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22338 case OPC_SUBQH_R_W
:
22339 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22343 case OPC_ABSQ_S_PH_DSP
:
22345 case OPC_ABSQ_S_QB
:
22347 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22349 case OPC_ABSQ_S_PH
:
22351 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22355 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22357 case OPC_PRECEQ_W_PHL
:
22359 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22360 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22362 case OPC_PRECEQ_W_PHR
:
22364 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22365 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22366 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22368 case OPC_PRECEQU_PH_QBL
:
22370 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22372 case OPC_PRECEQU_PH_QBR
:
22374 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22376 case OPC_PRECEQU_PH_QBLA
:
22378 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22380 case OPC_PRECEQU_PH_QBRA
:
22382 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22384 case OPC_PRECEU_PH_QBL
:
22386 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22388 case OPC_PRECEU_PH_QBR
:
22390 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22392 case OPC_PRECEU_PH_QBLA
:
22394 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22396 case OPC_PRECEU_PH_QBRA
:
22398 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22402 case OPC_ADDU_QB_DSP
:
22406 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22408 case OPC_ADDQ_S_PH
:
22410 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22414 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22418 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22420 case OPC_ADDU_S_QB
:
22422 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22426 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22428 case OPC_ADDU_S_PH
:
22430 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22434 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22436 case OPC_SUBQ_S_PH
:
22438 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22442 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22446 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22448 case OPC_SUBU_S_QB
:
22450 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22454 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22456 case OPC_SUBU_S_PH
:
22458 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22462 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22466 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22470 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22472 case OPC_RADDU_W_QB
:
22474 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22478 case OPC_CMPU_EQ_QB_DSP
:
22480 case OPC_PRECR_QB_PH
:
22482 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22484 case OPC_PRECRQ_QB_PH
:
22486 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22488 case OPC_PRECR_SRA_PH_W
:
22491 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22492 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22494 tcg_temp_free_i32(sa_t
);
22497 case OPC_PRECR_SRA_R_PH_W
:
22500 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22501 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22503 tcg_temp_free_i32(sa_t
);
22506 case OPC_PRECRQ_PH_W
:
22508 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22510 case OPC_PRECRQ_RS_PH_W
:
22512 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22514 case OPC_PRECRQU_S_QB_PH
:
22516 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22520 #ifdef TARGET_MIPS64
22521 case OPC_ABSQ_S_QH_DSP
:
22523 case OPC_PRECEQ_L_PWL
:
22525 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22527 case OPC_PRECEQ_L_PWR
:
22529 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22531 case OPC_PRECEQ_PW_QHL
:
22533 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22535 case OPC_PRECEQ_PW_QHR
:
22537 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22539 case OPC_PRECEQ_PW_QHLA
:
22541 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22543 case OPC_PRECEQ_PW_QHRA
:
22545 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22547 case OPC_PRECEQU_QH_OBL
:
22549 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22551 case OPC_PRECEQU_QH_OBR
:
22553 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22555 case OPC_PRECEQU_QH_OBLA
:
22557 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22559 case OPC_PRECEQU_QH_OBRA
:
22561 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22563 case OPC_PRECEU_QH_OBL
:
22565 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22567 case OPC_PRECEU_QH_OBR
:
22569 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22571 case OPC_PRECEU_QH_OBLA
:
22573 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22575 case OPC_PRECEU_QH_OBRA
:
22577 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22579 case OPC_ABSQ_S_OB
:
22581 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22583 case OPC_ABSQ_S_PW
:
22585 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22587 case OPC_ABSQ_S_QH
:
22589 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22593 case OPC_ADDU_OB_DSP
:
22595 case OPC_RADDU_L_OB
:
22597 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22601 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22603 case OPC_SUBQ_S_PW
:
22605 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22609 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22611 case OPC_SUBQ_S_QH
:
22613 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22617 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22619 case OPC_SUBU_S_OB
:
22621 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22625 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22627 case OPC_SUBU_S_QH
:
22629 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22633 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22635 case OPC_SUBUH_R_OB
:
22637 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22641 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22643 case OPC_ADDQ_S_PW
:
22645 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22649 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22651 case OPC_ADDQ_S_QH
:
22653 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22657 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22659 case OPC_ADDU_S_OB
:
22661 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22665 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22667 case OPC_ADDU_S_QH
:
22669 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22673 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22675 case OPC_ADDUH_R_OB
:
22677 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22681 case OPC_CMPU_EQ_OB_DSP
:
22683 case OPC_PRECR_OB_QH
:
22685 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22687 case OPC_PRECR_SRA_QH_PW
:
22690 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22691 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22692 tcg_temp_free_i32(ret_t
);
22695 case OPC_PRECR_SRA_R_QH_PW
:
22698 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22699 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22700 tcg_temp_free_i32(sa_v
);
22703 case OPC_PRECRQ_OB_QH
:
22705 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22707 case OPC_PRECRQ_PW_L
:
22709 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22711 case OPC_PRECRQ_QH_PW
:
22713 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22715 case OPC_PRECRQ_RS_QH_PW
:
22717 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22719 case OPC_PRECRQU_S_OB_QH
:
22721 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22728 tcg_temp_free(v1_t
);
22729 tcg_temp_free(v2_t
);
/*
 * Emit TCG ops for the MIPS DSP ASE shift instruction class
 * (SHLL/SHRL/SHRA and their .QB/.PH/.W — and, under TARGET_MIPS64,
 * .OB/.QH/.PW — sub-formats).  Immediate-shift forms pass the shift
 * amount via t0 (loaded with the constant v1); the *V variable forms
 * pass the rs register value v1_t instead.  Saturating variants route
 * through cpu_env so the helper can update DSPControl.
 *
 * NOTE(review): this text is a line-fragmented extraction; original
 * line numbers are embedded and several lines (braces, break
 * statements, inner switch headers) appear to be missing — verify
 * structure against the upstream file before relying on it.
 */
22732 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22733 int ret
, int v1
, int v2
)
22741 /* Treat as NOP. */
/* t0 carries the immediate shift amount; v1_t/v2_t the operand GPRs. */
22745 t0
= tcg_temp_new();
22746 v1_t
= tcg_temp_new();
22747 v2_t
= tcg_temp_new();
22749 tcg_gen_movi_tl(t0
, v1
);
22750 gen_load_gpr(v1_t
, v1
);
22751 gen_load_gpr(v2_t
, v2
);
/* 32-bit DSP shift sub-opcodes (SHLL.QB group). */
22754 case OPC_SHLL_QB_DSP
:
22756 op2
= MASK_SHLL_QB(ctx
->opcode
);
22760 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22764 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22768 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22772 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22774 case OPC_SHLL_S_PH
:
22776 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22778 case OPC_SHLLV_S_PH
:
22780 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22784 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22786 case OPC_SHLLV_S_W
:
22788 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Logical right shifts take no cpu_env: they cannot saturate. */
22792 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22796 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22800 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22804 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22808 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22810 case OPC_SHRA_R_QB
:
22812 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22816 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22818 case OPC_SHRAV_R_QB
:
22820 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22824 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22826 case OPC_SHRA_R_PH
:
22828 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22832 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22834 case OPC_SHRAV_R_PH
:
22836 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22840 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22842 case OPC_SHRAV_R_W
:
22844 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22846 default: /* Invalid */
22847 MIPS_INVAL("MASK SHLL.QB");
22848 generate_exception_end(ctx
, EXCP_RI
);
/* 64-bit DSP shift sub-opcodes (SHLL.OB group); operand order swaps:
 * the value register is v2_t, the shift amount comes second. */
22853 #ifdef TARGET_MIPS64
22854 case OPC_SHLL_OB_DSP
:
22855 op2
= MASK_SHLL_OB(ctx
->opcode
);
22859 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22863 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22865 case OPC_SHLL_S_PW
:
22867 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22869 case OPC_SHLLV_S_PW
:
22871 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22875 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22879 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22883 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22887 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22889 case OPC_SHLL_S_QH
:
22891 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22893 case OPC_SHLLV_S_QH
:
22895 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22899 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22903 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22905 case OPC_SHRA_R_OB
:
22907 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22909 case OPC_SHRAV_R_OB
:
22911 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22915 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22919 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22921 case OPC_SHRA_R_PW
:
22923 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22925 case OPC_SHRAV_R_PW
:
22927 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22931 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22935 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22937 case OPC_SHRA_R_QH
:
22939 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22941 case OPC_SHRAV_R_QH
:
22943 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22947 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22951 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22955 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22959 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22961 default: /* Invalid */
22962 MIPS_INVAL("MASK SHLL.OB");
22963 generate_exception_end(ctx
, EXCP_RI
);
/* Release the operand temporaries; t0's free is not visible here. */
22971 tcg_temp_free(v1_t
);
22972 tcg_temp_free(v2_t
);
/*
 * Emit TCG ops for the MIPS DSP ASE multiply class: plain multiplies
 * writing a GPR (cpu_gpr[ret]) and dot-product/multiply-accumulate
 * forms targeting an accumulator.  For accumulator forms, t0 (i32)
 * holds the accumulator index (the raw ret field, masked to 2 bits in
 * the 64-bit group).  check_ret==1 with ret==0 makes the insn a NOP,
 * since writes to $zero are discarded.
 *
 * NOTE(review): line-fragmented extraction; braces/break lines are
 * missing between cases — verify structure against upstream.
 */
22975 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22976 int ret
, int v1
, int v2
, int check_ret
)
22982 if ((ret
== 0) && (check_ret
== 1)) {
22983 /* Treat as NOP. */
22987 t0
= tcg_temp_new_i32();
22988 v1_t
= tcg_temp_new();
22989 v2_t
= tcg_temp_new();
/* t0 = destination index (GPR number or accumulator number). */
22991 tcg_gen_movi_i32(t0
, ret
);
22992 gen_load_gpr(v1_t
, v1
);
22993 gen_load_gpr(v2_t
, v2
);
22996 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22997 * the same mask and op1. */
22998 case OPC_MULT_G_2E
:
23002 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23005 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23008 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23010 case OPC_MULQ_RS_W
:
23011 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Dot-product / multiply-accumulate group: result goes to the
 * accumulator selected by t0, not to a GPR. */
23015 case OPC_DPA_W_PH_DSP
:
23017 case OPC_DPAU_H_QBL
:
23019 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23021 case OPC_DPAU_H_QBR
:
23023 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23025 case OPC_DPSU_H_QBL
:
23027 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23029 case OPC_DPSU_H_QBR
:
23031 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23035 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23037 case OPC_DPAX_W_PH
:
23039 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23041 case OPC_DPAQ_S_W_PH
:
23043 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23045 case OPC_DPAQX_S_W_PH
:
23047 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23049 case OPC_DPAQX_SA_W_PH
:
23051 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23055 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23057 case OPC_DPSX_W_PH
:
23059 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23061 case OPC_DPSQ_S_W_PH
:
23063 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23065 case OPC_DPSQX_S_W_PH
:
23067 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23069 case OPC_DPSQX_SA_W_PH
:
23071 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23073 case OPC_MULSAQ_S_W_PH
:
23075 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23077 case OPC_DPAQ_SA_L_W
:
23079 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23081 case OPC_DPSQ_SA_L_W
:
23083 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23085 case OPC_MAQ_S_W_PHL
:
23087 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23089 case OPC_MAQ_S_W_PHR
:
23091 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23093 case OPC_MAQ_SA_W_PHL
:
23095 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23097 case OPC_MAQ_SA_W_PHR
:
23099 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23101 case OPC_MULSA_W_PH
:
23103 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
/* 64-bit accumulate group: only the low 2 bits of ret select the
 * accumulator; helper argument order differs (value regs first). */
23107 #ifdef TARGET_MIPS64
23108 case OPC_DPAQ_W_QH_DSP
:
23110 int ac
= ret
& 0x03;
23111 tcg_gen_movi_i32(t0
, ac
);
23116 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23120 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23124 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23128 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23132 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23134 case OPC_DPAQ_S_W_QH
:
23136 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23138 case OPC_DPAQ_SA_L_PW
:
23140 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23142 case OPC_DPAU_H_OBL
:
23144 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23146 case OPC_DPAU_H_OBR
:
23148 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23152 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23154 case OPC_DPSQ_S_W_QH
:
23156 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23158 case OPC_DPSQ_SA_L_PW
:
23160 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23162 case OPC_DPSU_H_OBL
:
23164 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23166 case OPC_DPSU_H_OBR
:
23168 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23170 case OPC_MAQ_S_L_PWL
:
23172 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23174 case OPC_MAQ_S_L_PWR
:
23176 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23178 case OPC_MAQ_S_W_QHLL
:
23180 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23182 case OPC_MAQ_SA_W_QHLL
:
23184 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23186 case OPC_MAQ_S_W_QHLR
:
23188 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23190 case OPC_MAQ_SA_W_QHLR
:
23192 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23194 case OPC_MAQ_S_W_QHRL
:
23196 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23198 case OPC_MAQ_SA_W_QHRL
:
23200 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23202 case OPC_MAQ_S_W_QHRR
:
23204 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23206 case OPC_MAQ_SA_W_QHRR
:
23208 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23210 case OPC_MULSAQ_S_L_PW
:
23212 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23214 case OPC_MULSAQ_S_W_QH
:
23216 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
/* MULEU/MULEQ/MULQ group: GPR-destination multiplies. */
23222 case OPC_ADDU_QB_DSP
:
23224 case OPC_MULEU_S_PH_QBL
:
23226 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23228 case OPC_MULEU_S_PH_QBR
:
23230 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23232 case OPC_MULQ_RS_PH
:
23234 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23236 case OPC_MULEQ_S_W_PHL
:
23238 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23240 case OPC_MULEQ_S_W_PHR
:
23242 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23244 case OPC_MULQ_S_PH
:
23246 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23250 #ifdef TARGET_MIPS64
23251 case OPC_ADDU_OB_DSP
:
23253 case OPC_MULEQ_S_PW_QHL
:
23255 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23257 case OPC_MULEQ_S_PW_QHR
:
23259 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23261 case OPC_MULEU_S_QH_OBL
:
23263 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23265 case OPC_MULEU_S_QH_OBR
:
23267 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23269 case OPC_MULQ_RS_QH
:
23271 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Release all temporaries allocated above. */
23278 tcg_temp_free_i32(t0
);
23279 tcg_temp_free(v1_t
);
23280 tcg_temp_free(v2_t
);
/*
 * Emit TCG ops for the MIPS DSP ASE bit/field instruction class:
 * BITREV plus the REPL/REPLV replicate forms for .QB/.PH (and, under
 * TARGET_MIPS64, .OB/.PW/.QH).  REPL forms build the replicated
 * constant at translation time and emit a single movi; REPLV forms
 * replicate the register value val_t with shift/or sequences.
 *
 * NOTE(review): line-fragmented extraction; some lines (braces, break
 * statements, case labels) appear missing — verify against upstream.
 */
23283 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23291 /* Treat as NOP. */
23295 t0
= tcg_temp_new();
23296 val_t
= tcg_temp_new();
23297 gen_load_gpr(val_t
, val
);
23300 case OPC_ABSQ_S_PH_DSP
:
23304 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
/* REPL.QB: replicate an 8-bit immediate into all four byte lanes,
 * then sign-extend the 32-bit pattern into the target register. */
23309 target_long result
;
23310 imm
= (ctx
->opcode
>> 16) & 0xFF;
23311 result
= (uint32_t)imm
<< 24 |
23312 (uint32_t)imm
<< 16 |
23313 (uint32_t)imm
<< 8 |
23315 result
= (int32_t)result
;
23316 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
/* REPLV.QB: replicate the low byte of val_t across the word. */
23321 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23322 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23323 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23324 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23325 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23326 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
/* REPL.PH: sign-extend a 10-bit immediate to 16 bits, duplicate it
 * into both halfword lanes. */
23331 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23332 imm
= (int16_t)(imm
<< 6) >> 6;
23333 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23334 (target_long
)((int32_t)imm
<< 16 | \
/* REPLV.PH: replicate the low halfword of val_t. */
23340 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23341 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23342 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23343 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
/* 64-bit variants: same replicate patterns over 8 bytes / 2 words /
 * 4 halfwords. */
23347 #ifdef TARGET_MIPS64
23348 case OPC_ABSQ_S_QH_DSP
:
23355 imm
= (ctx
->opcode
>> 16) & 0xFF;
23356 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23357 temp
= (temp
<< 16) | temp
;
23358 temp
= (temp
<< 32) | temp
;
23359 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23367 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23368 imm
= (int16_t)(imm
<< 6) >> 6;
23369 temp
= ((target_long
)imm
<< 32) \
23370 | ((target_long
)imm
& 0xFFFFFFFF);
23371 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23379 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23380 imm
= (int16_t)(imm
<< 6) >> 6;
23382 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23383 ((uint64_t)(uint16_t)imm
<< 32) |
23384 ((uint64_t)(uint16_t)imm
<< 16) |
23385 (uint64_t)(uint16_t)imm
;
23386 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
/* REPLV.OB: low byte of val_t replicated into all 8 byte lanes. */
23391 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23392 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23393 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23394 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23395 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23396 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23397 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
/* REPLV.PW: low word of val_t duplicated into both word lanes. */
23401 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23402 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23403 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
/* REPLV.QH: low halfword of val_t into all 4 halfword lanes. */
23407 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23408 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23409 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23410 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23411 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23418 tcg_temp_free(val_t
);
/*
 * Emit TCG ops for the MIPS DSP ASE compare/pick class.
 * CMPU/CMP forms write their result into DSPControl condition bits
 * (helper takes cpu_env, no GPR destination); CMPGU forms write a
 * bitmask into cpu_gpr[ret]; CMPGDU forms do both — the inline TCG
 * sequence copies the mask into the GPR and into DSPControl[27:24].
 * PICK/PACKRL select lanes based on those condition bits.
 *
 * NOTE(review): line-fragmented extraction; braces/break lines are
 * missing between cases — verify structure against upstream.
 */
23421 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23422 uint32_t op1
, uint32_t op2
,
23423 int ret
, int v1
, int v2
, int check_ret
)
23429 if ((ret
== 0) && (check_ret
== 1)) {
23430 /* Treat as NOP. */
23434 t1
= tcg_temp_new();
23435 v1_t
= tcg_temp_new();
23436 v2_t
= tcg_temp_new();
23438 gen_load_gpr(v1_t
, v1
);
23439 gen_load_gpr(v2_t
, v2
);
23442 case OPC_CMPU_EQ_QB_DSP
:
23444 case OPC_CMPU_EQ_QB
:
23446 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23448 case OPC_CMPU_LT_QB
:
23450 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23452 case OPC_CMPU_LE_QB
:
23454 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23456 case OPC_CMPGU_EQ_QB
:
23458 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23460 case OPC_CMPGU_LT_QB
:
23462 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23464 case OPC_CMPGU_LE_QB
:
23466 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
/* CMPGDU.*: reuse the CMPGU helper, then mirror the 4-bit result
 * into DSPControl bits 27:24 (mask 0xF0FFFFFF clears them first). */
23468 case OPC_CMPGDU_EQ_QB
:
23470 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23471 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23472 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23473 tcg_gen_shli_tl(t1
, t1
, 24);
23474 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23476 case OPC_CMPGDU_LT_QB
:
23478 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23479 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23480 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23481 tcg_gen_shli_tl(t1
, t1
, 24);
23482 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23484 case OPC_CMPGDU_LE_QB
:
23486 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23487 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23488 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23489 tcg_gen_shli_tl(t1
, t1
, 24);
23490 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23492 case OPC_CMP_EQ_PH
:
23494 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23496 case OPC_CMP_LT_PH
:
23498 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23500 case OPC_CMP_LE_PH
:
23502 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23506 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23510 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23512 case OPC_PACKRL_PH
:
23514 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
/* 64-bit compare/pick variants. */
23518 #ifdef TARGET_MIPS64
23519 case OPC_CMPU_EQ_OB_DSP
:
23521 case OPC_CMP_EQ_PW
:
23523 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23525 case OPC_CMP_LT_PW
:
23527 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23529 case OPC_CMP_LE_PW
:
23531 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23533 case OPC_CMP_EQ_QH
:
23535 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23537 case OPC_CMP_LT_QH
:
23539 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23541 case OPC_CMP_LE_QH
:
23543 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23545 case OPC_CMPGDU_EQ_OB
:
23547 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23549 case OPC_CMPGDU_LT_OB
:
23551 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23553 case OPC_CMPGDU_LE_OB
:
23555 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23557 case OPC_CMPGU_EQ_OB
:
23559 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23561 case OPC_CMPGU_LT_OB
:
23563 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23565 case OPC_CMPGU_LE_OB
:
23567 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23569 case OPC_CMPU_EQ_OB
:
23571 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23573 case OPC_CMPU_LT_OB
:
23575 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23577 case OPC_CMPU_LE_OB
:
23579 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23581 case OPC_PACKRL_PW
:
23583 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23587 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23591 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23595 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Release operand temporaries; t1's free is not visible here. */
23603 tcg_temp_free(v1_t
);
23604 tcg_temp_free(v2_t
);
/*
 * Emit TCG ops for the MIPS DSP ASE APPEND class (APPEND/PREPEND/
 * BALIGN, and the 64-bit DAPPEND group): combine bits of rs (in t0)
 * with the current value of rt, by deposit or by shift/or sequences.
 * sa is the bit (or, for BALIGN, byte) position taken from the insn.
 *
 * NOTE(review): line-fragmented extraction; braces/break lines are
 * missing between cases — verify structure against upstream.
 */
23607 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23608 uint32_t op1
, int rt
, int rs
, int sa
)
23615 /* Treat as NOP. */
23619 t0
= tcg_temp_new();
23620 gen_load_gpr(t0
, rs
);
23623 case OPC_APPEND_DSP
:
23624 switch (MASK_APPEND(ctx
->opcode
)) {
/* APPEND: rt = (rt << sa) | low sa bits of rs, kept 32-bit signed. */
23627 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23629 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
/* PREPEND: shift rt right by sa, insert rs bits at the top. */
23633 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23634 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23635 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23636 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23638 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
/* BALIGN: byte-align rt with rs; sa==0/2 are no-op encodings. */
23642 if (sa
!= 0 && sa
!= 2) {
23643 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23644 tcg_gen_ext32u_tl(t0
, t0
);
23645 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23646 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23648 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23650 default: /* Invalid */
23651 MIPS_INVAL("MASK APPEND");
23652 generate_exception_end(ctx
, EXCP_RI
);
23656 #ifdef TARGET_MIPS64
23657 case OPC_DAPPEND_DSP
:
23658 switch (MASK_DAPPEND(ctx
->opcode
)) {
23661 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23665 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23666 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
/* NOTE(review): or(dst, t0, t0) discards the shifted cpu_gpr[rt]
 * computed just above — the parallel case below uses
 * or(cpu_gpr[rt], cpu_gpr[rt], t0).  Looks like a bug; confirm
 * against upstream QEMU before changing. */
23667 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23671 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23672 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23673 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
/* DBALIGN: sa values 0/2/4 are no-op encodings. */
23678 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23679 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23680 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23681 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23684 default: /* Invalid */
23685 MIPS_INVAL("MASK DAPPEND");
23686 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Emit TCG ops for the MIPS DSP ASE accumulator-access class:
 * EXTR/EXTP/EXTPDP (and the *V variable-shift variants), SHILO,
 * MTHLIP, WRDSP/RDDSP, plus the 64-bit DEXTR/DSHILO/DMTHLIP group.
 * Immediate variants materialize the shift/size fields into t0/t1;
 * *V variants pass the rs register value v1_t for the shift amount.
 *
 * NOTE(review): line-fragmented extraction; braces/break lines are
 * missing between cases — verify structure against upstream.
 */
23695 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23696 int ret
, int v1
, int v2
, int check_ret
)
23705 if ((ret
== 0) && (check_ret
== 1)) {
23706 /* Treat as NOP. */
23710 t0
= tcg_temp_new();
23711 t1
= tcg_temp_new();
23712 v1_t
= tcg_temp_new();
23713 v2_t
= tcg_temp_new();
23715 gen_load_gpr(v1_t
, v1
);
23716 gen_load_gpr(v2_t
, v2
);
23719 case OPC_EXTR_W_DSP
:
/* Immediate forms: t0 = accumulator field (v2), t1 = shift (v1). */
23723 tcg_gen_movi_tl(t0
, v2
);
23724 tcg_gen_movi_tl(t1
, v1
);
23725 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23728 tcg_gen_movi_tl(t0
, v2
);
23729 tcg_gen_movi_tl(t1
, v1
);
23730 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23732 case OPC_EXTR_RS_W
:
23733 tcg_gen_movi_tl(t0
, v2
);
23734 tcg_gen_movi_tl(t1
, v1
);
23735 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23738 tcg_gen_movi_tl(t0
, v2
);
23739 tcg_gen_movi_tl(t1
, v1
);
23740 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
/* Variable forms: shift amount comes from register v1_t. */
23742 case OPC_EXTRV_S_H
:
23743 tcg_gen_movi_tl(t0
, v2
);
23744 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23747 tcg_gen_movi_tl(t0
, v2
);
23748 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23750 case OPC_EXTRV_R_W
:
23751 tcg_gen_movi_tl(t0
, v2
);
23752 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23754 case OPC_EXTRV_RS_W
:
23755 tcg_gen_movi_tl(t0
, v2
);
23756 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23759 tcg_gen_movi_tl(t0
, v2
);
23760 tcg_gen_movi_tl(t1
, v1
);
23761 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23764 tcg_gen_movi_tl(t0
, v2
);
23765 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23768 tcg_gen_movi_tl(t0
, v2
);
23769 tcg_gen_movi_tl(t1
, v1
);
23770 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23773 tcg_gen_movi_tl(t0
, v2
);
23774 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
/* SHILO: shift-amount immediate is in opcode bits 25:20. */
23777 imm
= (ctx
->opcode
>> 20) & 0x3F;
23778 tcg_gen_movi_tl(t0
, ret
);
23779 tcg_gen_movi_tl(t1
, imm
);
23780 gen_helper_shilo(t0
, t1
, cpu_env
);
23783 tcg_gen_movi_tl(t0
, ret
);
23784 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23787 tcg_gen_movi_tl(t0
, ret
);
23788 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
/* WRDSP: 10-bit mask from opcode bits 20:11. */
23791 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23792 tcg_gen_movi_tl(t0
, imm
);
23793 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
/* RDDSP: 10-bit mask from opcode bits 25:16. */
23796 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23797 tcg_gen_movi_tl(t0
, imm
);
23798 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23802 #ifdef TARGET_MIPS64
23803 case OPC_DEXTR_W_DSP
:
23807 tcg_gen_movi_tl(t0
, ret
);
23808 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23812 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23813 int ac
= (ctx
->opcode
>> 11) & 0x03;
23814 tcg_gen_movi_tl(t0
, shift
);
23815 tcg_gen_movi_tl(t1
, ac
);
23816 gen_helper_dshilo(t0
, t1
, cpu_env
);
23821 int ac
= (ctx
->opcode
>> 11) & 0x03;
23822 tcg_gen_movi_tl(t0
, ac
);
23823 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23827 tcg_gen_movi_tl(t0
, v2
);
23828 tcg_gen_movi_tl(t1
, v1
);
23830 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23833 tcg_gen_movi_tl(t0
, v2
);
23834 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23837 tcg_gen_movi_tl(t0
, v2
);
23838 tcg_gen_movi_tl(t1
, v1
);
23839 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23842 tcg_gen_movi_tl(t0
, v2
);
23843 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23846 tcg_gen_movi_tl(t0
, v2
);
23847 tcg_gen_movi_tl(t1
, v1
);
23848 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23850 case OPC_DEXTR_R_L
:
23851 tcg_gen_movi_tl(t0
, v2
);
23852 tcg_gen_movi_tl(t1
, v1
);
23853 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23855 case OPC_DEXTR_RS_L
:
23856 tcg_gen_movi_tl(t0
, v2
);
23857 tcg_gen_movi_tl(t1
, v1
);
23858 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23861 tcg_gen_movi_tl(t0
, v2
);
23862 tcg_gen_movi_tl(t1
, v1
);
23863 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23865 case OPC_DEXTR_R_W
:
23866 tcg_gen_movi_tl(t0
, v2
);
23867 tcg_gen_movi_tl(t1
, v1
);
23868 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23870 case OPC_DEXTR_RS_W
:
23871 tcg_gen_movi_tl(t0
, v2
);
23872 tcg_gen_movi_tl(t1
, v1
);
23873 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23875 case OPC_DEXTR_S_H
:
23876 tcg_gen_movi_tl(t0
, v2
);
23877 tcg_gen_movi_tl(t1
, v1
);
23878 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
/* NOTE(review): DEXTRV_S_H loads the *immediate* v1 into t1, unlike
 * every other DEXTRV_* variable variant below, which passes the
 * register value v1_t.  Looks like a copy of the DEXTR_S_H immediate
 * form — confirm against upstream QEMU before changing. */
23880 case OPC_DEXTRV_S_H
:
23881 tcg_gen_movi_tl(t0
, v2
);
23882 tcg_gen_movi_tl(t1
, v1
);
23883 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23886 tcg_gen_movi_tl(t0
, v2
);
23887 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23889 case OPC_DEXTRV_R_L
:
23890 tcg_gen_movi_tl(t0
, v2
);
23891 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23893 case OPC_DEXTRV_RS_L
:
23894 tcg_gen_movi_tl(t0
, v2
);
23895 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23898 tcg_gen_movi_tl(t0
, v2
);
23899 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23901 case OPC_DEXTRV_R_W
:
23902 tcg_gen_movi_tl(t0
, v2
);
23903 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23905 case OPC_DEXTRV_RS_W
:
23906 tcg_gen_movi_tl(t0
, v2
);
23907 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
/* Release operand temporaries; t0/t1 frees are not visible here. */
23916 tcg_temp_free(v1_t
);
23917 tcg_temp_free(v2_t
);
23920 /* End MIPSDSP functions. */
/*
 * Decode the MIPS32/MIPS64 Release 6 view of the SPECIAL major
 * opcode: LSA/DLSA, the R6 MUL/DIV family, SELEQZ/SELNEZ conditional
 * moves, CLZ/CLO (which share encodings with pre-R6 MFHI/MTHI and so
 * need extra field checks), and SDBBP (with UHI semihosting hook).
 * Anything unrecognized raises a Reserved Instruction exception.
 *
 * NOTE(review): line-fragmented extraction; the switch header and
 * several case labels/breaks are missing — verify against upstream.
 */
23922 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23924 int rs
, rt
, rd
, sa
;
/* Standard R-type field extraction. */
23927 rs
= (ctx
->opcode
>> 21) & 0x1f;
23928 rt
= (ctx
->opcode
>> 16) & 0x1f;
23929 rd
= (ctx
->opcode
>> 11) & 0x1f;
23930 sa
= (ctx
->opcode
>> 6) & 0x1f;
23932 op1
= MASK_SPECIAL(ctx
->opcode
);
23935 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23941 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23951 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23954 MIPS_INVAL("special_r6 muldiv");
23955 generate_exception_end(ctx
, EXCP_RI
);
23961 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23965 if (rt
== 0 && sa
== 1) {
23966 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23967 We need additionally to check other fields */
23968 gen_cl(ctx
, op1
, rd
, rs
);
23970 generate_exception_end(ctx
, EXCP_RI
);
/* SDBBP: UHI semihosting trap, else SBRI-gated debug breakpoint. */
23974 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23975 gen_helper_do_semihosting(cpu_env
);
23977 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23978 generate_exception_end(ctx
, EXCP_RI
);
23980 generate_exception_end(ctx
, EXCP_DBp
);
/* 64-bit-only R6 encodings. */
23984 #if defined(TARGET_MIPS64)
23986 check_mips_64(ctx
);
23987 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23991 if (rt
== 0 && sa
== 1) {
23992 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23993 We need additionally to check other fields */
23994 check_mips_64(ctx
);
23995 gen_cl(ctx
, op1
, rd
, rs
);
23997 generate_exception_end(ctx
, EXCP_RI
);
24005 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24015 check_mips_64(ctx
);
24016 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24019 MIPS_INVAL("special_r6 muldiv");
24020 generate_exception_end(ctx
, EXCP_RI
);
24025 default: /* Invalid */
24026 MIPS_INVAL("special_r6");
24027 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Decode the Toshiba TX79 (R5900 / PlayStation 2 "Emotion Engine")
 * view of the SPECIAL major opcode: conditional moves, HI/LO moves
 * (always accumulator 0), the MULT/MULTU three-operand forms via
 * gen_mul_txx9, DIV/DIVU, and branch encodings.  Unknown encodings
 * raise a Reserved Instruction exception.
 *
 * NOTE(review): line-fragmented extraction; the switch header and
 * some case labels/breaks are missing — verify against upstream.
 */
24032 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
24034 int rs
= extract32(ctx
->opcode
, 21, 5);
24035 int rt
= extract32(ctx
->opcode
, 16, 5);
24036 int rd
= extract32(ctx
->opcode
, 11, 5);
24037 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
24040 case OPC_MOVN
: /* Conditional move */
24042 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24044 case OPC_MFHI
: /* Move from HI/LO */
24046 gen_HILO(ctx
, op1
, 0, rd
);
24049 case OPC_MTLO
: /* Move to HI/LO */
24050 gen_HILO(ctx
, op1
, 0, rs
);
24054 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
24058 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24060 #if defined(TARGET_MIPS64)
24065 check_insn_opc_user_only(ctx
, INSN_R5900
);
24066 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24070 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24072 default: /* Invalid */
24073 MIPS_INVAL("special_tx79");
24074 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Decode the pre-R6 ("legacy") view of the SPECIAL major opcode:
 * MOVN/MOVZ conditional moves (MIPS IV+/Loongson), MFHI/MFLO and
 * MTHI/MTLO (with the DSP accumulator index in the low 2 bits of
 * rs/rd), MOVCI (FP condition move, gated on CP0_Config1 FP bit and
 * CP1 being enabled), the VR54xx multiply family, MULT/DIV forms,
 * 64-bit multiplies, branch-likely/jump encodings, and SPIM (always
 * a Reserved Instruction here).
 *
 * NOTE(review): line-fragmented extraction; the switch header and
 * some case labels/breaks are missing — verify against upstream.
 */
24079 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24081 int rs
, rt
, rd
, sa
;
24084 rs
= (ctx
->opcode
>> 21) & 0x1f;
24085 rt
= (ctx
->opcode
>> 16) & 0x1f;
24086 rd
= (ctx
->opcode
>> 11) & 0x1f;
24087 sa
= (ctx
->opcode
>> 6) & 0x1f;
24089 op1
= MASK_SPECIAL(ctx
->opcode
);
24091 case OPC_MOVN
: /* Conditional move */
24093 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
24094 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24095 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24097 case OPC_MFHI
: /* Move from HI/LO */
/* rs&3 / rd&3 select the DSP accumulator (ac0-ac3). */
24099 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24102 case OPC_MTLO
: /* Move to HI/LO */
24103 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24106 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24107 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24108 check_cp1_enabled(ctx
);
24109 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24110 (ctx
->opcode
>> 16) & 1);
24112 generate_exception_err(ctx
, EXCP_CpU
, 1);
24118 check_insn(ctx
, INSN_VR54XX
);
24119 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24120 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24122 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24127 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24129 #if defined(TARGET_MIPS64)
24134 check_insn(ctx
, ISA_MIPS3
);
24135 check_mips_64(ctx
);
24136 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24140 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24143 #ifdef MIPS_STRICT_STANDARD
24144 MIPS_INVAL("SPIM");
24145 generate_exception_end(ctx
, EXCP_RI
);
24147 /* Implemented as RI exception for now. */
24148 MIPS_INVAL("spim (unofficial)");
24149 generate_exception_end(ctx
, EXCP_RI
);
24152 default: /* Invalid */
24153 MIPS_INVAL("special_legacy");
24154 generate_exception_end(ctx
, EXCP_RI
);
24159 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24161 int rs
, rt
, rd
, sa
;
24164 rs
= (ctx
->opcode
>> 21) & 0x1f;
24165 rt
= (ctx
->opcode
>> 16) & 0x1f;
24166 rd
= (ctx
->opcode
>> 11) & 0x1f;
24167 sa
= (ctx
->opcode
>> 6) & 0x1f;
24169 op1
= MASK_SPECIAL(ctx
->opcode
);
24171 case OPC_SLL
: /* Shift with immediate */
24172 if (sa
== 5 && rd
== 0 &&
24173 rs
== 0 && rt
== 0) { /* PAUSE */
24174 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24175 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24176 generate_exception_end(ctx
, EXCP_RI
);
24182 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24185 switch ((ctx
->opcode
>> 21) & 0x1f) {
24187 /* rotr is decoded as srl on non-R2 CPUs */
24188 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24193 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24196 generate_exception_end(ctx
, EXCP_RI
);
24204 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24206 case OPC_SLLV
: /* Shifts */
24208 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24211 switch ((ctx
->opcode
>> 6) & 0x1f) {
24213 /* rotrv is decoded as srlv on non-R2 CPUs */
24214 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24219 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24222 generate_exception_end(ctx
, EXCP_RI
);
24226 case OPC_SLT
: /* Set on less than */
24228 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24230 case OPC_AND
: /* Logic*/
24234 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24237 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24239 case OPC_TGE
: /* Traps */
24245 check_insn(ctx
, ISA_MIPS2
);
24246 gen_trap(ctx
, op1
, rs
, rt
, -1);
24248 case OPC_LSA
: /* OPC_PMON */
24249 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24250 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24251 decode_opc_special_r6(env
, ctx
);
24253 /* Pmon entry point, also R4010 selsl */
24254 #ifdef MIPS_STRICT_STANDARD
24255 MIPS_INVAL("PMON / selsl");
24256 generate_exception_end(ctx
, EXCP_RI
);
24258 gen_helper_0e0i(pmon
, sa
);
24263 generate_exception_end(ctx
, EXCP_SYSCALL
);
24266 generate_exception_end(ctx
, EXCP_BREAK
);
24269 check_insn(ctx
, ISA_MIPS2
);
24270 gen_sync(extract32(ctx
->opcode
, 6, 5));
24273 #if defined(TARGET_MIPS64)
24274 /* MIPS64 specific opcodes */
24279 check_insn(ctx
, ISA_MIPS3
);
24280 check_mips_64(ctx
);
24281 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24284 switch ((ctx
->opcode
>> 21) & 0x1f) {
24286 /* drotr is decoded as dsrl on non-R2 CPUs */
24287 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24292 check_insn(ctx
, ISA_MIPS3
);
24293 check_mips_64(ctx
);
24294 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24297 generate_exception_end(ctx
, EXCP_RI
);
24302 switch ((ctx
->opcode
>> 21) & 0x1f) {
24304 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24305 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24310 check_insn(ctx
, ISA_MIPS3
);
24311 check_mips_64(ctx
);
24312 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24315 generate_exception_end(ctx
, EXCP_RI
);
24323 check_insn(ctx
, ISA_MIPS3
);
24324 check_mips_64(ctx
);
24325 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24329 check_insn(ctx
, ISA_MIPS3
);
24330 check_mips_64(ctx
);
24331 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24334 switch ((ctx
->opcode
>> 6) & 0x1f) {
24336 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24337 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24342 check_insn(ctx
, ISA_MIPS3
);
24343 check_mips_64(ctx
);
24344 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24347 generate_exception_end(ctx
, EXCP_RI
);
24352 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24353 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24354 decode_opc_special_r6(env
, ctx
);
24359 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24360 decode_opc_special_r6(env
, ctx
);
24361 } else if (ctx
->insn_flags
& INSN_R5900
) {
24362 decode_opc_special_tx79(env
, ctx
);
24364 decode_opc_special_legacy(env
, ctx
);
24370 #if !defined(TARGET_MIPS64)
24372 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24373 #define MXU_APTN1_A 0
24374 #define MXU_APTN1_S 1
24376 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24377 #define MXU_APTN2_AA 0
24378 #define MXU_APTN2_AS 1
24379 #define MXU_APTN2_SA 2
24380 #define MXU_APTN2_SS 3
24382 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24383 #define MXU_EPTN2_AA 0
24384 #define MXU_EPTN2_AS 1
24385 #define MXU_EPTN2_SA 2
24386 #define MXU_EPTN2_SS 3
24388 /* MXU operand getting pattern 'optn2' */
24389 #define MXU_OPTN2_PTN0 0
24390 #define MXU_OPTN2_PTN1 1
24391 #define MXU_OPTN2_PTN2 2
24392 #define MXU_OPTN2_PTN3 3
24393 /* alternative naming scheme for 'optn2' */
24394 #define MXU_OPTN2_WW 0
24395 #define MXU_OPTN2_LW 1
24396 #define MXU_OPTN2_HW 2
24397 #define MXU_OPTN2_XW 3
24399 /* MXU operand getting pattern 'optn3' */
24400 #define MXU_OPTN3_PTN0 0
24401 #define MXU_OPTN3_PTN1 1
24402 #define MXU_OPTN3_PTN2 2
24403 #define MXU_OPTN3_PTN3 3
24404 #define MXU_OPTN3_PTN4 4
24405 #define MXU_OPTN3_PTN5 5
24406 #define MXU_OPTN3_PTN6 6
24407 #define MXU_OPTN3_PTN7 7
24411 * S32I2M XRa, rb - Register move from GRF to XRF
24413 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24418 t0
= tcg_temp_new();
24420 XRa
= extract32(ctx
->opcode
, 6, 5);
24421 Rb
= extract32(ctx
->opcode
, 16, 5);
24423 gen_load_gpr(t0
, Rb
);
24425 gen_store_mxu_gpr(t0
, XRa
);
24426 } else if (XRa
== 16) {
24427 gen_store_mxu_cr(t0
);
24434 * S32M2I XRa, rb - Register move from XRF to GRF
24436 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24441 t0
= tcg_temp_new();
24443 XRa
= extract32(ctx
->opcode
, 6, 5);
24444 Rb
= extract32(ctx
->opcode
, 16, 5);
24447 gen_load_mxu_gpr(t0
, XRa
);
24448 } else if (XRa
== 16) {
24449 gen_load_mxu_cr(t0
);
24452 gen_store_gpr(t0
, Rb
);
24458 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24460 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24463 uint32_t XRa
, Rb
, s8
, optn3
;
24465 t0
= tcg_temp_new();
24466 t1
= tcg_temp_new();
24468 XRa
= extract32(ctx
->opcode
, 6, 4);
24469 s8
= extract32(ctx
->opcode
, 10, 8);
24470 optn3
= extract32(ctx
->opcode
, 18, 3);
24471 Rb
= extract32(ctx
->opcode
, 21, 5);
24473 gen_load_gpr(t0
, Rb
);
24474 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24477 /* XRa[7:0] = tmp8 */
24478 case MXU_OPTN3_PTN0
:
24479 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24480 gen_load_mxu_gpr(t0
, XRa
);
24481 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24483 /* XRa[15:8] = tmp8 */
24484 case MXU_OPTN3_PTN1
:
24485 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24486 gen_load_mxu_gpr(t0
, XRa
);
24487 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24489 /* XRa[23:16] = tmp8 */
24490 case MXU_OPTN3_PTN2
:
24491 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24492 gen_load_mxu_gpr(t0
, XRa
);
24493 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24495 /* XRa[31:24] = tmp8 */
24496 case MXU_OPTN3_PTN3
:
24497 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24498 gen_load_mxu_gpr(t0
, XRa
);
24499 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24501 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24502 case MXU_OPTN3_PTN4
:
24503 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24504 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24506 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24507 case MXU_OPTN3_PTN5
:
24508 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24509 tcg_gen_shli_tl(t1
, t1
, 8);
24510 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24512 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24513 case MXU_OPTN3_PTN6
:
24514 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24515 tcg_gen_mov_tl(t0
, t1
);
24516 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24517 tcg_gen_shli_tl(t1
, t1
, 16);
24518 tcg_gen_or_tl(t0
, t0
, t1
);
24520 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24521 case MXU_OPTN3_PTN7
:
24522 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24523 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24524 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24528 gen_store_mxu_gpr(t0
, XRa
);
24535 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24537 static void gen_mxu_d16mul(DisasContext
*ctx
)
24539 TCGv t0
, t1
, t2
, t3
;
24540 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24542 t0
= tcg_temp_new();
24543 t1
= tcg_temp_new();
24544 t2
= tcg_temp_new();
24545 t3
= tcg_temp_new();
24547 XRa
= extract32(ctx
->opcode
, 6, 4);
24548 XRb
= extract32(ctx
->opcode
, 10, 4);
24549 XRc
= extract32(ctx
->opcode
, 14, 4);
24550 XRd
= extract32(ctx
->opcode
, 18, 4);
24551 optn2
= extract32(ctx
->opcode
, 22, 2);
24553 gen_load_mxu_gpr(t1
, XRb
);
24554 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24555 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24556 gen_load_mxu_gpr(t3
, XRc
);
24557 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24558 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24561 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24562 tcg_gen_mul_tl(t3
, t1
, t3
);
24563 tcg_gen_mul_tl(t2
, t0
, t2
);
24565 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24566 tcg_gen_mul_tl(t3
, t0
, t3
);
24567 tcg_gen_mul_tl(t2
, t0
, t2
);
24569 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24570 tcg_gen_mul_tl(t3
, t1
, t3
);
24571 tcg_gen_mul_tl(t2
, t1
, t2
);
24573 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24574 tcg_gen_mul_tl(t3
, t0
, t3
);
24575 tcg_gen_mul_tl(t2
, t1
, t2
);
24578 gen_store_mxu_gpr(t3
, XRa
);
24579 gen_store_mxu_gpr(t2
, XRd
);
24588 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24591 static void gen_mxu_d16mac(DisasContext
*ctx
)
24593 TCGv t0
, t1
, t2
, t3
;
24594 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24596 t0
= tcg_temp_new();
24597 t1
= tcg_temp_new();
24598 t2
= tcg_temp_new();
24599 t3
= tcg_temp_new();
24601 XRa
= extract32(ctx
->opcode
, 6, 4);
24602 XRb
= extract32(ctx
->opcode
, 10, 4);
24603 XRc
= extract32(ctx
->opcode
, 14, 4);
24604 XRd
= extract32(ctx
->opcode
, 18, 4);
24605 optn2
= extract32(ctx
->opcode
, 22, 2);
24606 aptn2
= extract32(ctx
->opcode
, 24, 2);
24608 gen_load_mxu_gpr(t1
, XRb
);
24609 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24610 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24612 gen_load_mxu_gpr(t3
, XRc
);
24613 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24614 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24617 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24618 tcg_gen_mul_tl(t3
, t1
, t3
);
24619 tcg_gen_mul_tl(t2
, t0
, t2
);
24621 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24622 tcg_gen_mul_tl(t3
, t0
, t3
);
24623 tcg_gen_mul_tl(t2
, t0
, t2
);
24625 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24626 tcg_gen_mul_tl(t3
, t1
, t3
);
24627 tcg_gen_mul_tl(t2
, t1
, t2
);
24629 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24630 tcg_gen_mul_tl(t3
, t0
, t3
);
24631 tcg_gen_mul_tl(t2
, t1
, t2
);
24634 gen_load_mxu_gpr(t0
, XRa
);
24635 gen_load_mxu_gpr(t1
, XRd
);
24639 tcg_gen_add_tl(t3
, t0
, t3
);
24640 tcg_gen_add_tl(t2
, t1
, t2
);
24643 tcg_gen_add_tl(t3
, t0
, t3
);
24644 tcg_gen_sub_tl(t2
, t1
, t2
);
24647 tcg_gen_sub_tl(t3
, t0
, t3
);
24648 tcg_gen_add_tl(t2
, t1
, t2
);
24651 tcg_gen_sub_tl(t3
, t0
, t3
);
24652 tcg_gen_sub_tl(t2
, t1
, t2
);
24655 gen_store_mxu_gpr(t3
, XRa
);
24656 gen_store_mxu_gpr(t2
, XRd
);
24665 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24666 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24668 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24670 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24671 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24673 t0
= tcg_temp_new();
24674 t1
= tcg_temp_new();
24675 t2
= tcg_temp_new();
24676 t3
= tcg_temp_new();
24677 t4
= tcg_temp_new();
24678 t5
= tcg_temp_new();
24679 t6
= tcg_temp_new();
24680 t7
= tcg_temp_new();
24682 XRa
= extract32(ctx
->opcode
, 6, 4);
24683 XRb
= extract32(ctx
->opcode
, 10, 4);
24684 XRc
= extract32(ctx
->opcode
, 14, 4);
24685 XRd
= extract32(ctx
->opcode
, 18, 4);
24686 sel
= extract32(ctx
->opcode
, 22, 2);
24688 gen_load_mxu_gpr(t3
, XRb
);
24689 gen_load_mxu_gpr(t7
, XRc
);
24693 tcg_gen_ext8s_tl(t0
, t3
);
24694 tcg_gen_shri_tl(t3
, t3
, 8);
24695 tcg_gen_ext8s_tl(t1
, t3
);
24696 tcg_gen_shri_tl(t3
, t3
, 8);
24697 tcg_gen_ext8s_tl(t2
, t3
);
24698 tcg_gen_shri_tl(t3
, t3
, 8);
24699 tcg_gen_ext8s_tl(t3
, t3
);
24702 tcg_gen_ext8u_tl(t0
, t3
);
24703 tcg_gen_shri_tl(t3
, t3
, 8);
24704 tcg_gen_ext8u_tl(t1
, t3
);
24705 tcg_gen_shri_tl(t3
, t3
, 8);
24706 tcg_gen_ext8u_tl(t2
, t3
);
24707 tcg_gen_shri_tl(t3
, t3
, 8);
24708 tcg_gen_ext8u_tl(t3
, t3
);
24711 tcg_gen_ext8u_tl(t4
, t7
);
24712 tcg_gen_shri_tl(t7
, t7
, 8);
24713 tcg_gen_ext8u_tl(t5
, t7
);
24714 tcg_gen_shri_tl(t7
, t7
, 8);
24715 tcg_gen_ext8u_tl(t6
, t7
);
24716 tcg_gen_shri_tl(t7
, t7
, 8);
24717 tcg_gen_ext8u_tl(t7
, t7
);
24719 tcg_gen_mul_tl(t0
, t0
, t4
);
24720 tcg_gen_mul_tl(t1
, t1
, t5
);
24721 tcg_gen_mul_tl(t2
, t2
, t6
);
24722 tcg_gen_mul_tl(t3
, t3
, t7
);
24724 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
24725 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
24726 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
24727 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
24729 tcg_gen_shli_tl(t1
, t1
, 16);
24730 tcg_gen_shli_tl(t3
, t3
, 16);
24732 tcg_gen_or_tl(t0
, t0
, t1
);
24733 tcg_gen_or_tl(t1
, t2
, t3
);
24735 gen_store_mxu_gpr(t0
, XRd
);
24736 gen_store_mxu_gpr(t1
, XRa
);
24749 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
24750 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
24752 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
24755 uint32_t XRa
, Rb
, s12
, sel
;
24757 t0
= tcg_temp_new();
24758 t1
= tcg_temp_new();
24760 XRa
= extract32(ctx
->opcode
, 6, 4);
24761 s12
= extract32(ctx
->opcode
, 10, 10);
24762 sel
= extract32(ctx
->opcode
, 20, 1);
24763 Rb
= extract32(ctx
->opcode
, 21, 5);
24765 gen_load_gpr(t0
, Rb
);
24767 tcg_gen_movi_tl(t1
, s12
);
24768 tcg_gen_shli_tl(t1
, t1
, 2);
24770 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
24772 tcg_gen_add_tl(t1
, t0
, t1
);
24773 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
24777 tcg_gen_bswap32_tl(t1
, t1
);
24779 gen_store_mxu_gpr(t1
, XRa
);
24787 * MXU instruction category: logic
24788 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24790 * S32NOR S32AND S32OR S32XOR
24794 * S32NOR XRa, XRb, XRc
24795 * Update XRa with the result of logical bitwise 'nor' operation
24796 * applied to the content of XRb and XRc.
24798 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24799 * +-----------+---------+-----+-------+-------+-------+-----------+
24800 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24801 * +-----------+---------+-----+-------+-------+-------+-----------+
24803 static void gen_mxu_S32NOR(DisasContext
*ctx
)
24805 uint32_t pad
, XRc
, XRb
, XRa
;
24807 pad
= extract32(ctx
->opcode
, 21, 5);
24808 XRc
= extract32(ctx
->opcode
, 14, 4);
24809 XRb
= extract32(ctx
->opcode
, 10, 4);
24810 XRa
= extract32(ctx
->opcode
, 6, 4);
24812 if (unlikely(pad
!= 0)) {
24813 /* opcode padding incorrect -> do nothing */
24814 } else if (unlikely(XRa
== 0)) {
24815 /* destination is zero register -> do nothing */
24816 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24817 /* both operands zero registers -> just set destination to all 1s */
24818 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
24819 } else if (unlikely(XRb
== 0)) {
24820 /* XRb zero register -> just set destination to the negation of XRc */
24821 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24822 } else if (unlikely(XRc
== 0)) {
24823 /* XRa zero register -> just set destination to the negation of XRb */
24824 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24825 } else if (unlikely(XRb
== XRc
)) {
24826 /* both operands same -> just set destination to the negation of XRb */
24827 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24829 /* the most general case */
24830 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24835 * S32AND XRa, XRb, XRc
24836 * Update XRa with the result of logical bitwise 'and' operation
24837 * applied to the content of XRb and XRc.
24839 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24840 * +-----------+---------+-----+-------+-------+-------+-----------+
24841 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24842 * +-----------+---------+-----+-------+-------+-------+-----------+
24844 static void gen_mxu_S32AND(DisasContext
*ctx
)
24846 uint32_t pad
, XRc
, XRb
, XRa
;
24848 pad
= extract32(ctx
->opcode
, 21, 5);
24849 XRc
= extract32(ctx
->opcode
, 14, 4);
24850 XRb
= extract32(ctx
->opcode
, 10, 4);
24851 XRa
= extract32(ctx
->opcode
, 6, 4);
24853 if (unlikely(pad
!= 0)) {
24854 /* opcode padding incorrect -> do nothing */
24855 } else if (unlikely(XRa
== 0)) {
24856 /* destination is zero register -> do nothing */
24857 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24858 /* one of operands zero register -> just set destination to all 0s */
24859 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24860 } else if (unlikely(XRb
== XRc
)) {
24861 /* both operands same -> just set destination to one of them */
24862 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24864 /* the most general case */
24865 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24870 * S32OR XRa, XRb, XRc
24871 * Update XRa with the result of logical bitwise 'or' operation
24872 * applied to the content of XRb and XRc.
24874 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24875 * +-----------+---------+-----+-------+-------+-------+-----------+
24876 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24877 * +-----------+---------+-----+-------+-------+-------+-----------+
24879 static void gen_mxu_S32OR(DisasContext
*ctx
)
24881 uint32_t pad
, XRc
, XRb
, XRa
;
24883 pad
= extract32(ctx
->opcode
, 21, 5);
24884 XRc
= extract32(ctx
->opcode
, 14, 4);
24885 XRb
= extract32(ctx
->opcode
, 10, 4);
24886 XRa
= extract32(ctx
->opcode
, 6, 4);
24888 if (unlikely(pad
!= 0)) {
24889 /* opcode padding incorrect -> do nothing */
24890 } else if (unlikely(XRa
== 0)) {
24891 /* destination is zero register -> do nothing */
24892 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24893 /* both operands zero registers -> just set destination to all 0s */
24894 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24895 } else if (unlikely(XRb
== 0)) {
24896 /* XRb zero register -> just set destination to the content of XRc */
24897 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24898 } else if (unlikely(XRc
== 0)) {
24899 /* XRc zero register -> just set destination to the content of XRb */
24900 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24901 } else if (unlikely(XRb
== XRc
)) {
24902 /* both operands same -> just set destination to one of them */
24903 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24905 /* the most general case */
24906 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24911 * S32XOR XRa, XRb, XRc
24912 * Update XRa with the result of logical bitwise 'xor' operation
24913 * applied to the content of XRb and XRc.
24915 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24916 * +-----------+---------+-----+-------+-------+-------+-----------+
24917 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24918 * +-----------+---------+-----+-------+-------+-------+-----------+
24920 static void gen_mxu_S32XOR(DisasContext
*ctx
)
24922 uint32_t pad
, XRc
, XRb
, XRa
;
24924 pad
= extract32(ctx
->opcode
, 21, 5);
24925 XRc
= extract32(ctx
->opcode
, 14, 4);
24926 XRb
= extract32(ctx
->opcode
, 10, 4);
24927 XRa
= extract32(ctx
->opcode
, 6, 4);
24929 if (unlikely(pad
!= 0)) {
24930 /* opcode padding incorrect -> do nothing */
24931 } else if (unlikely(XRa
== 0)) {
24932 /* destination is zero register -> do nothing */
24933 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24934 /* both operands zero registers -> just set destination to all 0s */
24935 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24936 } else if (unlikely(XRb
== 0)) {
24937 /* XRb zero register -> just set destination to the content of XRc */
24938 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24939 } else if (unlikely(XRc
== 0)) {
24940 /* XRc zero register -> just set destination to the content of XRb */
24941 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24942 } else if (unlikely(XRb
== XRc
)) {
24943 /* both operands same -> just set destination to all 0s */
24944 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24946 /* the most general case */
24947 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24953 * MXU instruction category max/min
24954 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24956 * S32MAX D16MAX Q8MAX
24957 * S32MIN D16MIN Q8MIN
24961 * S32MAX XRa, XRb, XRc
24962 * Update XRa with the maximum of signed 32-bit integers contained
24965 * S32MIN XRa, XRb, XRc
24966 * Update XRa with the minimum of signed 32-bit integers contained
24969 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24970 * +-----------+---------+-----+-------+-------+-------+-----------+
24971 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
24972 * +-----------+---------+-----+-------+-------+-------+-----------+
24974 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
24976 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
24978 pad
= extract32(ctx
->opcode
, 21, 5);
24979 opc
= extract32(ctx
->opcode
, 18, 3);
24980 XRc
= extract32(ctx
->opcode
, 14, 4);
24981 XRb
= extract32(ctx
->opcode
, 10, 4);
24982 XRa
= extract32(ctx
->opcode
, 6, 4);
24984 if (unlikely(pad
!= 0)) {
24985 /* opcode padding incorrect -> do nothing */
24986 } else if (unlikely(XRa
== 0)) {
24987 /* destination is zero register -> do nothing */
24988 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24989 /* both operands zero registers -> just set destination to zero */
24990 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24991 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24992 /* exactly one operand is zero register - find which one is not...*/
24993 uint32_t XRx
= XRb
? XRb
: XRc
;
24994 /* ...and do max/min operation with one operand 0 */
24995 if (opc
== OPC_MXU_S32MAX
) {
24996 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
24998 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25000 } else if (unlikely(XRb
== XRc
)) {
25001 /* both operands same -> just set destination to one of them */
25002 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25004 /* the most general case */
25005 if (opc
== OPC_MXU_S32MAX
) {
25006 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25009 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25017 * Update XRa with the 16-bit-wise maximums of signed integers
25018 * contained in XRb and XRc.
25021 * Update XRa with the 16-bit-wise minimums of signed integers
25022 * contained in XRb and XRc.
25024 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25025 * +-----------+---------+-----+-------+-------+-------+-----------+
25026 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25027 * +-----------+---------+-----+-------+-------+-------+-----------+
25029 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
25031 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25033 pad
= extract32(ctx
->opcode
, 21, 5);
25034 opc
= extract32(ctx
->opcode
, 18, 3);
25035 XRc
= extract32(ctx
->opcode
, 14, 4);
25036 XRb
= extract32(ctx
->opcode
, 10, 4);
25037 XRa
= extract32(ctx
->opcode
, 6, 4);
25039 if (unlikely(pad
!= 0)) {
25040 /* opcode padding incorrect -> do nothing */
25041 } else if (unlikely(XRc
== 0)) {
25042 /* destination is zero register -> do nothing */
25043 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
25044 /* both operands zero registers -> just set destination to zero */
25045 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
25046 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
25047 /* exactly one operand is zero register - find which one is not...*/
25048 uint32_t XRx
= XRb
? XRb
: XRc
;
25049 /* ...and do half-word-wise max/min with one operand 0 */
25050 TCGv_i32 t0
= tcg_temp_new();
25051 TCGv_i32 t1
= tcg_const_i32(0);
25053 /* the left half-word first */
25054 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
25055 if (opc
== OPC_MXU_D16MAX
) {
25056 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25058 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25061 /* the right half-word */
25062 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
25063 /* move half-words to the leftmost position */
25064 tcg_gen_shli_i32(t0
, t0
, 16);
25065 /* t0 will be max/min of t0 and t1 */
25066 if (opc
== OPC_MXU_D16MAX
) {
25067 tcg_gen_smax_i32(t0
, t0
, t1
);
25069 tcg_gen_smin_i32(t0
, t0
, t1
);
25071 /* return resulting half-words to its original position */
25072 tcg_gen_shri_i32(t0
, t0
, 16);
25073 /* finaly update the destination */
25074 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25078 } else if (unlikely(XRb
== XRc
)) {
25079 /* both operands same -> just set destination to one of them */
25080 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25082 /* the most general case */
25083 TCGv_i32 t0
= tcg_temp_new();
25084 TCGv_i32 t1
= tcg_temp_new();
25086 /* the left half-word first */
25087 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25088 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25089 if (opc
== OPC_MXU_D16MAX
) {
25090 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25092 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25095 /* the right half-word */
25096 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25097 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25098 /* move half-words to the leftmost position */
25099 tcg_gen_shli_i32(t0
, t0
, 16);
25100 tcg_gen_shli_i32(t1
, t1
, 16);
25101 /* t0 will be max/min of t0 and t1 */
25102 if (opc
== OPC_MXU_D16MAX
) {
25103 tcg_gen_smax_i32(t0
, t0
, t1
);
25105 tcg_gen_smin_i32(t0
, t0
, t1
);
25107 /* return resulting half-words to its original position */
25108 tcg_gen_shri_i32(t0
, t0
, 16);
25109 /* finaly update the destination */
25110 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25119 * Update XRa with the 8-bit-wise maximums of signed integers
25120 * contained in XRb and XRc.
25123 * Update XRa with the 8-bit-wise minimums of signed integers
25124 * contained in XRb and XRc.
25126 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25127 * +-----------+---------+-----+-------+-------+-------+-----------+
25128 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25129 * +-----------+---------+-----+-------+-------+-------+-----------+
25131 static void gen_mxu_Q8MAX_Q8MIN(DisasContext
*ctx
)
25133 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25135 pad
= extract32(ctx
->opcode
, 21, 5);
25136 opc
= extract32(ctx
->opcode
, 18, 3);
25137 XRc
= extract32(ctx
->opcode
, 14, 4);
25138 XRb
= extract32(ctx
->opcode
, 10, 4);
25139 XRa
= extract32(ctx
->opcode
, 6, 4);
25141 if (unlikely(pad
!= 0)) {
25142 /* opcode padding incorrect -> do nothing */
25143 } else if (unlikely(XRa
== 0)) {
25144 /* destination is zero register -> do nothing */
25145 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25146 /* both operands zero registers -> just set destination to zero */
25147 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25148 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25149 /* exactly one operand is zero register - make it be the first...*/
25150 uint32_t XRx
= XRb
? XRb
: XRc
;
25151 /* ...and do byte-wise max/min with one operand 0 */
25152 TCGv_i32 t0
= tcg_temp_new();
25153 TCGv_i32 t1
= tcg_const_i32(0);
25156 /* the leftmost byte (byte 3) first */
25157 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF000000);
25158 if (opc
== OPC_MXU_Q8MAX
) {
25159 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25161 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25164 /* bytes 2, 1, 0 */
25165 for (i
= 2; i
>= 0; i
--) {
25166 /* extract the byte */
25167 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF << (8 * i
));
25168 /* move the byte to the leftmost position */
25169 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25170 /* t0 will be max/min of t0 and t1 */
25171 if (opc
== OPC_MXU_Q8MAX
) {
25172 tcg_gen_smax_i32(t0
, t0
, t1
);
25174 tcg_gen_smin_i32(t0
, t0
, t1
);
25176 /* return resulting byte to its original position */
25177 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25178 /* finaly update the destination */
25179 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25184 } else if (unlikely(XRb
== XRc
)) {
25185 /* both operands same -> just set destination to one of them */
25186 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25188 /* the most general case */
25189 TCGv_i32 t0
= tcg_temp_new();
25190 TCGv_i32 t1
= tcg_temp_new();
25193 /* the leftmost bytes (bytes 3) first */
25194 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF000000);
25195 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25196 if (opc
== OPC_MXU_Q8MAX
) {
25197 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25199 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25202 /* bytes 2, 1, 0 */
25203 for (i
= 2; i
>= 0; i
--) {
25204 /* extract corresponding bytes */
25205 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF << (8 * i
));
25206 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF << (8 * i
));
25207 /* move the bytes to the leftmost position */
25208 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25209 tcg_gen_shli_i32(t1
, t1
, 8 * (3 - i
));
25210 /* t0 will be max/min of t0 and t1 */
25211 if (opc
== OPC_MXU_Q8MAX
) {
25212 tcg_gen_smax_i32(t0
, t0
, t1
);
25214 tcg_gen_smin_i32(t0
, t0
, t1
);
25216 /* return resulting byte to its original position */
25217 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25218 /* finaly update the destination */
25219 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25229 * MXU instruction category: align
25230 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25236 * S32ALNI XRc, XRb, XRa, optn3
25237 * Arrange bytes from XRb and XRc according to one of five sets of
25238 * rules determined by optn3, and place the result in XRa.
25240 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25241 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25242 * | SPECIAL2 |optn3|0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25243 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25246 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25248 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25250 optn3
= extract32(ctx
->opcode
, 23, 3);
25251 pad
= extract32(ctx
->opcode
, 21, 2);
25252 XRc
= extract32(ctx
->opcode
, 14, 4);
25253 XRb
= extract32(ctx
->opcode
, 10, 4);
25254 XRa
= extract32(ctx
->opcode
, 6, 4);
25256 if (unlikely(pad
!= 0)) {
25257 /* opcode padding incorrect -> do nothing */
25258 } else if (unlikely(XRa
== 0)) {
25259 /* destination is zero register -> do nothing */
25260 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25261 /* both operands zero registers -> just set destination to all 0s */
25262 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25263 } else if (unlikely(XRb
== 0)) {
25264 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25266 case MXU_OPTN3_PTN0
:
25267 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25269 case MXU_OPTN3_PTN1
:
25270 case MXU_OPTN3_PTN2
:
25271 case MXU_OPTN3_PTN3
:
25272 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25275 case MXU_OPTN3_PTN4
:
25276 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25279 } else if (unlikely(XRc
== 0)) {
25280 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25282 case MXU_OPTN3_PTN0
:
25283 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25285 case MXU_OPTN3_PTN1
:
25286 case MXU_OPTN3_PTN2
:
25287 case MXU_OPTN3_PTN3
:
25288 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25290 case MXU_OPTN3_PTN4
:
25291 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25294 } else if (unlikely(XRb
== XRc
)) {
25295 /* both operands same -> just rotation or moving from any of them */
25297 case MXU_OPTN3_PTN0
:
25298 case MXU_OPTN3_PTN4
:
25299 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25301 case MXU_OPTN3_PTN1
:
25302 case MXU_OPTN3_PTN2
:
25303 case MXU_OPTN3_PTN3
:
25304 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25308 /* the most general case */
25310 case MXU_OPTN3_PTN0
:
25314 /* +---------------+ */
25315 /* | A B C D | E F G H */
25316 /* +-------+-------+ */
25321 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25324 case MXU_OPTN3_PTN1
:
25328 /* +-------------------+ */
25329 /* A | B C D E | F G H */
25330 /* +---------+---------+ */
25335 TCGv_i32 t0
= tcg_temp_new();
25336 TCGv_i32 t1
= tcg_temp_new();
25338 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25339 tcg_gen_shli_i32(t0
, t0
, 8);
25341 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25342 tcg_gen_shri_i32(t1
, t1
, 24);
25344 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25350 case MXU_OPTN3_PTN2
:
25354 /* +-------------------+ */
25355 /* A B | C D E F | G H */
25356 /* +---------+---------+ */
25361 TCGv_i32 t0
= tcg_temp_new();
25362 TCGv_i32 t1
= tcg_temp_new();
25364 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25365 tcg_gen_shli_i32(t0
, t0
, 16);
25367 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25368 tcg_gen_shri_i32(t1
, t1
, 16);
25370 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25376 case MXU_OPTN3_PTN3
:
25380 /* +-------------------+ */
25381 /* A B C | D E F G | H */
25382 /* +---------+---------+ */
25387 TCGv_i32 t0
= tcg_temp_new();
25388 TCGv_i32 t1
= tcg_temp_new();
25390 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25391 tcg_gen_shli_i32(t0
, t0
, 24);
25393 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25394 tcg_gen_shri_i32(t1
, t1
, 8);
25396 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25402 case MXU_OPTN3_PTN4
:
25406 /* +---------------+ */
25407 /* A B C D | E F G H | */
25408 /* +-------+-------+ */
25413 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25422 * Decoding engine for MXU
25423 * =======================
25428 * Decode MXU pool00
25430 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25431 * +-----------+---------+-----+-------+-------+-------+-----------+
25432 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25433 * +-----------+---------+-----+-------+-------+-------+-----------+
25436 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25438 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25441 case OPC_MXU_S32MAX
:
25442 case OPC_MXU_S32MIN
:
25443 gen_mxu_S32MAX_S32MIN(ctx
);
25445 case OPC_MXU_D16MAX
:
25446 case OPC_MXU_D16MIN
:
25447 gen_mxu_D16MAX_D16MIN(ctx
);
25449 case OPC_MXU_Q8MAX
:
25450 case OPC_MXU_Q8MIN
:
25451 gen_mxu_Q8MAX_Q8MIN(ctx
);
25453 case OPC_MXU_Q8SLT
:
25454 /* TODO: Implement emulation of Q8SLT instruction. */
25455 MIPS_INVAL("OPC_MXU_Q8SLT");
25456 generate_exception_end(ctx
, EXCP_RI
);
25458 case OPC_MXU_Q8SLTU
:
25459 /* TODO: Implement emulation of Q8SLTU instruction. */
25460 MIPS_INVAL("OPC_MXU_Q8SLTU");
25461 generate_exception_end(ctx
, EXCP_RI
);
25464 MIPS_INVAL("decode_opc_mxu");
25465 generate_exception_end(ctx
, EXCP_RI
);
25472 * Decode MXU pool01
25474 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25475 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25476 * +-----------+---------+-----+-------+-------+-------+-----------+
25477 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25478 * +-----------+---------+-----+-------+-------+-------+-----------+
25481 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25482 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25483 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25484 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25487 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25489 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25492 case OPC_MXU_S32SLT
:
25493 /* TODO: Implement emulation of S32SLT instruction. */
25494 MIPS_INVAL("OPC_MXU_S32SLT");
25495 generate_exception_end(ctx
, EXCP_RI
);
25497 case OPC_MXU_D16SLT
:
25498 /* TODO: Implement emulation of D16SLT instruction. */
25499 MIPS_INVAL("OPC_MXU_D16SLT");
25500 generate_exception_end(ctx
, EXCP_RI
);
25502 case OPC_MXU_D16AVG
:
25503 /* TODO: Implement emulation of D16AVG instruction. */
25504 MIPS_INVAL("OPC_MXU_D16AVG");
25505 generate_exception_end(ctx
, EXCP_RI
);
25507 case OPC_MXU_D16AVGR
:
25508 /* TODO: Implement emulation of D16AVGR instruction. */
25509 MIPS_INVAL("OPC_MXU_D16AVGR");
25510 generate_exception_end(ctx
, EXCP_RI
);
25512 case OPC_MXU_Q8AVG
:
25513 /* TODO: Implement emulation of Q8AVG instruction. */
25514 MIPS_INVAL("OPC_MXU_Q8AVG");
25515 generate_exception_end(ctx
, EXCP_RI
);
25517 case OPC_MXU_Q8AVGR
:
25518 /* TODO: Implement emulation of Q8AVGR instruction. */
25519 MIPS_INVAL("OPC_MXU_Q8AVGR");
25520 generate_exception_end(ctx
, EXCP_RI
);
25522 case OPC_MXU_Q8ADD
:
25523 /* TODO: Implement emulation of Q8ADD instruction. */
25524 MIPS_INVAL("OPC_MXU_Q8ADD");
25525 generate_exception_end(ctx
, EXCP_RI
);
25528 MIPS_INVAL("decode_opc_mxu");
25529 generate_exception_end(ctx
, EXCP_RI
);
25536 * Decode MXU pool02
25538 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25539 * +-----------+---------+-----+-------+-------+-------+-----------+
25540 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25541 * +-----------+---------+-----+-------+-------+-------+-----------+
25544 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25546 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25549 case OPC_MXU_S32CPS
:
25550 /* TODO: Implement emulation of S32CPS instruction. */
25551 MIPS_INVAL("OPC_MXU_S32CPS");
25552 generate_exception_end(ctx
, EXCP_RI
);
25554 case OPC_MXU_D16CPS
:
25555 /* TODO: Implement emulation of D16CPS instruction. */
25556 MIPS_INVAL("OPC_MXU_D16CPS");
25557 generate_exception_end(ctx
, EXCP_RI
);
25559 case OPC_MXU_Q8ABD
:
25560 /* TODO: Implement emulation of Q8ABD instruction. */
25561 MIPS_INVAL("OPC_MXU_Q8ABD");
25562 generate_exception_end(ctx
, EXCP_RI
);
25564 case OPC_MXU_Q16SAT
:
25565 /* TODO: Implement emulation of Q16SAT instruction. */
25566 MIPS_INVAL("OPC_MXU_Q16SAT");
25567 generate_exception_end(ctx
, EXCP_RI
);
25570 MIPS_INVAL("decode_opc_mxu");
25571 generate_exception_end(ctx
, EXCP_RI
);
25578 * Decode MXU pool03
25581 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25582 * +-----------+---+---+-------+-------+-------+-------+-----------+
25583 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25584 * +-----------+---+---+-------+-------+-------+-------+-----------+
25587 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25588 * +-----------+---+---+-------+-------+-------+-------+-----------+
25589 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25590 * +-----------+---+---+-------+-------+-------+-------+-----------+
25593 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25595 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25598 case OPC_MXU_D16MULF
:
25599 /* TODO: Implement emulation of D16MULF instruction. */
25600 MIPS_INVAL("OPC_MXU_D16MULF");
25601 generate_exception_end(ctx
, EXCP_RI
);
25603 case OPC_MXU_D16MULE
:
25604 /* TODO: Implement emulation of D16MULE instruction. */
25605 MIPS_INVAL("OPC_MXU_D16MULE");
25606 generate_exception_end(ctx
, EXCP_RI
);
25609 MIPS_INVAL("decode_opc_mxu");
25610 generate_exception_end(ctx
, EXCP_RI
);
25617 * Decode MXU pool04
25619 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25620 * +-----------+---------+-+-------------------+-------+-----------+
25621 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25622 * +-----------+---------+-+-------------------+-------+-----------+
25625 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25627 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25630 case OPC_MXU_S32LDD
:
25631 case OPC_MXU_S32LDDR
:
25632 gen_mxu_s32ldd_s32lddr(ctx
);
25635 MIPS_INVAL("decode_opc_mxu");
25636 generate_exception_end(ctx
, EXCP_RI
);
25643 * Decode MXU pool05
25645 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25646 * +-----------+---------+-+-------------------+-------+-----------+
25647 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25648 * +-----------+---------+-+-------------------+-------+-----------+
25651 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25653 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25656 case OPC_MXU_S32STD
:
25657 /* TODO: Implement emulation of S32STD instruction. */
25658 MIPS_INVAL("OPC_MXU_S32STD");
25659 generate_exception_end(ctx
, EXCP_RI
);
25661 case OPC_MXU_S32STDR
:
25662 /* TODO: Implement emulation of S32STDR instruction. */
25663 MIPS_INVAL("OPC_MXU_S32STDR");
25664 generate_exception_end(ctx
, EXCP_RI
);
25667 MIPS_INVAL("decode_opc_mxu");
25668 generate_exception_end(ctx
, EXCP_RI
);
25675 * Decode MXU pool06
25677 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25678 * +-----------+---------+---------+---+-------+-------+-----------+
25679 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25680 * +-----------+---------+---------+---+-------+-------+-----------+
25683 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25685 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25688 case OPC_MXU_S32LDDV
:
25689 /* TODO: Implement emulation of S32LDDV instruction. */
25690 MIPS_INVAL("OPC_MXU_S32LDDV");
25691 generate_exception_end(ctx
, EXCP_RI
);
25693 case OPC_MXU_S32LDDVR
:
25694 /* TODO: Implement emulation of S32LDDVR instruction. */
25695 MIPS_INVAL("OPC_MXU_S32LDDVR");
25696 generate_exception_end(ctx
, EXCP_RI
);
25699 MIPS_INVAL("decode_opc_mxu");
25700 generate_exception_end(ctx
, EXCP_RI
);
25707 * Decode MXU pool07
25709 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25710 * +-----------+---------+---------+---+-------+-------+-----------+
25711 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
25712 * +-----------+---------+---------+---+-------+-------+-----------+
25715 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
25717 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25720 case OPC_MXU_S32STDV
:
25721 /* TODO: Implement emulation of S32TDV instruction. */
25722 MIPS_INVAL("OPC_MXU_S32TDV");
25723 generate_exception_end(ctx
, EXCP_RI
);
25725 case OPC_MXU_S32STDVR
:
25726 /* TODO: Implement emulation of S32TDVR instruction. */
25727 MIPS_INVAL("OPC_MXU_S32TDVR");
25728 generate_exception_end(ctx
, EXCP_RI
);
25731 MIPS_INVAL("decode_opc_mxu");
25732 generate_exception_end(ctx
, EXCP_RI
);
25739 * Decode MXU pool08
25741 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25742 * +-----------+---------+-+-------------------+-------+-----------+
25743 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
25744 * +-----------+---------+-+-------------------+-------+-----------+
25747 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
25749 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25752 case OPC_MXU_S32LDI
:
25753 /* TODO: Implement emulation of S32LDI instruction. */
25754 MIPS_INVAL("OPC_MXU_S32LDI");
25755 generate_exception_end(ctx
, EXCP_RI
);
25757 case OPC_MXU_S32LDIR
:
25758 /* TODO: Implement emulation of S32LDIR instruction. */
25759 MIPS_INVAL("OPC_MXU_S32LDIR");
25760 generate_exception_end(ctx
, EXCP_RI
);
25763 MIPS_INVAL("decode_opc_mxu");
25764 generate_exception_end(ctx
, EXCP_RI
);
25771 * Decode MXU pool09
25773 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25774 * +-----------+---------+-+-------------------+-------+-----------+
25775 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
25776 * +-----------+---------+-+-------------------+-------+-----------+
25779 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
25781 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25784 case OPC_MXU_S32SDI
:
25785 /* TODO: Implement emulation of S32SDI instruction. */
25786 MIPS_INVAL("OPC_MXU_S32SDI");
25787 generate_exception_end(ctx
, EXCP_RI
);
25789 case OPC_MXU_S32SDIR
:
25790 /* TODO: Implement emulation of S32SDIR instruction. */
25791 MIPS_INVAL("OPC_MXU_S32SDIR");
25792 generate_exception_end(ctx
, EXCP_RI
);
25795 MIPS_INVAL("decode_opc_mxu");
25796 generate_exception_end(ctx
, EXCP_RI
);
25803 * Decode MXU pool10
25805 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25806 * +-----------+---------+---------+---+-------+-------+-----------+
25807 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
25808 * +-----------+---------+---------+---+-------+-------+-----------+
25811 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
25813 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25816 case OPC_MXU_S32LDIV
:
25817 /* TODO: Implement emulation of S32LDIV instruction. */
25818 MIPS_INVAL("OPC_MXU_S32LDIV");
25819 generate_exception_end(ctx
, EXCP_RI
);
25821 case OPC_MXU_S32LDIVR
:
25822 /* TODO: Implement emulation of S32LDIVR instruction. */
25823 MIPS_INVAL("OPC_MXU_S32LDIVR");
25824 generate_exception_end(ctx
, EXCP_RI
);
25827 MIPS_INVAL("decode_opc_mxu");
25828 generate_exception_end(ctx
, EXCP_RI
);
25835 * Decode MXU pool11
25837 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25838 * +-----------+---------+---------+---+-------+-------+-----------+
25839 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
25840 * +-----------+---------+---------+---+-------+-------+-----------+
25843 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
25845 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25848 case OPC_MXU_S32SDIV
:
25849 /* TODO: Implement emulation of S32SDIV instruction. */
25850 MIPS_INVAL("OPC_MXU_S32SDIV");
25851 generate_exception_end(ctx
, EXCP_RI
);
25853 case OPC_MXU_S32SDIVR
:
25854 /* TODO: Implement emulation of S32SDIVR instruction. */
25855 MIPS_INVAL("OPC_MXU_S32SDIVR");
25856 generate_exception_end(ctx
, EXCP_RI
);
25859 MIPS_INVAL("decode_opc_mxu");
25860 generate_exception_end(ctx
, EXCP_RI
);
25867 * Decode MXU pool12
25869 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25870 * +-----------+---+---+-------+-------+-------+-------+-----------+
25871 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
25872 * +-----------+---+---+-------+-------+-------+-------+-----------+
25875 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
25877 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25880 case OPC_MXU_D32ACC
:
25881 /* TODO: Implement emulation of D32ACC instruction. */
25882 MIPS_INVAL("OPC_MXU_D32ACC");
25883 generate_exception_end(ctx
, EXCP_RI
);
25885 case OPC_MXU_D32ACCM
:
25886 /* TODO: Implement emulation of D32ACCM instruction. */
25887 MIPS_INVAL("OPC_MXU_D32ACCM");
25888 generate_exception_end(ctx
, EXCP_RI
);
25890 case OPC_MXU_D32ASUM
:
25891 /* TODO: Implement emulation of D32ASUM instruction. */
25892 MIPS_INVAL("OPC_MXU_D32ASUM");
25893 generate_exception_end(ctx
, EXCP_RI
);
25896 MIPS_INVAL("decode_opc_mxu");
25897 generate_exception_end(ctx
, EXCP_RI
);
25904 * Decode MXU pool13
25906 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25907 * +-----------+---+---+-------+-------+-------+-------+-----------+
25908 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
25909 * +-----------+---+---+-------+-------+-------+-------+-----------+
25912 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
25914 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25917 case OPC_MXU_Q16ACC
:
25918 /* TODO: Implement emulation of Q16ACC instruction. */
25919 MIPS_INVAL("OPC_MXU_Q16ACC");
25920 generate_exception_end(ctx
, EXCP_RI
);
25922 case OPC_MXU_Q16ACCM
:
25923 /* TODO: Implement emulation of Q16ACCM instruction. */
25924 MIPS_INVAL("OPC_MXU_Q16ACCM");
25925 generate_exception_end(ctx
, EXCP_RI
);
25927 case OPC_MXU_Q16ASUM
:
25928 /* TODO: Implement emulation of Q16ASUM instruction. */
25929 MIPS_INVAL("OPC_MXU_Q16ASUM");
25930 generate_exception_end(ctx
, EXCP_RI
);
25933 MIPS_INVAL("decode_opc_mxu");
25934 generate_exception_end(ctx
, EXCP_RI
);
25941 * Decode MXU pool14
25944 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25945 * +-----------+---+---+-------+-------+-------+-------+-----------+
25946 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
25947 * +-----------+---+---+-------+-------+-------+-------+-----------+
25950 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25951 * +-----------+---+---+-------+-------+-------+-------+-----------+
25952 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
25953 * +-----------+---+---+-------+-------+-------+-------+-----------+
25956 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
25958 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25961 case OPC_MXU_Q8ADDE
:
25962 /* TODO: Implement emulation of Q8ADDE instruction. */
25963 MIPS_INVAL("OPC_MXU_Q8ADDE");
25964 generate_exception_end(ctx
, EXCP_RI
);
25966 case OPC_MXU_D8SUM
:
25967 /* TODO: Implement emulation of D8SUM instruction. */
25968 MIPS_INVAL("OPC_MXU_D8SUM");
25969 generate_exception_end(ctx
, EXCP_RI
);
25971 case OPC_MXU_D8SUMC
:
25972 /* TODO: Implement emulation of D8SUMC instruction. */
25973 MIPS_INVAL("OPC_MXU_D8SUMC");
25974 generate_exception_end(ctx
, EXCP_RI
);
25977 MIPS_INVAL("decode_opc_mxu");
25978 generate_exception_end(ctx
, EXCP_RI
);
25985 * Decode MXU pool15
25987 * S32MUL, S32MULU, S32EXTRV:
25988 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25989 * +-----------+---------+---------+---+-------+-------+-----------+
25990 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
25991 * +-----------+---------+---------+---+-------+-------+-----------+
25994 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25995 * +-----------+---------+---------+---+-------+-------+-----------+
25996 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
25997 * +-----------+---------+---------+---+-------+-------+-----------+
26000 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
26002 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
26005 case OPC_MXU_S32MUL
:
26006 /* TODO: Implement emulation of S32MUL instruction. */
26007 MIPS_INVAL("OPC_MXU_S32MUL");
26008 generate_exception_end(ctx
, EXCP_RI
);
26010 case OPC_MXU_S32MULU
:
26011 /* TODO: Implement emulation of S32MULU instruction. */
26012 MIPS_INVAL("OPC_MXU_S32MULU");
26013 generate_exception_end(ctx
, EXCP_RI
);
26015 case OPC_MXU_S32EXTR
:
26016 /* TODO: Implement emulation of S32EXTR instruction. */
26017 MIPS_INVAL("OPC_MXU_S32EXTR");
26018 generate_exception_end(ctx
, EXCP_RI
);
26020 case OPC_MXU_S32EXTRV
:
26021 /* TODO: Implement emulation of S32EXTRV instruction. */
26022 MIPS_INVAL("OPC_MXU_S32EXTRV");
26023 generate_exception_end(ctx
, EXCP_RI
);
26026 MIPS_INVAL("decode_opc_mxu");
26027 generate_exception_end(ctx
, EXCP_RI
);
26034 * Decode MXU pool16
26037 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26038 * +-----------+---------+-----+-------+-------+-------+-----------+
26039 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
26040 * +-----------+---------+-----+-------+-------+-------+-----------+
26043 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26044 * +-----------+---------+-----+-------+-------+-------+-----------+
26045 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
26046 * +-----------+---------+-----+-------+-------+-------+-----------+
26049 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26050 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26051 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26052 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26055 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26056 * +-----------+-----+---+-----+-------+---------------+-----------+
26057 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
26058 * +-----------+-----+---+-----+-------+---------------+-----------+
26060 * S32NOR, S32AND, S32OR, S32XOR:
26061 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26062 * +-----------+---------+-----+-------+-------+-------+-----------+
26063 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26064 * +-----------+---------+-----+-------+-------+-------+-----------+
26067 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
26069 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26072 case OPC_MXU_D32SARW
:
26073 /* TODO: Implement emulation of D32SARW instruction. */
26074 MIPS_INVAL("OPC_MXU_D32SARW");
26075 generate_exception_end(ctx
, EXCP_RI
);
26077 case OPC_MXU_S32ALN
:
26078 /* TODO: Implement emulation of S32ALN instruction. */
26079 MIPS_INVAL("OPC_MXU_S32ALN");
26080 generate_exception_end(ctx
, EXCP_RI
);
26082 case OPC_MXU_S32ALNI
:
26083 gen_mxu_S32ALNI(ctx
);
26085 case OPC_MXU_S32LUI
:
26086 /* TODO: Implement emulation of S32LUI instruction. */
26087 MIPS_INVAL("OPC_MXU_S32LUI");
26088 generate_exception_end(ctx
, EXCP_RI
);
26090 case OPC_MXU_S32NOR
:
26091 gen_mxu_S32NOR(ctx
);
26093 case OPC_MXU_S32AND
:
26094 gen_mxu_S32AND(ctx
);
26096 case OPC_MXU_S32OR
:
26097 gen_mxu_S32OR(ctx
);
26099 case OPC_MXU_S32XOR
:
26100 gen_mxu_S32XOR(ctx
);
26103 MIPS_INVAL("decode_opc_mxu");
26104 generate_exception_end(ctx
, EXCP_RI
);
26111 * Decode MXU pool17
26113 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26114 * +-----------+---------+---------+---+---------+-----+-----------+
 * |  SPECIAL2 |    rs   |    rt   |0 0|   rd    |x x x|MXU__POOL17|
26116 * +-----------+---------+---------+---+---------+-----+-----------+
26119 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
26121 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
26125 /* TODO: Implement emulation of LXW instruction. */
26126 MIPS_INVAL("OPC_MXU_LXW");
26127 generate_exception_end(ctx
, EXCP_RI
);
26130 /* TODO: Implement emulation of LXH instruction. */
26131 MIPS_INVAL("OPC_MXU_LXH");
26132 generate_exception_end(ctx
, EXCP_RI
);
26135 /* TODO: Implement emulation of LXHU instruction. */
26136 MIPS_INVAL("OPC_MXU_LXHU");
26137 generate_exception_end(ctx
, EXCP_RI
);
26140 /* TODO: Implement emulation of LXB instruction. */
26141 MIPS_INVAL("OPC_MXU_LXB");
26142 generate_exception_end(ctx
, EXCP_RI
);
26145 /* TODO: Implement emulation of LXBU instruction. */
26146 MIPS_INVAL("OPC_MXU_LXBU");
26147 generate_exception_end(ctx
, EXCP_RI
);
26150 MIPS_INVAL("decode_opc_mxu");
26151 generate_exception_end(ctx
, EXCP_RI
);
26157 * Decode MXU pool18
26159 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26160 * +-----------+---------+-----+-------+-------+-------+-----------+
26161 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
26162 * +-----------+---------+-----+-------+-------+-------+-----------+
26165 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
26167 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26170 case OPC_MXU_D32SLLV
:
26171 /* TODO: Implement emulation of D32SLLV instruction. */
26172 MIPS_INVAL("OPC_MXU_D32SLLV");
26173 generate_exception_end(ctx
, EXCP_RI
);
26175 case OPC_MXU_D32SLRV
:
26176 /* TODO: Implement emulation of D32SLRV instruction. */
26177 MIPS_INVAL("OPC_MXU_D32SLRV");
26178 generate_exception_end(ctx
, EXCP_RI
);
26180 case OPC_MXU_D32SARV
:
26181 /* TODO: Implement emulation of D32SARV instruction. */
26182 MIPS_INVAL("OPC_MXU_D32SARV");
26183 generate_exception_end(ctx
, EXCP_RI
);
26185 case OPC_MXU_Q16SLLV
:
26186 /* TODO: Implement emulation of Q16SLLV instruction. */
26187 MIPS_INVAL("OPC_MXU_Q16SLLV");
26188 generate_exception_end(ctx
, EXCP_RI
);
26190 case OPC_MXU_Q16SLRV
:
26191 /* TODO: Implement emulation of Q16SLRV instruction. */
26192 MIPS_INVAL("OPC_MXU_Q16SLRV");
26193 generate_exception_end(ctx
, EXCP_RI
);
26195 case OPC_MXU_Q16SARV
:
26196 /* TODO: Implement emulation of Q16SARV instruction. */
26197 MIPS_INVAL("OPC_MXU_Q16SARV");
26198 generate_exception_end(ctx
, EXCP_RI
);
26201 MIPS_INVAL("decode_opc_mxu");
26202 generate_exception_end(ctx
, EXCP_RI
);
26209 * Decode MXU pool19
26211 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26212 * +-----------+---+---+-------+-------+-------+-------+-----------+
26213 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
26214 * +-----------+---+---+-------+-------+-------+-------+-----------+
26217 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
26219 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26222 case OPC_MXU_Q8MUL
:
26223 case OPC_MXU_Q8MULSU
:
26224 gen_mxu_q8mul_q8mulsu(ctx
);
26227 MIPS_INVAL("decode_opc_mxu");
26228 generate_exception_end(ctx
, EXCP_RI
);
26235 * Decode MXU pool20
26237 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26238 * +-----------+---------+-----+-------+-------+-------+-----------+
26239 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
26240 * +-----------+---------+-----+-------+-------+-------+-----------+
26243 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
26245 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26248 case OPC_MXU_Q8MOVZ
:
26249 /* TODO: Implement emulation of Q8MOVZ instruction. */
26250 MIPS_INVAL("OPC_MXU_Q8MOVZ");
26251 generate_exception_end(ctx
, EXCP_RI
);
26253 case OPC_MXU_Q8MOVN
:
26254 /* TODO: Implement emulation of Q8MOVN instruction. */
26255 MIPS_INVAL("OPC_MXU_Q8MOVN");
26256 generate_exception_end(ctx
, EXCP_RI
);
26258 case OPC_MXU_D16MOVZ
:
26259 /* TODO: Implement emulation of D16MOVZ instruction. */
26260 MIPS_INVAL("OPC_MXU_D16MOVZ");
26261 generate_exception_end(ctx
, EXCP_RI
);
26263 case OPC_MXU_D16MOVN
:
26264 /* TODO: Implement emulation of D16MOVN instruction. */
26265 MIPS_INVAL("OPC_MXU_D16MOVN");
26266 generate_exception_end(ctx
, EXCP_RI
);
26268 case OPC_MXU_S32MOVZ
:
26269 /* TODO: Implement emulation of S32MOVZ instruction. */
26270 MIPS_INVAL("OPC_MXU_S32MOVZ");
26271 generate_exception_end(ctx
, EXCP_RI
);
26273 case OPC_MXU_S32MOVN
:
26274 /* TODO: Implement emulation of S32MOVN instruction. */
26275 MIPS_INVAL("OPC_MXU_S32MOVN");
26276 generate_exception_end(ctx
, EXCP_RI
);
26279 MIPS_INVAL("decode_opc_mxu");
26280 generate_exception_end(ctx
, EXCP_RI
);
26287 * Decode MXU pool21
26289 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26290 * +-----------+---+---+-------+-------+-------+-------+-----------+
26291 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26292 * +-----------+---+---+-------+-------+-------+-------+-----------+
26295 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
26297 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26300 case OPC_MXU_Q8MAC
:
26301 /* TODO: Implement emulation of Q8MAC instruction. */
26302 MIPS_INVAL("OPC_MXU_Q8MAC");
26303 generate_exception_end(ctx
, EXCP_RI
);
26305 case OPC_MXU_Q8MACSU
:
26306 /* TODO: Implement emulation of Q8MACSU instruction. */
26307 MIPS_INVAL("OPC_MXU_Q8MACSU");
26308 generate_exception_end(ctx
, EXCP_RI
);
26311 MIPS_INVAL("decode_opc_mxu");
26312 generate_exception_end(ctx
, EXCP_RI
);
26319 * Main MXU decoding function
26321 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26322 * +-----------+---------------------------------------+-----------+
26323 * | SPECIAL2 | |x x x x x x|
26324 * +-----------+---------------------------------------+-----------+
26327 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26330 * TODO: Investigate necessity of including handling of
26331 * CLZ, CLO, SDBB in this function, as they belong to
26332 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26334 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26336 if (opcode
== OPC__MXU_MUL
) {
26337 uint32_t rs
, rt
, rd
, op1
;
26339 rs
= extract32(ctx
->opcode
, 21, 5);
26340 rt
= extract32(ctx
->opcode
, 16, 5);
26341 rd
= extract32(ctx
->opcode
, 11, 5);
26342 op1
= MASK_SPECIAL2(ctx
->opcode
);
26344 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26349 if (opcode
== OPC_MXU_S32M2I
) {
26350 gen_mxu_s32m2i(ctx
);
26354 if (opcode
== OPC_MXU_S32I2M
) {
26355 gen_mxu_s32i2m(ctx
);
26360 TCGv t_mxu_cr
= tcg_temp_new();
26361 TCGLabel
*l_exit
= gen_new_label();
26363 gen_load_mxu_cr(t_mxu_cr
);
26364 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26365 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26368 case OPC_MXU_S32MADD
:
26369 /* TODO: Implement emulation of S32MADD instruction. */
26370 MIPS_INVAL("OPC_MXU_S32MADD");
26371 generate_exception_end(ctx
, EXCP_RI
);
26373 case OPC_MXU_S32MADDU
:
26374 /* TODO: Implement emulation of S32MADDU instruction. */
26375 MIPS_INVAL("OPC_MXU_S32MADDU");
26376 generate_exception_end(ctx
, EXCP_RI
);
26378 case OPC_MXU__POOL00
:
26379 decode_opc_mxu__pool00(env
, ctx
);
26381 case OPC_MXU_S32MSUB
:
26382 /* TODO: Implement emulation of S32MSUB instruction. */
26383 MIPS_INVAL("OPC_MXU_S32MSUB");
26384 generate_exception_end(ctx
, EXCP_RI
);
26386 case OPC_MXU_S32MSUBU
:
26387 /* TODO: Implement emulation of S32MSUBU instruction. */
26388 MIPS_INVAL("OPC_MXU_S32MSUBU");
26389 generate_exception_end(ctx
, EXCP_RI
);
26391 case OPC_MXU__POOL01
:
26392 decode_opc_mxu__pool01(env
, ctx
);
26394 case OPC_MXU__POOL02
:
26395 decode_opc_mxu__pool02(env
, ctx
);
26397 case OPC_MXU_D16MUL
:
26398 gen_mxu_d16mul(ctx
);
26400 case OPC_MXU__POOL03
:
26401 decode_opc_mxu__pool03(env
, ctx
);
26403 case OPC_MXU_D16MAC
:
26404 gen_mxu_d16mac(ctx
);
26406 case OPC_MXU_D16MACF
:
26407 /* TODO: Implement emulation of D16MACF instruction. */
26408 MIPS_INVAL("OPC_MXU_D16MACF");
26409 generate_exception_end(ctx
, EXCP_RI
);
26411 case OPC_MXU_D16MADL
:
26412 /* TODO: Implement emulation of D16MADL instruction. */
26413 MIPS_INVAL("OPC_MXU_D16MADL");
26414 generate_exception_end(ctx
, EXCP_RI
);
26416 case OPC_MXU_S16MAD
:
26417 /* TODO: Implement emulation of S16MAD instruction. */
26418 MIPS_INVAL("OPC_MXU_S16MAD");
26419 generate_exception_end(ctx
, EXCP_RI
);
26421 case OPC_MXU_Q16ADD
:
26422 /* TODO: Implement emulation of Q16ADD instruction. */
26423 MIPS_INVAL("OPC_MXU_Q16ADD");
26424 generate_exception_end(ctx
, EXCP_RI
);
26426 case OPC_MXU_D16MACE
:
26427 /* TODO: Implement emulation of D16MACE instruction. */
26428 MIPS_INVAL("OPC_MXU_D16MACE");
26429 generate_exception_end(ctx
, EXCP_RI
);
26431 case OPC_MXU__POOL04
:
26432 decode_opc_mxu__pool04(env
, ctx
);
26434 case OPC_MXU__POOL05
:
26435 decode_opc_mxu__pool05(env
, ctx
);
26437 case OPC_MXU__POOL06
:
26438 decode_opc_mxu__pool06(env
, ctx
);
26440 case OPC_MXU__POOL07
:
26441 decode_opc_mxu__pool07(env
, ctx
);
26443 case OPC_MXU__POOL08
:
26444 decode_opc_mxu__pool08(env
, ctx
);
26446 case OPC_MXU__POOL09
:
26447 decode_opc_mxu__pool09(env
, ctx
);
26449 case OPC_MXU__POOL10
:
26450 decode_opc_mxu__pool10(env
, ctx
);
26452 case OPC_MXU__POOL11
:
26453 decode_opc_mxu__pool11(env
, ctx
);
26455 case OPC_MXU_D32ADD
:
26456 /* TODO: Implement emulation of D32ADD instruction. */
26457 MIPS_INVAL("OPC_MXU_D32ADD");
26458 generate_exception_end(ctx
, EXCP_RI
);
26460 case OPC_MXU__POOL12
:
26461 decode_opc_mxu__pool12(env
, ctx
);
26463 case OPC_MXU__POOL13
:
26464 decode_opc_mxu__pool13(env
, ctx
);
26466 case OPC_MXU__POOL14
:
26467 decode_opc_mxu__pool14(env
, ctx
);
26469 case OPC_MXU_Q8ACCE
:
26470 /* TODO: Implement emulation of Q8ACCE instruction. */
26471 MIPS_INVAL("OPC_MXU_Q8ACCE");
26472 generate_exception_end(ctx
, EXCP_RI
);
26474 case OPC_MXU_S8LDD
:
26475 gen_mxu_s8ldd(ctx
);
26477 case OPC_MXU_S8STD
:
26478 /* TODO: Implement emulation of S8STD instruction. */
26479 MIPS_INVAL("OPC_MXU_S8STD");
26480 generate_exception_end(ctx
, EXCP_RI
);
26482 case OPC_MXU_S8LDI
:
26483 /* TODO: Implement emulation of S8LDI instruction. */
26484 MIPS_INVAL("OPC_MXU_S8LDI");
26485 generate_exception_end(ctx
, EXCP_RI
);
26487 case OPC_MXU_S8SDI
:
26488 /* TODO: Implement emulation of S8SDI instruction. */
26489 MIPS_INVAL("OPC_MXU_S8SDI");
26490 generate_exception_end(ctx
, EXCP_RI
);
26492 case OPC_MXU__POOL15
:
26493 decode_opc_mxu__pool15(env
, ctx
);
26495 case OPC_MXU__POOL16
:
26496 decode_opc_mxu__pool16(env
, ctx
);
26498 case OPC_MXU__POOL17
:
26499 decode_opc_mxu__pool17(env
, ctx
);
26501 case OPC_MXU_S16LDD
:
26502 /* TODO: Implement emulation of S16LDD instruction. */
26503 MIPS_INVAL("OPC_MXU_S16LDD");
26504 generate_exception_end(ctx
, EXCP_RI
);
26506 case OPC_MXU_S16STD
:
26507 /* TODO: Implement emulation of S16STD instruction. */
26508 MIPS_INVAL("OPC_MXU_S16STD");
26509 generate_exception_end(ctx
, EXCP_RI
);
26511 case OPC_MXU_S16LDI
:
26512 /* TODO: Implement emulation of S16LDI instruction. */
26513 MIPS_INVAL("OPC_MXU_S16LDI");
26514 generate_exception_end(ctx
, EXCP_RI
);
26516 case OPC_MXU_S16SDI
:
26517 /* TODO: Implement emulation of S16SDI instruction. */
26518 MIPS_INVAL("OPC_MXU_S16SDI");
26519 generate_exception_end(ctx
, EXCP_RI
);
26521 case OPC_MXU_D32SLL
:
26522 /* TODO: Implement emulation of D32SLL instruction. */
26523 MIPS_INVAL("OPC_MXU_D32SLL");
26524 generate_exception_end(ctx
, EXCP_RI
);
26526 case OPC_MXU_D32SLR
:
26527 /* TODO: Implement emulation of D32SLR instruction. */
26528 MIPS_INVAL("OPC_MXU_D32SLR");
26529 generate_exception_end(ctx
, EXCP_RI
);
26531 case OPC_MXU_D32SARL
:
26532 /* TODO: Implement emulation of D32SARL instruction. */
26533 MIPS_INVAL("OPC_MXU_D32SARL");
26534 generate_exception_end(ctx
, EXCP_RI
);
26536 case OPC_MXU_D32SAR
:
26537 /* TODO: Implement emulation of D32SAR instruction. */
26538 MIPS_INVAL("OPC_MXU_D32SAR");
26539 generate_exception_end(ctx
, EXCP_RI
);
26541 case OPC_MXU_Q16SLL
:
26542 /* TODO: Implement emulation of Q16SLL instruction. */
26543 MIPS_INVAL("OPC_MXU_Q16SLL");
26544 generate_exception_end(ctx
, EXCP_RI
);
26546 case OPC_MXU_Q16SLR
:
26547 /* TODO: Implement emulation of Q16SLR instruction. */
26548 MIPS_INVAL("OPC_MXU_Q16SLR");
26549 generate_exception_end(ctx
, EXCP_RI
);
26551 case OPC_MXU__POOL18
:
26552 decode_opc_mxu__pool18(env
, ctx
);
26554 case OPC_MXU_Q16SAR
:
26555 /* TODO: Implement emulation of Q16SAR instruction. */
26556 MIPS_INVAL("OPC_MXU_Q16SAR");
26557 generate_exception_end(ctx
, EXCP_RI
);
26559 case OPC_MXU__POOL19
:
26560 decode_opc_mxu__pool19(env
, ctx
);
26562 case OPC_MXU__POOL20
:
26563 decode_opc_mxu__pool20(env
, ctx
);
26565 case OPC_MXU__POOL21
:
26566 decode_opc_mxu__pool21(env
, ctx
);
26568 case OPC_MXU_Q16SCOP
:
26569 /* TODO: Implement emulation of Q16SCOP instruction. */
26570 MIPS_INVAL("OPC_MXU_Q16SCOP");
26571 generate_exception_end(ctx
, EXCP_RI
);
26573 case OPC_MXU_Q8MADL
:
26574 /* TODO: Implement emulation of Q8MADL instruction. */
26575 MIPS_INVAL("OPC_MXU_Q8MADL");
26576 generate_exception_end(ctx
, EXCP_RI
);
26578 case OPC_MXU_S32SFL
:
26579 /* TODO: Implement emulation of S32SFL instruction. */
26580 MIPS_INVAL("OPC_MXU_S32SFL");
26581 generate_exception_end(ctx
, EXCP_RI
);
26583 case OPC_MXU_Q8SAD
:
26584 /* TODO: Implement emulation of Q8SAD instruction. */
26585 MIPS_INVAL("OPC_MXU_Q8SAD");
26586 generate_exception_end(ctx
, EXCP_RI
);
26589 MIPS_INVAL("decode_opc_mxu");
26590 generate_exception_end(ctx
, EXCP_RI
);
26593 gen_set_label(l_exit
);
26594 tcg_temp_free(t_mxu_cr
);
26598 #endif /* !defined(TARGET_MIPS64) */
26601 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26606 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26608 rs
= (ctx
->opcode
>> 21) & 0x1f;
26609 rt
= (ctx
->opcode
>> 16) & 0x1f;
26610 rd
= (ctx
->opcode
>> 11) & 0x1f;
26612 op1
= MASK_SPECIAL2(ctx
->opcode
);
26614 case OPC_MADD
: /* Multiply and add/sub */
26618 check_insn(ctx
, ISA_MIPS32
);
26619 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26622 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26625 case OPC_DIVU_G_2F
:
26626 case OPC_MULT_G_2F
:
26627 case OPC_MULTU_G_2F
:
26629 case OPC_MODU_G_2F
:
26630 check_insn(ctx
, INSN_LOONGSON2F
);
26631 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26635 check_insn(ctx
, ISA_MIPS32
);
26636 gen_cl(ctx
, op1
, rd
, rs
);
26639 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26640 gen_helper_do_semihosting(cpu_env
);
26642 /* XXX: not clear which exception should be raised
26643 * when in debug mode...
26645 check_insn(ctx
, ISA_MIPS32
);
26646 generate_exception_end(ctx
, EXCP_DBp
);
26649 #if defined(TARGET_MIPS64)
26652 check_insn(ctx
, ISA_MIPS64
);
26653 check_mips_64(ctx
);
26654 gen_cl(ctx
, op1
, rd
, rs
);
26656 case OPC_DMULT_G_2F
:
26657 case OPC_DMULTU_G_2F
:
26658 case OPC_DDIV_G_2F
:
26659 case OPC_DDIVU_G_2F
:
26660 case OPC_DMOD_G_2F
:
26661 case OPC_DMODU_G_2F
:
26662 check_insn(ctx
, INSN_LOONGSON2F
);
26663 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26666 default: /* Invalid */
26667 MIPS_INVAL("special2_legacy");
26668 generate_exception_end(ctx
, EXCP_RI
);
26673 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26675 int rs
, rt
, rd
, sa
;
26679 rs
= (ctx
->opcode
>> 21) & 0x1f;
26680 rt
= (ctx
->opcode
>> 16) & 0x1f;
26681 rd
= (ctx
->opcode
>> 11) & 0x1f;
26682 sa
= (ctx
->opcode
>> 6) & 0x1f;
26683 imm
= (int16_t)ctx
->opcode
>> 7;
26685 op1
= MASK_SPECIAL3(ctx
->opcode
);
26689 /* hint codes 24-31 are reserved and signal RI */
26690 generate_exception_end(ctx
, EXCP_RI
);
26692 /* Treat as NOP. */
26695 check_cp0_enabled(ctx
);
26696 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26697 gen_cache_operation(ctx
, rt
, rs
, imm
);
26701 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26704 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26709 /* Treat as NOP. */
26712 op2
= MASK_BSHFL(ctx
->opcode
);
26718 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
26721 gen_bitswap(ctx
, op2
, rd
, rt
);
26726 #if defined(TARGET_MIPS64)
26728 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26731 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26734 check_mips_64(ctx
);
26737 /* Treat as NOP. */
26740 op2
= MASK_DBSHFL(ctx
->opcode
);
26750 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26753 gen_bitswap(ctx
, op2
, rd
, rt
);
26760 default: /* Invalid */
26761 MIPS_INVAL("special3_r6");
26762 generate_exception_end(ctx
, EXCP_RI
);
26767 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26772 rs
= (ctx
->opcode
>> 21) & 0x1f;
26773 rt
= (ctx
->opcode
>> 16) & 0x1f;
26774 rd
= (ctx
->opcode
>> 11) & 0x1f;
26776 op1
= MASK_SPECIAL3(ctx
->opcode
);
26779 case OPC_DIVU_G_2E
:
26781 case OPC_MODU_G_2E
:
26782 case OPC_MULT_G_2E
:
26783 case OPC_MULTU_G_2E
:
26784 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26785 * the same mask and op1. */
26786 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26787 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26790 case OPC_ADDUH_R_QB
:
26792 case OPC_ADDQH_R_PH
:
26794 case OPC_ADDQH_R_W
:
26796 case OPC_SUBUH_R_QB
:
26798 case OPC_SUBQH_R_PH
:
26800 case OPC_SUBQH_R_W
:
26801 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26806 case OPC_MULQ_RS_W
:
26807 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26810 MIPS_INVAL("MASK ADDUH.QB");
26811 generate_exception_end(ctx
, EXCP_RI
);
26814 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26815 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26817 generate_exception_end(ctx
, EXCP_RI
);
26821 op2
= MASK_LX(ctx
->opcode
);
26823 #if defined(TARGET_MIPS64)
26829 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26831 default: /* Invalid */
26832 MIPS_INVAL("MASK LX");
26833 generate_exception_end(ctx
, EXCP_RI
);
26837 case OPC_ABSQ_S_PH_DSP
:
26838 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26840 case OPC_ABSQ_S_QB
:
26841 case OPC_ABSQ_S_PH
:
26843 case OPC_PRECEQ_W_PHL
:
26844 case OPC_PRECEQ_W_PHR
:
26845 case OPC_PRECEQU_PH_QBL
:
26846 case OPC_PRECEQU_PH_QBR
:
26847 case OPC_PRECEQU_PH_QBLA
:
26848 case OPC_PRECEQU_PH_QBRA
:
26849 case OPC_PRECEU_PH_QBL
:
26850 case OPC_PRECEU_PH_QBR
:
26851 case OPC_PRECEU_PH_QBLA
:
26852 case OPC_PRECEU_PH_QBRA
:
26853 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26860 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26863 MIPS_INVAL("MASK ABSQ_S.PH");
26864 generate_exception_end(ctx
, EXCP_RI
);
26868 case OPC_ADDU_QB_DSP
:
26869 op2
= MASK_ADDU_QB(ctx
->opcode
);
26872 case OPC_ADDQ_S_PH
:
26875 case OPC_ADDU_S_QB
:
26877 case OPC_ADDU_S_PH
:
26879 case OPC_SUBQ_S_PH
:
26882 case OPC_SUBU_S_QB
:
26884 case OPC_SUBU_S_PH
:
26888 case OPC_RADDU_W_QB
:
26889 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26891 case OPC_MULEU_S_PH_QBL
:
26892 case OPC_MULEU_S_PH_QBR
:
26893 case OPC_MULQ_RS_PH
:
26894 case OPC_MULEQ_S_W_PHL
:
26895 case OPC_MULEQ_S_W_PHR
:
26896 case OPC_MULQ_S_PH
:
26897 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26899 default: /* Invalid */
26900 MIPS_INVAL("MASK ADDU.QB");
26901 generate_exception_end(ctx
, EXCP_RI
);
26906 case OPC_CMPU_EQ_QB_DSP
:
26907 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26909 case OPC_PRECR_SRA_PH_W
:
26910 case OPC_PRECR_SRA_R_PH_W
:
26911 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26913 case OPC_PRECR_QB_PH
:
26914 case OPC_PRECRQ_QB_PH
:
26915 case OPC_PRECRQ_PH_W
:
26916 case OPC_PRECRQ_RS_PH_W
:
26917 case OPC_PRECRQU_S_QB_PH
:
26918 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26920 case OPC_CMPU_EQ_QB
:
26921 case OPC_CMPU_LT_QB
:
26922 case OPC_CMPU_LE_QB
:
26923 case OPC_CMP_EQ_PH
:
26924 case OPC_CMP_LT_PH
:
26925 case OPC_CMP_LE_PH
:
26926 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26928 case OPC_CMPGU_EQ_QB
:
26929 case OPC_CMPGU_LT_QB
:
26930 case OPC_CMPGU_LE_QB
:
26931 case OPC_CMPGDU_EQ_QB
:
26932 case OPC_CMPGDU_LT_QB
:
26933 case OPC_CMPGDU_LE_QB
:
26936 case OPC_PACKRL_PH
:
26937 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26939 default: /* Invalid */
26940 MIPS_INVAL("MASK CMPU.EQ.QB");
26941 generate_exception_end(ctx
, EXCP_RI
);
26945 case OPC_SHLL_QB_DSP
:
26946 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26948 case OPC_DPA_W_PH_DSP
:
26949 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26951 case OPC_DPAU_H_QBL
:
26952 case OPC_DPAU_H_QBR
:
26953 case OPC_DPSU_H_QBL
:
26954 case OPC_DPSU_H_QBR
:
26956 case OPC_DPAX_W_PH
:
26957 case OPC_DPAQ_S_W_PH
:
26958 case OPC_DPAQX_S_W_PH
:
26959 case OPC_DPAQX_SA_W_PH
:
26961 case OPC_DPSX_W_PH
:
26962 case OPC_DPSQ_S_W_PH
:
26963 case OPC_DPSQX_S_W_PH
:
26964 case OPC_DPSQX_SA_W_PH
:
26965 case OPC_MULSAQ_S_W_PH
:
26966 case OPC_DPAQ_SA_L_W
:
26967 case OPC_DPSQ_SA_L_W
:
26968 case OPC_MAQ_S_W_PHL
:
26969 case OPC_MAQ_S_W_PHR
:
26970 case OPC_MAQ_SA_W_PHL
:
26971 case OPC_MAQ_SA_W_PHR
:
26972 case OPC_MULSA_W_PH
:
26973 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26975 default: /* Invalid */
26976 MIPS_INVAL("MASK DPAW.PH");
26977 generate_exception_end(ctx
, EXCP_RI
);
26982 op2
= MASK_INSV(ctx
->opcode
);
26993 t0
= tcg_temp_new();
26994 t1
= tcg_temp_new();
26996 gen_load_gpr(t0
, rt
);
26997 gen_load_gpr(t1
, rs
);
26999 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27005 default: /* Invalid */
27006 MIPS_INVAL("MASK INSV");
27007 generate_exception_end(ctx
, EXCP_RI
);
27011 case OPC_APPEND_DSP
:
27012 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27014 case OPC_EXTR_W_DSP
:
27015 op2
= MASK_EXTR_W(ctx
->opcode
);
27019 case OPC_EXTR_RS_W
:
27021 case OPC_EXTRV_S_H
:
27023 case OPC_EXTRV_R_W
:
27024 case OPC_EXTRV_RS_W
:
27029 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27032 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27038 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27040 default: /* Invalid */
27041 MIPS_INVAL("MASK EXTR.W");
27042 generate_exception_end(ctx
, EXCP_RI
);
27046 #if defined(TARGET_MIPS64)
27047 case OPC_DDIV_G_2E
:
27048 case OPC_DDIVU_G_2E
:
27049 case OPC_DMULT_G_2E
:
27050 case OPC_DMULTU_G_2E
:
27051 case OPC_DMOD_G_2E
:
27052 case OPC_DMODU_G_2E
:
27053 check_insn(ctx
, INSN_LOONGSON2E
);
27054 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27056 case OPC_ABSQ_S_QH_DSP
:
27057 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27059 case OPC_PRECEQ_L_PWL
:
27060 case OPC_PRECEQ_L_PWR
:
27061 case OPC_PRECEQ_PW_QHL
:
27062 case OPC_PRECEQ_PW_QHR
:
27063 case OPC_PRECEQ_PW_QHLA
:
27064 case OPC_PRECEQ_PW_QHRA
:
27065 case OPC_PRECEQU_QH_OBL
:
27066 case OPC_PRECEQU_QH_OBR
:
27067 case OPC_PRECEQU_QH_OBLA
:
27068 case OPC_PRECEQU_QH_OBRA
:
27069 case OPC_PRECEU_QH_OBL
:
27070 case OPC_PRECEU_QH_OBR
:
27071 case OPC_PRECEU_QH_OBLA
:
27072 case OPC_PRECEU_QH_OBRA
:
27073 case OPC_ABSQ_S_OB
:
27074 case OPC_ABSQ_S_PW
:
27075 case OPC_ABSQ_S_QH
:
27076 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27084 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27086 default: /* Invalid */
27087 MIPS_INVAL("MASK ABSQ_S.QH");
27088 generate_exception_end(ctx
, EXCP_RI
);
27092 case OPC_ADDU_OB_DSP
:
27093 op2
= MASK_ADDU_OB(ctx
->opcode
);
27095 case OPC_RADDU_L_OB
:
27097 case OPC_SUBQ_S_PW
:
27099 case OPC_SUBQ_S_QH
:
27101 case OPC_SUBU_S_OB
:
27103 case OPC_SUBU_S_QH
:
27105 case OPC_SUBUH_R_OB
:
27107 case OPC_ADDQ_S_PW
:
27109 case OPC_ADDQ_S_QH
:
27111 case OPC_ADDU_S_OB
:
27113 case OPC_ADDU_S_QH
:
27115 case OPC_ADDUH_R_OB
:
27116 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27118 case OPC_MULEQ_S_PW_QHL
:
27119 case OPC_MULEQ_S_PW_QHR
:
27120 case OPC_MULEU_S_QH_OBL
:
27121 case OPC_MULEU_S_QH_OBR
:
27122 case OPC_MULQ_RS_QH
:
27123 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27125 default: /* Invalid */
27126 MIPS_INVAL("MASK ADDU.OB");
27127 generate_exception_end(ctx
, EXCP_RI
);
27131 case OPC_CMPU_EQ_OB_DSP
:
27132 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27134 case OPC_PRECR_SRA_QH_PW
:
27135 case OPC_PRECR_SRA_R_QH_PW
:
27136 /* Return value is rt. */
27137 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27139 case OPC_PRECR_OB_QH
:
27140 case OPC_PRECRQ_OB_QH
:
27141 case OPC_PRECRQ_PW_L
:
27142 case OPC_PRECRQ_QH_PW
:
27143 case OPC_PRECRQ_RS_QH_PW
:
27144 case OPC_PRECRQU_S_OB_QH
:
27145 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27147 case OPC_CMPU_EQ_OB
:
27148 case OPC_CMPU_LT_OB
:
27149 case OPC_CMPU_LE_OB
:
27150 case OPC_CMP_EQ_QH
:
27151 case OPC_CMP_LT_QH
:
27152 case OPC_CMP_LE_QH
:
27153 case OPC_CMP_EQ_PW
:
27154 case OPC_CMP_LT_PW
:
27155 case OPC_CMP_LE_PW
:
27156 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27158 case OPC_CMPGDU_EQ_OB
:
27159 case OPC_CMPGDU_LT_OB
:
27160 case OPC_CMPGDU_LE_OB
:
27161 case OPC_CMPGU_EQ_OB
:
27162 case OPC_CMPGU_LT_OB
:
27163 case OPC_CMPGU_LE_OB
:
27164 case OPC_PACKRL_PW
:
27168 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27170 default: /* Invalid */
27171 MIPS_INVAL("MASK CMPU_EQ.OB");
27172 generate_exception_end(ctx
, EXCP_RI
);
27176 case OPC_DAPPEND_DSP
:
27177 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27179 case OPC_DEXTR_W_DSP
:
27180 op2
= MASK_DEXTR_W(ctx
->opcode
);
27187 case OPC_DEXTR_R_L
:
27188 case OPC_DEXTR_RS_L
:
27190 case OPC_DEXTR_R_W
:
27191 case OPC_DEXTR_RS_W
:
27192 case OPC_DEXTR_S_H
:
27194 case OPC_DEXTRV_R_L
:
27195 case OPC_DEXTRV_RS_L
:
27196 case OPC_DEXTRV_S_H
:
27198 case OPC_DEXTRV_R_W
:
27199 case OPC_DEXTRV_RS_W
:
27200 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27205 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27207 default: /* Invalid */
27208 MIPS_INVAL("MASK EXTR.W");
27209 generate_exception_end(ctx
, EXCP_RI
);
27213 case OPC_DPAQ_W_QH_DSP
:
27214 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27216 case OPC_DPAU_H_OBL
:
27217 case OPC_DPAU_H_OBR
:
27218 case OPC_DPSU_H_OBL
:
27219 case OPC_DPSU_H_OBR
:
27221 case OPC_DPAQ_S_W_QH
:
27223 case OPC_DPSQ_S_W_QH
:
27224 case OPC_MULSAQ_S_W_QH
:
27225 case OPC_DPAQ_SA_L_PW
:
27226 case OPC_DPSQ_SA_L_PW
:
27227 case OPC_MULSAQ_S_L_PW
:
27228 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27230 case OPC_MAQ_S_W_QHLL
:
27231 case OPC_MAQ_S_W_QHLR
:
27232 case OPC_MAQ_S_W_QHRL
:
27233 case OPC_MAQ_S_W_QHRR
:
27234 case OPC_MAQ_SA_W_QHLL
:
27235 case OPC_MAQ_SA_W_QHLR
:
27236 case OPC_MAQ_SA_W_QHRL
:
27237 case OPC_MAQ_SA_W_QHRR
:
27238 case OPC_MAQ_S_L_PWL
:
27239 case OPC_MAQ_S_L_PWR
:
27244 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27246 default: /* Invalid */
27247 MIPS_INVAL("MASK DPAQ.W.QH");
27248 generate_exception_end(ctx
, EXCP_RI
);
27252 case OPC_DINSV_DSP
:
27253 op2
= MASK_INSV(ctx
->opcode
);
27264 t0
= tcg_temp_new();
27265 t1
= tcg_temp_new();
27267 gen_load_gpr(t0
, rt
);
27268 gen_load_gpr(t1
, rs
);
27270 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27276 default: /* Invalid */
27277 MIPS_INVAL("MASK DINSV");
27278 generate_exception_end(ctx
, EXCP_RI
);
27282 case OPC_SHLL_OB_DSP
:
27283 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27286 default: /* Invalid */
27287 MIPS_INVAL("special3_legacy");
27288 generate_exception_end(ctx
, EXCP_RI
);
27293 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
27295 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
27298 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
27299 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
27300 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
27301 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
27302 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
27303 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
27304 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
27305 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
27306 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
27307 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
27308 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
27309 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
27310 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
27311 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
27312 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
27313 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
27314 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
27315 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
27316 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27317 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27318 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27319 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27320 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27321 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27322 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27323 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27326 MIPS_INVAL("TX79 MMI class MMI0");
27327 generate_exception_end(ctx
, EXCP_RI
);
27332 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27334 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27337 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27338 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27339 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27340 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27341 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27342 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27343 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27344 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27345 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27346 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27347 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27348 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27349 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27350 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27351 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27352 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27353 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27354 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27355 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27358 MIPS_INVAL("TX79 MMI class MMI1");
27359 generate_exception_end(ctx
, EXCP_RI
);
27364 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27366 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27369 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27370 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27371 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27372 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27373 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27374 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27375 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27376 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27377 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27378 case MMI_OPC_2_PCPYLD
: /* TODO: MMI_OPC_2_PCPYLD */
27379 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27380 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27381 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27382 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27383 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27384 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27385 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27386 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27387 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27388 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27389 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27390 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27391 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27394 MIPS_INVAL("TX79 MMI class MMI2");
27395 generate_exception_end(ctx
, EXCP_RI
);
27400 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27402 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27405 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27406 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27407 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27408 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27409 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27410 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27411 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27412 case MMI_OPC_3_PCPYUD
: /* TODO: MMI_OPC_3_PCPYUD */
27413 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27414 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27415 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27416 case MMI_OPC_3_PCPYH
: /* TODO: MMI_OPC_3_PCPYH */
27417 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27418 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27421 MIPS_INVAL("TX79 MMI class MMI3");
27422 generate_exception_end(ctx
, EXCP_RI
);
27427 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27429 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27430 int rs
= extract32(ctx
->opcode
, 21, 5);
27431 int rt
= extract32(ctx
->opcode
, 16, 5);
27432 int rd
= extract32(ctx
->opcode
, 11, 5);
27435 case MMI_OPC_CLASS_MMI0
:
27436 decode_mmi0(env
, ctx
);
27438 case MMI_OPC_CLASS_MMI1
:
27439 decode_mmi1(env
, ctx
);
27441 case MMI_OPC_CLASS_MMI2
:
27442 decode_mmi2(env
, ctx
);
27444 case MMI_OPC_CLASS_MMI3
:
27445 decode_mmi3(env
, ctx
);
27447 case MMI_OPC_MULT1
:
27448 case MMI_OPC_MULTU1
:
27450 case MMI_OPC_MADDU
:
27451 case MMI_OPC_MADD1
:
27452 case MMI_OPC_MADDU1
:
27453 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27456 case MMI_OPC_DIVU1
:
27457 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27459 case MMI_OPC_MTLO1
:
27460 case MMI_OPC_MTHI1
:
27461 gen_HILO1_tx79(ctx
, opc
, rs
);
27463 case MMI_OPC_MFLO1
:
27464 case MMI_OPC_MFHI1
:
27465 gen_HILO1_tx79(ctx
, opc
, rd
);
27467 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27468 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27469 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27470 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27471 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27472 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27473 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27474 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27475 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27476 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27479 MIPS_INVAL("TX79 MMI class");
27480 generate_exception_end(ctx
, EXCP_RI
);
27485 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27487 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27490 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27492 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27496 * The TX79-specific instruction Store Quadword
27498 * +--------+-------+-------+------------------------+
27499 * | 011111 | base | rt | offset | SQ
27500 * +--------+-------+-------+------------------------+
27503 * has the same opcode as the Read Hardware Register instruction
27505 * +--------+-------+-------+-------+-------+--------+
27506 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27507 * +--------+-------+-------+-------+-------+--------+
27510 * that is required, trapped and emulated by the Linux kernel. However, all
27511 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27512 * offset is odd. Therefore all valid SQ instructions can execute normally.
27513 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27514 * between SQ and RDHWR, as the Linux kernel does.
27516 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27518 int base
= extract32(ctx
->opcode
, 21, 5);
27519 int rt
= extract32(ctx
->opcode
, 16, 5);
27520 int offset
= extract32(ctx
->opcode
, 0, 16);
27522 #ifdef CONFIG_USER_ONLY
27523 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27524 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27526 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27527 int rd
= extract32(ctx
->opcode
, 11, 5);
27529 gen_rdhwr(ctx
, rt
, rd
, 0);
27534 gen_mmi_sq(ctx
, base
, rt
, offset
);
27537 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27539 int rs
, rt
, rd
, sa
;
27543 rs
= (ctx
->opcode
>> 21) & 0x1f;
27544 rt
= (ctx
->opcode
>> 16) & 0x1f;
27545 rd
= (ctx
->opcode
>> 11) & 0x1f;
27546 sa
= (ctx
->opcode
>> 6) & 0x1f;
27547 imm
= sextract32(ctx
->opcode
, 7, 9);
27549 op1
= MASK_SPECIAL3(ctx
->opcode
);
27552 * EVA loads and stores overlap Loongson 2E instructions decoded by
27553 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27560 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27568 check_cp0_enabled(ctx
);
27569 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27573 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27578 check_cp0_enabled(ctx
);
27579 gen_st(ctx
, op1
, rt
, rs
, imm
);
27582 check_cp0_enabled(ctx
);
27583 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
27586 check_cp0_enabled(ctx
);
27587 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27588 gen_cache_operation(ctx
, rt
, rs
, imm
);
27590 /* Treat as NOP. */
27593 check_cp0_enabled(ctx
);
27594 /* Treat as NOP. */
27602 check_insn(ctx
, ISA_MIPS32R2
);
27603 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27606 op2
= MASK_BSHFL(ctx
->opcode
);
27613 check_insn(ctx
, ISA_MIPS32R6
);
27614 decode_opc_special3_r6(env
, ctx
);
27617 check_insn(ctx
, ISA_MIPS32R2
);
27618 gen_bshfl(ctx
, op2
, rt
, rd
);
27622 #if defined(TARGET_MIPS64)
27629 check_insn(ctx
, ISA_MIPS64R2
);
27630 check_mips_64(ctx
);
27631 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27634 op2
= MASK_DBSHFL(ctx
->opcode
);
27645 check_insn(ctx
, ISA_MIPS32R6
);
27646 decode_opc_special3_r6(env
, ctx
);
27649 check_insn(ctx
, ISA_MIPS64R2
);
27650 check_mips_64(ctx
);
27651 op2
= MASK_DBSHFL(ctx
->opcode
);
27652 gen_bshfl(ctx
, op2
, rt
, rd
);
27658 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27663 TCGv t0
= tcg_temp_new();
27664 TCGv t1
= tcg_temp_new();
27666 gen_load_gpr(t0
, rt
);
27667 gen_load_gpr(t1
, rs
);
27668 gen_helper_fork(t0
, t1
);
27676 TCGv t0
= tcg_temp_new();
27678 gen_load_gpr(t0
, rs
);
27679 gen_helper_yield(t0
, cpu_env
, t0
);
27680 gen_store_gpr(t0
, rd
);
27685 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27686 decode_opc_special3_r6(env
, ctx
);
27688 decode_opc_special3_legacy(env
, ctx
);
27693 /* MIPS SIMD Architecture (MSA) */
27694 static inline int check_msa_access(DisasContext
*ctx
)
27696 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
27697 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
27698 generate_exception_end(ctx
, EXCP_RI
);
27702 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
27703 if (ctx
->insn_flags
& ASE_MSA
) {
27704 generate_exception_end(ctx
, EXCP_MSADIS
);
27707 generate_exception_end(ctx
, EXCP_RI
);
27714 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
27716 /* generates tcg ops to check if any element is 0 */
27717 /* Note this function only works with MSA_WRLEN = 128 */
27718 uint64_t eval_zero_or_big
= 0;
27719 uint64_t eval_big
= 0;
27720 TCGv_i64 t0
= tcg_temp_new_i64();
27721 TCGv_i64 t1
= tcg_temp_new_i64();
27724 eval_zero_or_big
= 0x0101010101010101ULL
;
27725 eval_big
= 0x8080808080808080ULL
;
27728 eval_zero_or_big
= 0x0001000100010001ULL
;
27729 eval_big
= 0x8000800080008000ULL
;
27732 eval_zero_or_big
= 0x0000000100000001ULL
;
27733 eval_big
= 0x8000000080000000ULL
;
27736 eval_zero_or_big
= 0x0000000000000001ULL
;
27737 eval_big
= 0x8000000000000000ULL
;
27740 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
27741 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
27742 tcg_gen_andi_i64(t0
, t0
, eval_big
);
27743 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
27744 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
27745 tcg_gen_andi_i64(t1
, t1
, eval_big
);
27746 tcg_gen_or_i64(t0
, t0
, t1
);
27747 /* if all bits are zero then all elements are not zero */
27748 /* if some bit is non-zero then some element is zero */
27749 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
27750 tcg_gen_trunc_i64_tl(tresult
, t0
);
27751 tcg_temp_free_i64(t0
);
27752 tcg_temp_free_i64(t1
);
27755 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
27757 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27758 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27759 int64_t s16
= (int16_t)ctx
->opcode
;
27761 check_msa_access(ctx
);
27763 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27764 generate_exception_end(ctx
, EXCP_RI
);
27771 TCGv_i64 t0
= tcg_temp_new_i64();
27772 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
27773 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
27774 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
27775 tcg_gen_trunc_i64_tl(bcond
, t0
);
27776 tcg_temp_free_i64(t0
);
27783 gen_check_zero_element(bcond
, df
, wt
);
27789 gen_check_zero_element(bcond
, df
, wt
);
27790 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
27794 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
27796 ctx
->hflags
|= MIPS_HFLAG_BC
;
27797 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
27800 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
27802 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
27803 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
27804 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27805 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27807 TCGv_i32 twd
= tcg_const_i32(wd
);
27808 TCGv_i32 tws
= tcg_const_i32(ws
);
27809 TCGv_i32 ti8
= tcg_const_i32(i8
);
27811 switch (MASK_MSA_I8(ctx
->opcode
)) {
27813 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
27816 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
27819 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
27822 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
27825 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
27828 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
27831 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
27837 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
27838 if (df
== DF_DOUBLE
) {
27839 generate_exception_end(ctx
, EXCP_RI
);
27841 TCGv_i32 tdf
= tcg_const_i32(df
);
27842 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
27843 tcg_temp_free_i32(tdf
);
27848 MIPS_INVAL("MSA instruction");
27849 generate_exception_end(ctx
, EXCP_RI
);
27853 tcg_temp_free_i32(twd
);
27854 tcg_temp_free_i32(tws
);
27855 tcg_temp_free_i32(ti8
);
/*
 * Translate an MSA I5-format instruction (vector op with a 5-bit
 * immediate operand), dispatching on the minor opcode to the
 * corresponding MSA helper.
 * NOTE(review): this excerpt is garbled by extraction — most case
 * labels, break statements, and braces of the switch are not visible;
 * only declarations and helper-call lines survive. Treat the control
 * structure as partial and consult the full source before editing.
 */
27858 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
27860 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
/* Field extraction from the 32-bit opcode: data format, signed and
 * unsigned views of the 5-bit immediate, source and dest registers. */
27861 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27862 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
27863 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
27864 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27865 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27867 TCGv_i32 tdf
= tcg_const_i32(df
)
27868 TCGv_i32 twd
= tcg_const_i32(wd
)
27869 TCGv_i32 tws
= tcg_const_i32(ws
)
/* timm defaults to the unsigned immediate u5; signed ops below reload
 * it with the sign-extended s5 before their helper call. */
27870 TCGv_i32 timm
= tcg_temp_new_i32();
27871 tcg_gen_movi_i32(timm
, u5
)
27873 switch (MASK_MSA_I5(ctx
->opcode
)) {
27875 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
)
27878 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
)
27880 case OPC_MAXI_S_df
:
27881 tcg_gen_movi_i32(timm
, s5
)
27882 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
)
27884 case OPC_MAXI_U_df
:
27885 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
)
27887 case OPC_MINI_S_df
:
27888 tcg_gen_movi_i32(timm
, s5
)
27889 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
)
27891 case OPC_MINI_U_df
:
27892 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
)
27895 tcg_gen_movi_i32(timm
, s5
)
27896 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
)
27898 case OPC_CLTI_S_df
:
27899 tcg_gen_movi_i32(timm
, s5
)
27900 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
)
27902 case OPC_CLTI_U_df
:
27903 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
)
27905 case OPC_CLEI_S_df
:
27906 tcg_gen_movi_i32(timm
, s5
)
27907 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
)
27909 case OPC_CLEI_U_df
:
27910 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
)
/* LDI uses a 10-bit signed immediate taken from a different bit range. */
27914 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
27915 tcg_gen_movi_i32(timm
, s10
)
27916 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
)
27920 MIPS_INVAL("MSA instruction");
27921 generate_exception_end(ctx
, EXCP_RI
)
/* Release all TCG temporaries created above. */
27925 tcg_temp_free_i32(tdf
)
27926 tcg_temp_free_i32(twd
)
27927 tcg_temp_free_i32(tws
)
27928 tcg_temp_free_i32(timm
)
/*
 * Translate an MSA BIT-format instruction (shift/bit-manipulate by an
 * immediate bit index). The combined df/m field (dfm) encodes both the
 * data format and the bit position m.
 * NOTE(review): garbled excerpt — the df/m assignments inside each
 * decode branch, the case labels, breaks, and braces are not visible
 * here; only the branch conditions and helper calls survive.
 */
27931 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
27933 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27934 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
27935 uint32_t df
= 0, m
= 0;
27936 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27937 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
/* Decode dfm: the position of the highest clear bit selects the data
 * format; a dfm pattern matching none of these is a reserved
 * instruction. (The df/m assignments are elided in this excerpt.) */
27944 if ((dfm
& 0x40) == 0x00) {
27947 } else if ((dfm
& 0x60) == 0x40) {
27950 } else if ((dfm
& 0x70) == 0x60) {
27953 } else if ((dfm
& 0x78) == 0x70) {
27957 generate_exception_end(ctx
, EXCP_RI
)
27961 tdf
= tcg_const_i32(df
)
27962 tm
= tcg_const_i32(m
)
27963 twd
= tcg_const_i32(wd
)
27964 tws
= tcg_const_i32(ws
)
27966 switch (MASK_MSA_BIT(ctx
->opcode
)) {
27968 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
)
27971 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
)
27974 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
)
27977 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
)
27980 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
)
27983 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
)
27985 case OPC_BINSLI_df
:
27986 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
)
27988 case OPC_BINSRI_df
:
27989 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
)
27992 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
)
27995 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
)
27998 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
)
28001 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
)
28004 MIPS_INVAL("MSA instruction");
28005 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28009 tcg_temp_free_i32(tdf
)
28010 tcg_temp_free_i32(tm
)
28011 tcg_temp_free_i32(twd
)
28012 tcg_temp_free_i32(tws
)
28015 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28017 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28018 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28019 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28020 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28021 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28023 TCGv_i32 tdf
= tcg_const_i32(df
);
28024 TCGv_i32 twd
= tcg_const_i32(wd
);
28025 TCGv_i32 tws
= tcg_const_i32(ws
);
28026 TCGv_i32 twt
= tcg_const_i32(wt
);
28028 switch (MASK_MSA_3R(ctx
->opcode
)) {
28030 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28033 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28036 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28039 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28041 case OPC_SUBS_S_df
:
28042 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28045 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28048 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28051 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28054 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28057 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28059 case OPC_ADDS_A_df
:
28060 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28062 case OPC_SUBS_U_df
:
28063 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28066 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28069 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28072 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28075 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28078 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28081 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28083 case OPC_ADDS_S_df
:
28084 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28086 case OPC_SUBSUS_U_df
:
28087 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28090 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28093 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28096 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28099 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28102 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28105 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28107 case OPC_ADDS_U_df
:
28108 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28110 case OPC_SUBSUU_S_df
:
28111 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28114 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28117 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28120 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28123 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28126 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28128 case OPC_ASUB_S_df
:
28129 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28132 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28135 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28138 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28141 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28144 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28147 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28149 case OPC_ASUB_U_df
:
28150 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28153 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28156 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28159 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28162 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28164 case OPC_AVER_S_df
:
28165 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28168 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28171 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28174 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28177 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28179 case OPC_AVER_U_df
:
28180 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28183 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28186 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28189 case OPC_DOTP_S_df
:
28190 case OPC_DOTP_U_df
:
28191 case OPC_DPADD_S_df
:
28192 case OPC_DPADD_U_df
:
28193 case OPC_DPSUB_S_df
:
28194 case OPC_HADD_S_df
:
28195 case OPC_DPSUB_U_df
:
28196 case OPC_HADD_U_df
:
28197 case OPC_HSUB_S_df
:
28198 case OPC_HSUB_U_df
:
28199 if (df
== DF_BYTE
) {
28200 generate_exception_end(ctx
, EXCP_RI
);
28203 switch (MASK_MSA_3R(ctx
->opcode
)) {
28204 case OPC_DOTP_S_df
:
28205 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28207 case OPC_DOTP_U_df
:
28208 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28210 case OPC_DPADD_S_df
:
28211 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28213 case OPC_DPADD_U_df
:
28214 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28216 case OPC_DPSUB_S_df
:
28217 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28219 case OPC_HADD_S_df
:
28220 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28222 case OPC_DPSUB_U_df
:
28223 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28225 case OPC_HADD_U_df
:
28226 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28228 case OPC_HSUB_S_df
:
28229 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28231 case OPC_HSUB_U_df
:
28232 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28237 MIPS_INVAL("MSA instruction");
28238 generate_exception_end(ctx
, EXCP_RI
);
28241 tcg_temp_free_i32(twd
);
28242 tcg_temp_free_i32(tws
);
28243 tcg_temp_free_i32(twt
);
28244 tcg_temp_free_i32(tdf
);
/*
 * Translate the MSA ELM-format sub-group with dfn == 0x3E:
 * CTCMSA (GPR -> MSA control reg), CFCMSA (MSA control reg -> GPR),
 * and MOVE.V (vector register copy).
 * NOTE(review): garbled excerpt — case labels, break statements and
 * braces are not visible; only the helper-call lines survive.
 */
28247 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
28249 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28250 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
28251 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
28252 TCGv telm
= tcg_temp_new();
28253 TCGv_i32 tsr
= tcg_const_i32(source
)
28254 TCGv_i32 tdt
= tcg_const_i32(dest
)
28256 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
/* CTCMSA: copy the source GPR into the MSA control register. */
28258 gen_load_gpr(telm
, source
)
28259 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
)
/* CFCMSA: read the MSA control register back into the dest GPR. */
28262 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
)
28263 gen_store_gpr(telm
, dest
)
28266 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
)
28269 MIPS_INVAL("MSA instruction");
28270 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28274 tcg_temp_free(telm
)
28275 tcg_temp_free_i32(tdt
)
28276 tcg_temp_free_i32(tsr
)
/*
 * Translate an MSA ELM-format instruction once the caller
 * (gen_msa_elm) has decoded the df/n field pair.
 * NOTE(review): garbled excerpt — the tail of the parameter list
 * (presumably "uint32_t n" — TODO confirm against the full source),
 * most case labels, breaks and braces are not visible here.
 */
28279 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28282 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28283 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28284 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28286 TCGv_i32 tws
= tcg_const_i32(ws
)
28287 TCGv_i32 twd
= tcg_const_i32(wd
)
28288 TCGv_i32 tn
= tcg_const_i32(n
)
28289 TCGv_i32 tdf
= tcg_const_i32(df
)
28291 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28293 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
)
28295 case OPC_SPLATI_df
:
28296 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
)
28299 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
)
/* COPY_S / COPY_U / INSERT share a MIPS64-only double-format check,
 * then re-dispatch in a nested switch. */
28301 case OPC_COPY_S_df
:
28302 case OPC_COPY_U_df
:
28303 case OPC_INSERT_df
:
28304 #if !defined(TARGET_MIPS64)
28305 /* Double format valid only for MIPS64 */
28306 if (df
== DF_DOUBLE
) {
28307 generate_exception_end(ctx
, EXCP_RI
)
28311 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28312 case OPC_COPY_S_df
:
/* Skip the copy when the destination GPR is $zero (wd == 0). */
28313 if (likely(wd
!= 0)) {
28314 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
)
28317 case OPC_COPY_U_df
:
28318 if (likely(wd
!= 0)) {
28319 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
)
28322 case OPC_INSERT_df
:
28323 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
)
28328 MIPS_INVAL("MSA instruction");
28329 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28331 tcg_temp_free_i32(twd
)
28332 tcg_temp_free_i32(tws
)
28333 tcg_temp_free_i32(tn
)
28334 tcg_temp_free_i32(tdf
)
/*
 * Decode the 6-bit df/n field of an MSA ELM-format instruction and
 * forward to gen_msa_elm_df, or to gen_msa_elm_3e for the special
 * dfn == 0x3E sub-group (CTCMSA/CFCMSA/MOVE.V). Unmatched patterns
 * raise a Reserved Instruction exception.
 * NOTE(review): garbled excerpt — the df/n assignments inside each
 * decode branch are not visible here.
 */
28337 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28339 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28340 uint32_t df
= 0, n
= 0;
28342 if ((dfn
& 0x30) == 0x00) {
28345 } else if ((dfn
& 0x38) == 0x20) {
28348 } else if ((dfn
& 0x3c) == 0x30) {
28351 } else if ((dfn
& 0x3e) == 0x38) {
28354 } else if (dfn
== 0x3E) {
28355 /* CTCMSA, CFCMSA, MOVE.V */
28356 gen_msa_elm_3e(env
, ctx
)
28359 generate_exception_end(ctx
, EXCP_RI
)
28363 gen_msa_elm_df(env
, ctx
, df
, n
)
/*
 * Translate an MSA 3RF-format instruction (three-register
 * floating-point / fixed-point ops). tdf carries the adjusted data
 * format: df + 2 for float ops (word/double), and the fixed-point
 * *_Q ops visibly reload it with df + 1 (half/word) before their
 * helper call.
 * NOTE(review): garbled excerpt — most case labels, all break
 * statements and braces are missing from this view.
 */
28366 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28368 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28369 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28370 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28371 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28372 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28374 TCGv_i32 twd
= tcg_const_i32(wd
)
28375 TCGv_i32 tws
= tcg_const_i32(ws
)
28376 TCGv_i32 twt
= tcg_const_i32(wt
)
28377 TCGv_i32 tdf
= tcg_temp_new_i32();
28379 /* adjust df value for floating-point instruction */
28380 tcg_gen_movi_i32(tdf
, df
+ 2);
28382 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28384 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
)
28387 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
)
28390 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
)
28393 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
)
28396 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
)
28399 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
)
28402 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
)
28405 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
)
28408 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
)
28411 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
)
28414 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
)
28417 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
)
28420 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
)
/* Fixed-point: data format is df + 1 instead of df + 2. */
28423 tcg_gen_movi_i32(tdf
, df
+ 1);
28424 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28427 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
)
28430 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
)
28432 case OPC_MADD_Q_df
:
28433 tcg_gen_movi_i32(tdf
, df
+ 1);
28434 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28437 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
)
28439 case OPC_MSUB_Q_df
:
28440 tcg_gen_movi_i32(tdf
, df
+ 1);
28441 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28444 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
)
28447 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
)
28450 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
)
28453 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
)
28456 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
)
28459 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
)
28462 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
)
28465 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
)
28468 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
)
28471 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
)
28474 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
)
28477 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
)
28480 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
)
28482 case OPC_MULR_Q_df
:
28483 tcg_gen_movi_i32(tdf
, df
+ 1);
28484 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28487 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
)
28489 case OPC_FMIN_A_df
:
28490 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
)
28492 case OPC_MADDR_Q_df
:
28493 tcg_gen_movi_i32(tdf
, df
+ 1);
28494 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28497 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
)
28500 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
)
28502 case OPC_MSUBR_Q_df
:
28503 tcg_gen_movi_i32(tdf
, df
+ 1);
28504 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
)
28507 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
)
28509 case OPC_FMAX_A_df
:
28510 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
)
28513 MIPS_INVAL("MSA instruction");
28514 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28518 tcg_temp_free_i32(twd
)
28519 tcg_temp_free_i32(tws
)
28520 tcg_temp_free_i32(twt
)
28521 tcg_temp_free_i32(tdf
)
/*
 * Translate an MSA 2R-format instruction (two-register ops:
 * FILL, PCNT, NLOC, NLZC). FILL with double format is rejected on
 * 32-bit targets via the TARGET_MIPS64 guard.
 * NOTE(review): garbled excerpt — case labels, breaks and braces are
 * not visible here; only the guard and helper-call lines survive.
 */
28524 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28526 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28527 (op & (0x7 << 18)))
28528 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28529 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28530 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28531 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28532 TCGv_i32 twd
= tcg_const_i32(wd
)
28533 TCGv_i32 tws
= tcg_const_i32(ws
)
28534 TCGv_i32 twt
= tcg_const_i32(wt
)
28535 TCGv_i32 tdf
= tcg_const_i32(df
)
28537 switch (MASK_MSA_2R(ctx
->opcode
)) {
28539 #if !defined(TARGET_MIPS64)
28540 /* Double format valid only for MIPS64 */
28541 if (df
== DF_DOUBLE
) {
28542 generate_exception_end(ctx
, EXCP_RI
)
28546 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28549 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
)
28552 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
)
28555 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
)
28558 MIPS_INVAL("MSA instruction");
28559 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28563 tcg_temp_free_i32(twd
)
28564 tcg_temp_free_i32(tws
)
28565 tcg_temp_free_i32(twt
)
28566 tcg_temp_free_i32(tdf
)
/*
 * Translate an MSA 2RF-format instruction (two-register
 * floating-point ops: class/trunc/sqrt/convert etc.). tdf is
 * df + 2, matching the word/double data formats.
 * NOTE(review): garbled excerpt — some case labels, all breaks and
 * braces are missing; no default branch is visible in this view
 * (TODO confirm against the full source whether one exists).
 */
28569 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28571 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28572 (op & (0xf << 17)))
28573 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28574 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28575 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28576 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28577 TCGv_i32 twd
= tcg_const_i32(wd
)
28578 TCGv_i32 tws
= tcg_const_i32(ws
)
28579 TCGv_i32 twt
= tcg_const_i32(wt
)
28580 /* adjust df value for floating-point instruction */
28581 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28583 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28584 case OPC_FCLASS_df
:
28585 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
)
28587 case OPC_FTRUNC_S_df
:
28588 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
)
28590 case OPC_FTRUNC_U_df
:
28591 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
)
28594 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
)
28596 case OPC_FRSQRT_df
:
28597 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
)
28600 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
)
28603 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
)
28606 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
)
28608 case OPC_FEXUPL_df
:
28609 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
)
28611 case OPC_FEXUPR_df
:
28612 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
)
28615 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
)
28618 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
)
28620 case OPC_FTINT_S_df
:
28621 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
)
28623 case OPC_FTINT_U_df
:
28624 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
)
28626 case OPC_FFINT_S_df
:
28627 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
)
28629 case OPC_FFINT_U_df
:
28630 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
)
/* Release TCG temporaries. */
28634 tcg_temp_free_i32(twd
)
28635 tcg_temp_free_i32(tws
)
28636 tcg_temp_free_i32(twt
)
28637 tcg_temp_free_i32(tdf
)
/*
 * Translate an MSA VEC-format instruction (whole-vector bitwise ops:
 * AND/OR/NOR/XOR and the BMNZ/BMZ/BSEL selects). These helpers take no
 * data-format argument — they operate on the full 128-bit vector.
 * NOTE(review): garbled excerpt — case labels, breaks and braces are
 * not visible; only the helper-call lines survive.
 */
28640 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
28642 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
28643 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28644 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28645 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28646 TCGv_i32 twd
= tcg_const_i32(wd
)
28647 TCGv_i32 tws
= tcg_const_i32(ws
)
28648 TCGv_i32 twt
= tcg_const_i32(wt
)
28650 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28652 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
)
28655 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
)
28658 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
)
28661 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
)
28664 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
)
28667 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
)
28670 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
)
28673 MIPS_INVAL("MSA instruction");
28674 generate_exception_end(ctx
, EXCP_RI
)
/* Release TCG temporaries. */
28678 tcg_temp_free_i32(twd
)
28679 tcg_temp_free_i32(tws
)
28680 tcg_temp_free_i32(twt
)
/*
 * Second-level dispatch for the MSA VEC/2R/2RF encoding space:
 * forwards to gen_msa_vec_v, gen_msa_2r or gen_msa_2rf; anything
 * else is a Reserved Instruction.
 * NOTE(review): garbled excerpt — the case labels selecting each
 * branch are not visible here.
 */
28683 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
28685 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28693 gen_msa_vec_v(env
, ctx
)
28696 gen_msa_2r(env
, ctx
)
28699 gen_msa_2rf(env
, ctx
)
28702 MIPS_INVAL("MSA instruction");
28703 generate_exception_end(ctx
, EXCP_RI
)
/*
 * Top-level MSA decoder: verifies the MSA ASE is present and enabled
 * (check_insn / check_msa_access), then dispatches on the MSA minor
 * opcode to the format-specific translators. The trailing section
 * handles the LD.df / ST.df memory forms: a 10-bit signed offset
 * scaled by the data format is added to GPR rs to form the address.
 * NOTE(review): garbled excerpt — several case labels, all breaks and
 * braces are missing from this view.
 */
28708 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
28710 uint32_t opcode
= ctx
->opcode
;
28711 check_insn(ctx
, ASE_MSA
)
28712 check_msa_access(ctx
)
28714 switch (MASK_MSA_MINOR(opcode
)) {
28715 case OPC_MSA_I8_00
:
28716 case OPC_MSA_I8_01
:
28717 case OPC_MSA_I8_02
:
28718 gen_msa_i8(env
, ctx
)
28720 case OPC_MSA_I5_06
:
28721 case OPC_MSA_I5_07
:
28722 gen_msa_i5(env
, ctx
)
28724 case OPC_MSA_BIT_09
:
28725 case OPC_MSA_BIT_0A
:
28726 gen_msa_bit(env
, ctx
)
28728 case OPC_MSA_3R_0D
:
28729 case OPC_MSA_3R_0E
:
28730 case OPC_MSA_3R_0F
:
28731 case OPC_MSA_3R_10
:
28732 case OPC_MSA_3R_11
:
28733 case OPC_MSA_3R_12
:
28734 case OPC_MSA_3R_13
:
28735 case OPC_MSA_3R_14
:
28736 case OPC_MSA_3R_15
:
28737 gen_msa_3r(env
, ctx
)
28740 gen_msa_elm(env
, ctx
)
28742 case OPC_MSA_3RF_1A
:
28743 case OPC_MSA_3RF_1B
:
28744 case OPC_MSA_3RF_1C
:
28745 gen_msa_3rf(env
, ctx
)
28748 gen_msa_vec(env
, ctx
)
/* LD.df / ST.df: effective address = GPR[rs] + (s10 << df). */
28759 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
28760 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
28761 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28762 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
28764 TCGv_i32 twd
= tcg_const_i32(wd
)
28765 TCGv taddr
= tcg_temp_new();
28766 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
)
28768 switch (MASK_MSA_MINOR(opcode
)) {
28770 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
)
28773 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
)
28776 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
)
28779 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
)
28782 gen_helper_msa_st_b(cpu_env
, twd
, taddr
)
28785 gen_helper_msa_st_h(cpu_env
, twd
, taddr
)
28788 gen_helper_msa_st_w(cpu_env
, twd
, taddr
)
28791 gen_helper_msa_st_d(cpu_env
, twd
, taddr
)
/* Release TCG temporaries for the load/store path. */
28795 tcg_temp_free_i32(twd
)
28796 tcg_temp_free(taddr
)
28800 MIPS_INVAL("MSA instruction");
28801 generate_exception_end(ctx
, EXCP_RI
)
28807 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
28810 int rs
, rt
, rd
, sa
;
28814 /* make sure instructions are on a word boundary */
28815 if (ctx
->base
.pc_next
& 0x3) {
28816 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
28817 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
28821 /* Handle blikely not taken case */
28822 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
28823 TCGLabel
*l1
= gen_new_label();
28825 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
28826 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
28827 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
28831 op
= MASK_OP_MAJOR(ctx
->opcode
);
28832 rs
= (ctx
->opcode
>> 21) & 0x1f;
28833 rt
= (ctx
->opcode
>> 16) & 0x1f;
28834 rd
= (ctx
->opcode
>> 11) & 0x1f;
28835 sa
= (ctx
->opcode
>> 6) & 0x1f;
28836 imm
= (int16_t)ctx
->opcode
;
28839 decode_opc_special(env
, ctx
);
28842 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
28843 decode_mmi(env
, ctx
);
28844 #if !defined(TARGET_MIPS64)
28845 } else if (ctx
->insn_flags
& ASE_MXU
) {
28846 decode_opc_mxu(env
, ctx
);
28849 decode_opc_special2_legacy(env
, ctx
);
28853 if (ctx
->insn_flags
& INSN_R5900
) {
28854 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
28856 decode_opc_special3(env
, ctx
);
28860 op1
= MASK_REGIMM(ctx
->opcode
);
28862 case OPC_BLTZL
: /* REGIMM branches */
28866 check_insn(ctx
, ISA_MIPS2
);
28867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28871 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28875 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28877 /* OPC_NAL, OPC_BAL */
28878 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
28880 generate_exception_end(ctx
, EXCP_RI
);
28883 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28886 case OPC_TGEI
: /* REGIMM traps */
28893 check_insn(ctx
, ISA_MIPS2
);
28894 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28895 gen_trap(ctx
, op1
, rs
, -1, imm
);
28898 check_insn(ctx
, ISA_MIPS32R6
);
28899 generate_exception_end(ctx
, EXCP_RI
);
28902 check_insn(ctx
, ISA_MIPS32R2
);
28903 /* Break the TB to be able to sync copied instructions
28905 ctx
->base
.is_jmp
= DISAS_STOP
;
28907 case OPC_BPOSGE32
: /* MIPS DSP branch */
28908 #if defined(TARGET_MIPS64)
28912 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
28914 #if defined(TARGET_MIPS64)
28916 check_insn(ctx
, ISA_MIPS32R6
);
28917 check_mips_64(ctx
);
28919 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
28923 check_insn(ctx
, ISA_MIPS32R6
);
28924 check_mips_64(ctx
);
28926 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
28930 default: /* Invalid */
28931 MIPS_INVAL("regimm");
28932 generate_exception_end(ctx
, EXCP_RI
);
28937 check_cp0_enabled(ctx
);
28938 op1
= MASK_CP0(ctx
->opcode
);
28946 #if defined(TARGET_MIPS64)
28950 #ifndef CONFIG_USER_ONLY
28951 gen_cp0(env
, ctx
, op1
, rt
, rd
);
28952 #endif /* !CONFIG_USER_ONLY */
28970 #ifndef CONFIG_USER_ONLY
28971 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
28972 #endif /* !CONFIG_USER_ONLY */
28975 #ifndef CONFIG_USER_ONLY
28978 TCGv t0
= tcg_temp_new();
28980 op2
= MASK_MFMC0(ctx
->opcode
);
28984 gen_helper_dmt(t0
);
28985 gen_store_gpr(t0
, rt
);
28989 gen_helper_emt(t0
);
28990 gen_store_gpr(t0
, rt
);
28994 gen_helper_dvpe(t0
, cpu_env
);
28995 gen_store_gpr(t0
, rt
);
28999 gen_helper_evpe(t0
, cpu_env
);
29000 gen_store_gpr(t0
, rt
);
29003 check_insn(ctx
, ISA_MIPS32R6
);
29005 gen_helper_dvp(t0
, cpu_env
);
29006 gen_store_gpr(t0
, rt
);
29010 check_insn(ctx
, ISA_MIPS32R6
);
29012 gen_helper_evp(t0
, cpu_env
);
29013 gen_store_gpr(t0
, rt
);
29017 check_insn(ctx
, ISA_MIPS32R2
);
29018 save_cpu_state(ctx
, 1);
29019 gen_helper_di(t0
, cpu_env
);
29020 gen_store_gpr(t0
, rt
);
29021 /* Stop translation as we may have switched
29022 the execution mode. */
29023 ctx
->base
.is_jmp
= DISAS_STOP
;
29026 check_insn(ctx
, ISA_MIPS32R2
);
29027 save_cpu_state(ctx
, 1);
29028 gen_helper_ei(t0
, cpu_env
);
29029 gen_store_gpr(t0
, rt
);
29030 /* DISAS_STOP isn't sufficient, we need to ensure we break
29031 out of translated code to check for pending interrupts */
29032 gen_save_pc(ctx
->base
.pc_next
+ 4);
29033 ctx
->base
.is_jmp
= DISAS_EXIT
;
29035 default: /* Invalid */
29036 MIPS_INVAL("mfmc0");
29037 generate_exception_end(ctx
, EXCP_RI
);
29042 #endif /* !CONFIG_USER_ONLY */
29045 check_insn(ctx
, ISA_MIPS32R2
);
29046 gen_load_srsgpr(rt
, rd
);
29049 check_insn(ctx
, ISA_MIPS32R2
);
29050 gen_store_srsgpr(rt
, rd
);
29054 generate_exception_end(ctx
, EXCP_RI
);
29058 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
29059 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29060 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
29061 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29064 /* Arithmetic with immediate opcode */
29065 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29069 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29071 case OPC_SLTI
: /* Set on less than with immediate opcode */
29073 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
29075 case OPC_ANDI
: /* Arithmetic with immediate opcode */
29076 case OPC_LUI
: /* OPC_AUI */
29079 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
29081 case OPC_J
: /* Jump */
29083 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29084 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29087 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
29088 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29090 generate_exception_end(ctx
, EXCP_RI
);
29093 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
29094 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29097 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29100 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
29101 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29103 generate_exception_end(ctx
, EXCP_RI
);
29106 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
29107 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29110 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29113 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
29116 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29118 check_insn(ctx
, ISA_MIPS32R6
);
29119 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
29120 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29123 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
29126 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29128 check_insn(ctx
, ISA_MIPS32R6
);
29129 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
29130 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29135 check_insn(ctx
, ISA_MIPS2
);
29136 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29140 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29142 case OPC_LL
: /* Load and stores */
29143 check_insn(ctx
, ISA_MIPS2
);
29144 if (ctx
->insn_flags
& INSN_R5900
) {
29145 check_insn_opc_user_only(ctx
, INSN_R5900
);
29150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29158 gen_ld(ctx
, op
, rt
, rs
, imm
);
29162 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29167 gen_st(ctx
, op
, rt
, rs
, imm
);
29170 check_insn(ctx
, ISA_MIPS2
);
29171 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29172 if (ctx
->insn_flags
& INSN_R5900
) {
29173 check_insn_opc_user_only(ctx
, INSN_R5900
);
29175 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
29178 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29179 check_cp0_enabled(ctx
);
29180 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
29181 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
29182 gen_cache_operation(ctx
, rt
, rs
, imm
);
29184 /* Treat as NOP. */
29187 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29188 if (ctx
->insn_flags
& INSN_R5900
) {
29189 /* Treat as NOP. */
29191 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
29192 /* Treat as NOP. */
29196 /* Floating point (COP1). */
29201 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
29205 op1
= MASK_CP1(ctx
->opcode
);
29210 check_cp1_enabled(ctx
);
29211 check_insn(ctx
, ISA_MIPS32R2
);
29217 check_cp1_enabled(ctx
);
29218 gen_cp1(ctx
, op1
, rt
, rd
);
29220 #if defined(TARGET_MIPS64)
29223 check_cp1_enabled(ctx
);
29224 check_insn(ctx
, ISA_MIPS3
);
29225 check_mips_64(ctx
);
29226 gen_cp1(ctx
, op1
, rt
, rd
);
29229 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
29230 check_cp1_enabled(ctx
);
29231 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29233 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29238 check_insn(ctx
, ASE_MIPS3D
);
29239 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29240 (rt
>> 2) & 0x7, imm
<< 2);
29244 check_cp1_enabled(ctx
);
29245 check_insn(ctx
, ISA_MIPS32R6
);
29246 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29250 check_cp1_enabled(ctx
);
29251 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29253 check_insn(ctx
, ASE_MIPS3D
);
29256 check_cp1_enabled(ctx
);
29257 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29258 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29259 (rt
>> 2) & 0x7, imm
<< 2);
29266 check_cp1_enabled(ctx
);
29267 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29273 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
29274 check_cp1_enabled(ctx
);
29275 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29277 case R6_OPC_CMP_AF_S
:
29278 case R6_OPC_CMP_UN_S
:
29279 case R6_OPC_CMP_EQ_S
:
29280 case R6_OPC_CMP_UEQ_S
:
29281 case R6_OPC_CMP_LT_S
:
29282 case R6_OPC_CMP_ULT_S
:
29283 case R6_OPC_CMP_LE_S
:
29284 case R6_OPC_CMP_ULE_S
:
29285 case R6_OPC_CMP_SAF_S
:
29286 case R6_OPC_CMP_SUN_S
:
29287 case R6_OPC_CMP_SEQ_S
:
29288 case R6_OPC_CMP_SEUQ_S
:
29289 case R6_OPC_CMP_SLT_S
:
29290 case R6_OPC_CMP_SULT_S
:
29291 case R6_OPC_CMP_SLE_S
:
29292 case R6_OPC_CMP_SULE_S
:
29293 case R6_OPC_CMP_OR_S
:
29294 case R6_OPC_CMP_UNE_S
:
29295 case R6_OPC_CMP_NE_S
:
29296 case R6_OPC_CMP_SOR_S
:
29297 case R6_OPC_CMP_SUNE_S
:
29298 case R6_OPC_CMP_SNE_S
:
29299 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29301 case R6_OPC_CMP_AF_D
:
29302 case R6_OPC_CMP_UN_D
:
29303 case R6_OPC_CMP_EQ_D
:
29304 case R6_OPC_CMP_UEQ_D
:
29305 case R6_OPC_CMP_LT_D
:
29306 case R6_OPC_CMP_ULT_D
:
29307 case R6_OPC_CMP_LE_D
:
29308 case R6_OPC_CMP_ULE_D
:
29309 case R6_OPC_CMP_SAF_D
:
29310 case R6_OPC_CMP_SUN_D
:
29311 case R6_OPC_CMP_SEQ_D
:
29312 case R6_OPC_CMP_SEUQ_D
:
29313 case R6_OPC_CMP_SLT_D
:
29314 case R6_OPC_CMP_SULT_D
:
29315 case R6_OPC_CMP_SLE_D
:
29316 case R6_OPC_CMP_SULE_D
:
29317 case R6_OPC_CMP_OR_D
:
29318 case R6_OPC_CMP_UNE_D
:
29319 case R6_OPC_CMP_NE_D
:
29320 case R6_OPC_CMP_SOR_D
:
29321 case R6_OPC_CMP_SUNE_D
:
29322 case R6_OPC_CMP_SNE_D
:
29323 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29326 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29327 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29332 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29347 check_insn(ctx
, ASE_MSA
);
29348 gen_msa_branch(env
, ctx
, op1
);
29352 generate_exception_end(ctx
, EXCP_RI
);
29357 /* Compact branches [R6] and COP2 [non-R6] */
29358 case OPC_BC
: /* OPC_LWC2 */
29359 case OPC_BALC
: /* OPC_SWC2 */
29360 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29361 /* OPC_BC, OPC_BALC */
29362 gen_compute_compact_branch(ctx
, op
, 0, 0,
29363 sextract32(ctx
->opcode
<< 2, 0, 28));
29365 /* OPC_LWC2, OPC_SWC2 */
29366 /* COP2: Not implemented. */
29367 generate_exception_err(ctx
, EXCP_CpU
, 2);
29370 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29371 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29372 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29374 /* OPC_BEQZC, OPC_BNEZC */
29375 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29376 sextract32(ctx
->opcode
<< 2, 0, 23));
29378 /* OPC_JIC, OPC_JIALC */
29379 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29382 /* OPC_LWC2, OPC_SWC2 */
29383 /* COP2: Not implemented. */
29384 generate_exception_err(ctx
, EXCP_CpU
, 2);
29388 check_insn(ctx
, INSN_LOONGSON2F
);
29389 /* Note that these instructions use different fields. */
29390 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29394 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29395 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29396 check_cp1_enabled(ctx
);
29397 op1
= MASK_CP3(ctx
->opcode
);
29401 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29407 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29408 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29411 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29412 /* Treat as NOP. */
29415 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29429 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29430 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29434 generate_exception_end(ctx
, EXCP_RI
);
29438 generate_exception_err(ctx
, EXCP_CpU
, 1);
29442 #if defined(TARGET_MIPS64)
29443 /* MIPS64 opcodes */
29445 if (ctx
->insn_flags
& INSN_R5900
) {
29446 check_insn_opc_user_only(ctx
, INSN_R5900
);
29451 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29455 check_insn(ctx
, ISA_MIPS3
);
29456 check_mips_64(ctx
);
29457 gen_ld(ctx
, op
, rt
, rs
, imm
);
29461 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29464 check_insn(ctx
, ISA_MIPS3
);
29465 check_mips_64(ctx
);
29466 gen_st(ctx
, op
, rt
, rs
, imm
);
29469 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29470 check_insn(ctx
, ISA_MIPS3
);
29471 if (ctx
->insn_flags
& INSN_R5900
) {
29472 check_insn_opc_user_only(ctx
, INSN_R5900
);
29474 check_mips_64(ctx
);
29475 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
29477 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29478 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29479 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29480 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29483 check_insn(ctx
, ISA_MIPS3
);
29484 check_mips_64(ctx
);
29485 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29489 check_insn(ctx
, ISA_MIPS3
);
29490 check_mips_64(ctx
);
29491 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29494 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29495 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29496 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29498 MIPS_INVAL("major opcode");
29499 generate_exception_end(ctx
, EXCP_RI
);
29503 case OPC_DAUI
: /* OPC_JALX */
29504 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29505 #if defined(TARGET_MIPS64)
29507 check_mips_64(ctx
);
29509 generate_exception(ctx
, EXCP_RI
);
29510 } else if (rt
!= 0) {
29511 TCGv t0
= tcg_temp_new();
29512 gen_load_gpr(t0
, rs
);
29513 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29517 generate_exception_end(ctx
, EXCP_RI
);
29518 MIPS_INVAL("major opcode");
29522 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29523 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29524 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29527 case OPC_MSA
: /* OPC_MDMX */
29528 if (ctx
->insn_flags
& INSN_R5900
) {
29529 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29531 /* MDMX: Not implemented. */
29536 check_insn(ctx
, ISA_MIPS32R6
);
29537 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29539 default: /* Invalid */
29540 MIPS_INVAL("major opcode");
29541 generate_exception_end(ctx
, EXCP_RI
);
29546 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
29548 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29549 CPUMIPSState
*env
= cs
->env_ptr
;
29551 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
29552 ctx
->saved_pc
= -1;
29553 ctx
->insn_flags
= env
->insn_flags
;
29554 ctx
->CP0_Config1
= env
->CP0_Config1
;
29555 ctx
->CP0_Config2
= env
->CP0_Config2
;
29556 ctx
->CP0_Config3
= env
->CP0_Config3
;
29557 ctx
->CP0_Config5
= env
->CP0_Config5
;
29559 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
29560 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
29561 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
29562 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
29563 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
29564 ctx
->PAMask
= env
->PAMask
;
29565 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
29566 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
29567 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
29568 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
29569 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
29570 /* Restore delay slot state from the tb context. */
29571 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
29572 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
29573 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
29574 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
29575 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
29576 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
29577 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
29578 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
29579 restore_cpu_state(env
, ctx
);
29580 #ifdef CONFIG_USER_ONLY
29581 ctx
->mem_idx
= MIPS_HFLAG_UM
;
29583 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
29585 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
29586 MO_UNALN
: MO_ALIGN
;
29588 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
29592 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29596 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29598 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29600 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
29604 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
29605 const CPUBreakpoint
*bp
)
29607 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29609 save_cpu_state(ctx
, 1);
29610 ctx
->base
.is_jmp
= DISAS_NORETURN
;
29611 gen_helper_raise_exception_debug(cpu_env
);
29612 /* The address covered by the breakpoint must be included in
29613 [tb->pc, tb->pc + tb->size) in order to for it to be
29614 properly cleared -- thus we increment the PC here so that
29615 the logic setting tb->size below does the right thing. */
29616 ctx
->base
.pc_next
+= 4;
29620 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
29622 CPUMIPSState
*env
= cs
->env_ptr
;
29623 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29627 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
29628 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
29629 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29630 insn_bytes
= decode_nanomips_opc(env
, ctx
);
29631 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
29632 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
29634 decode_opc(env
, ctx
);
29635 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
29636 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29637 insn_bytes
= decode_micromips_opc(env
, ctx
);
29638 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
29639 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29640 insn_bytes
= decode_mips16_opc(env
, ctx
);
29642 generate_exception_end(ctx
, EXCP_RI
);
29643 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
29647 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
29648 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
29649 MIPS_HFLAG_FBNSLOT
))) {
29650 /* force to generate branch as there is neither delay nor
29654 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
29655 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
29656 /* Force to generate branch as microMIPS R6 doesn't restrict
29657 branches in the forbidden slot. */
29662 gen_branch(ctx
, insn_bytes
);
29664 ctx
->base
.pc_next
+= insn_bytes
;
29666 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
29669 /* Execute a branch and its delay slot as a single instruction.
29670 This is what GDB expects and is consistent with what the
29671 hardware does (e.g. if a delay slot instruction faults, the
29672 reported PC is the PC of the branch). */
29673 if (ctx
->base
.singlestep_enabled
&&
29674 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
29675 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29677 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
29678 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29682 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
29684 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29686 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
29687 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
29688 gen_helper_raise_exception_debug(cpu_env
);
29690 switch (ctx
->base
.is_jmp
) {
29692 gen_save_pc(ctx
->base
.pc_next
);
29693 tcg_gen_lookup_and_goto_ptr();
29696 case DISAS_TOO_MANY
:
29697 save_cpu_state(ctx
, 0);
29698 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
29701 tcg_gen_exit_tb(NULL
, 0);
29703 case DISAS_NORETURN
:
29706 g_assert_not_reached();
29711 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
29713 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
29714 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
29717 static const TranslatorOps mips_tr_ops
= {
29718 .init_disas_context
= mips_tr_init_disas_context
,
29719 .tb_start
= mips_tr_tb_start
,
29720 .insn_start
= mips_tr_insn_start
,
29721 .breakpoint_check
= mips_tr_breakpoint_check
,
29722 .translate_insn
= mips_tr_translate_insn
,
29723 .tb_stop
= mips_tr_tb_stop
,
29724 .disas_log
= mips_tr_disas_log
,
29727 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
29731 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
29734 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
29738 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
29740 #define printfpr(fp) \
29743 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29744 " fd:%13g fs:%13g psu: %13g\n", \
29745 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
29746 (double)(fp)->fd, \
29747 (double)(fp)->fs[FP_ENDIAN_IDX], \
29748 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
29751 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
29752 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
29753 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29754 " fd:%13g fs:%13g psu:%13g\n", \
29755 tmp.w[FP_ENDIAN_IDX], tmp.d, \
29757 (double)tmp.fs[FP_ENDIAN_IDX], \
29758 (double)tmp.fs[!FP_ENDIAN_IDX]); \
29763 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
29764 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
29765 get_float_exception_flags(&env
->active_fpu
.fp_status
));
29766 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
29767 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
29768 printfpr(&env
->active_fpu
.fpr
[i
]);
29774 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
29777 MIPSCPU
*cpu
= MIPS_CPU(cs
);
29778 CPUMIPSState
*env
= &cpu
->env
;
29781 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
29782 " LO=0x" TARGET_FMT_lx
" ds %04x "
29783 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
29784 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
29785 env
->hflags
, env
->btarget
, env
->bcond
);
29786 for (i
= 0; i
< 32; i
++) {
29788 cpu_fprintf(f
, "GPR%02d:", i
);
29789 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
29791 cpu_fprintf(f
, "\n");
29794 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
29795 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
29796 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
29798 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
29799 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
29800 env
->CP0_Config2
, env
->CP0_Config3
);
29801 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
29802 env
->CP0_Config4
, env
->CP0_Config5
);
29803 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
29804 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
29808 void mips_tcg_init(void)
29813 for (i
= 1; i
< 32; i
++)
29814 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29815 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
29818 for (i
= 0; i
< 32; i
++) {
29819 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
29821 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
29822 /* The scalar floating-point unit (FPU) registers are mapped on
29823 * the MSA vector registers. */
29824 fpu_f64
[i
] = msa_wr_d
[i
* 2];
29825 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
29826 msa_wr_d
[i
* 2 + 1] =
29827 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
29830 cpu_PC
= tcg_global_mem_new(cpu_env
,
29831 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
29832 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
29833 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
29834 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
29836 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
29837 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
29840 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
29841 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
29843 bcond
= tcg_global_mem_new(cpu_env
,
29844 offsetof(CPUMIPSState
, bcond
), "bcond");
29845 btarget
= tcg_global_mem_new(cpu_env
,
29846 offsetof(CPUMIPSState
, btarget
), "btarget");
29847 hflags
= tcg_global_mem_new_i32(cpu_env
,
29848 offsetof(CPUMIPSState
, hflags
), "hflags");
29850 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
29851 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
29853 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
29854 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
29857 #if defined(TARGET_MIPS64)
29859 for (i
= 1; i
< 32; i
++) {
29860 cpu_mmr
[i
] = tcg_global_mem_new_i64(cpu_env
,
29861 offsetof(CPUMIPSState
,
29867 #if !defined(TARGET_MIPS64)
29868 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
29869 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29870 offsetof(CPUMIPSState
,
29871 active_tc
.mxu_gpr
[i
]),
29875 mxu_CR
= tcg_global_mem_new(cpu_env
,
29876 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
29877 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
29881 #include "translate_init.inc.c"
29883 void cpu_mips_realize_env(CPUMIPSState
*env
)
29885 env
->exception_base
= (int32_t)0xBFC00000;
29887 #ifndef CONFIG_USER_ONLY
29888 mmu_init(env
, env
->cpu_model
);
29890 fpu_init(env
, env
->cpu_model
);
29891 mvp_init(env
, env
->cpu_model
);
29894 bool cpu_supports_cps_smp(const char *cpu_type
)
29896 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29897 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
29900 bool cpu_supports_isa(const char *cpu_type
, uint64_t isa
)
29902 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29903 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
29906 void cpu_set_exception_base(int vp_index
, target_ulong address
)
29908 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
29909 vp
->env
.exception_base
= address
;
29912 void cpu_state_reset(CPUMIPSState
*env
)
29914 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
29915 CPUState
*cs
= CPU(cpu
);
29917 /* Reset registers to their default values */
29918 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
29919 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
29920 #ifdef TARGET_WORDS_BIGENDIAN
29921 env
->CP0_Config0
|= (1 << CP0C0_BE
);
29923 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
29924 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
29925 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
29926 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
29927 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
29928 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
29929 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
29930 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
29931 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
29932 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
29933 << env
->cpu_model
->CP0_LLAddr_shift
;
29934 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
29935 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
29936 env
->CCRes
= env
->cpu_model
->CCRes
;
29937 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
29938 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
29939 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
29940 env
->current_tc
= 0;
29941 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
29942 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
29943 #if defined(TARGET_MIPS64)
29944 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
29945 env
->SEGMask
|= 3ULL << 62;
29948 env
->PABITS
= env
->cpu_model
->PABITS
;
29949 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
29950 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
29951 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
29952 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
29953 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
29954 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
29955 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
29956 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
29957 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
29958 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
29959 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
29960 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
29961 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
29962 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
29963 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
29964 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
29965 env
->msair
= env
->cpu_model
->MSAIR
;
29966 env
->insn_flags
= env
->cpu_model
->insn_flags
;
29968 #if defined(CONFIG_USER_ONLY)
29969 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
29970 # ifdef TARGET_MIPS64
29971 /* Enable 64-bit register mode. */
29972 env
->CP0_Status
|= (1 << CP0St_PX
);
29974 # ifdef TARGET_ABI_MIPSN64
29975 /* Enable 64-bit address mode. */
29976 env
->CP0_Status
|= (1 << CP0St_UX
);
29978 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
29979 hardware registers. */
29980 env
->CP0_HWREna
|= 0x0000000F;
29981 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
29982 env
->CP0_Status
|= (1 << CP0St_CU1
);
29984 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
29985 env
->CP0_Status
|= (1 << CP0St_MX
);
29987 # if defined(TARGET_MIPS64)
29988 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
29989 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
29990 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
29991 env
->CP0_Status
|= (1 << CP0St_FR
);
29995 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
29996 /* If the exception was raised from a delay slot,
29997 come back to the jump. */
29998 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
29999 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
30001 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
30003 env
->active_tc
.PC
= env
->exception_base
;
30004 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
30005 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
30006 env
->CP0_Wired
= 0;
30007 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
30008 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
30009 if (mips_um_ksegs_enabled()) {
30010 env
->CP0_EBase
|= 0x40000000;
30012 env
->CP0_EBase
|= (int32_t)0x80000000;
30014 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
30015 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
30017 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
30019 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
30020 /* vectored interrupts not implemented, timer on int 7,
30021 no performance counters. */
30022 env
->CP0_IntCtl
= 0xe0000000;
30026 for (i
= 0; i
< 7; i
++) {
30027 env
->CP0_WatchLo
[i
] = 0;
30028 env
->CP0_WatchHi
[i
] = 0x80000000;
30030 env
->CP0_WatchLo
[7] = 0;
30031 env
->CP0_WatchHi
[7] = 0;
30033 /* Count register increments in debug mode, EJTAG version 1 */
30034 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
30036 cpu_mips_store_count(env
, 1);
30038 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
30041 /* Only TC0 on VPE 0 starts as active. */
30042 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
30043 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
30044 env
->tcs
[i
].CP0_TCHalt
= 1;
30046 env
->active_tc
.CP0_TCHalt
= 1;
30049 if (cs
->cpu_index
== 0) {
30050 /* VPE0 starts up enabled. */
30051 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
30052 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
30054 /* TC0 starts up unhalted. */
30056 env
->active_tc
.CP0_TCHalt
= 0;
30057 env
->tcs
[0].CP0_TCHalt
= 0;
30058 /* With thread 0 active. */
30059 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
30060 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
30065 * Configure default legacy segmentation control. We use this regardless of
30066 * whether segmentation control is presented to the guest.
30068 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
30069 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
30070 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
30071 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
30072 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
30073 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30075 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
30076 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30077 (3 << CP0SC_C
)) << 16;
30078 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
30079 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30080 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
30081 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
30082 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30083 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
30084 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
30085 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
30087 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
30088 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
30089 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
30090 env
->CP0_Status
|= (1 << CP0St_FR
);
30093 if (env
->insn_flags
& ISA_MIPS32R6
) {
30095 env
->CP0_PWSize
= 0x40;
30101 env
->CP0_PWField
= 0x0C30C302;
30108 env
->CP0_PWField
= 0x02;
30111 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
30112 /* microMIPS on reset when Config3.ISA is 3 */
30113 env
->hflags
|= MIPS_HFLAG_M16
;
30117 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
30121 compute_hflags(env
);
30122 restore_fp_status(env
);
30123 restore_pamask(env
);
30124 cs
->exception_index
= EXCP_NONE
;
30126 if (semihosting_get_argc()) {
30127 /* UHI interface can be used to obtain argc and argv */
30128 env
->active_tc
.gpr
[4] = -1;
30132 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
30133 target_ulong
*data
)
30135 env
->active_tc
.PC
= data
[0];
30136 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
30137 env
->hflags
|= data
[1];
30138 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
30139 case MIPS_HFLAG_BR
:
30141 case MIPS_HFLAG_BC
:
30142 case MIPS_HFLAG_BL
:
30144 env
->btarget
= data
[2];