/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "hw/semihosting/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
41 #include "qemu/qemu-print.h"
/* Set to 1 to enable verbose disassembly debug output. */
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: bits 31..26 of the instruction word. */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))
/* Major opcode values (already shifted into bits 31..26). */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26), /* NOTE(review): restored from upstream — confirm */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX, /* NOTE(review): restored from upstream — confirm */
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
155 /* PC-relative address computation / loads */
156 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
157 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
159 /* Instructions determined by bits 19 and 20 */
160 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
161 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
162 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
164 /* Instructions determined by bits 16 ... 20 */
165 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
166 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
169 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
172 /* MIPS special opcodes */
173 #define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
177 OPC_SLL
= 0x00 | OPC_SPECIAL
,
178 /* NOP is SLL r0, r0, 0 */
179 /* SSNOP is SLL r0, r0, 1 */
180 /* EHB is SLL r0, r0, 3 */
181 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
182 OPC_ROTR
= OPC_SRL
| (1 << 21),
183 OPC_SRA
= 0x03 | OPC_SPECIAL
,
184 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
185 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
186 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
187 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
188 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
189 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
190 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
191 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
192 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
193 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
194 OPC_DROTR
= OPC_DSRL
| (1 << 21),
195 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
196 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
197 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
198 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
199 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
200 /* Multiplication / division */
201 OPC_MULT
= 0x18 | OPC_SPECIAL
,
202 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
203 OPC_DIV
= 0x1A | OPC_SPECIAL
,
204 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
205 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
206 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
207 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
208 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
210 /* 2 registers arithmetic / logic */
211 OPC_ADD
= 0x20 | OPC_SPECIAL
,
212 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
213 OPC_SUB
= 0x22 | OPC_SPECIAL
,
214 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
215 OPC_AND
= 0x24 | OPC_SPECIAL
,
216 OPC_OR
= 0x25 | OPC_SPECIAL
,
217 OPC_XOR
= 0x26 | OPC_SPECIAL
,
218 OPC_NOR
= 0x27 | OPC_SPECIAL
,
219 OPC_SLT
= 0x2A | OPC_SPECIAL
,
220 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
221 OPC_DADD
= 0x2C | OPC_SPECIAL
,
222 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
223 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
224 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
226 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
227 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
229 OPC_TGE
= 0x30 | OPC_SPECIAL
,
230 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
231 OPC_TLT
= 0x32 | OPC_SPECIAL
,
232 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
233 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
234 OPC_TNE
= 0x36 | OPC_SPECIAL
,
235 /* HI / LO registers load & stores */
236 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
237 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
238 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
239 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
240 /* Conditional moves */
241 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
242 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
244 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
245 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
247 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
250 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
251 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
252 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
253 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
254 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
256 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
257 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
258 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
259 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
263 * R6 Multiply and Divide instructions have the same opcode
264 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
266 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
269 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
270 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
271 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
272 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
273 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
274 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
275 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
276 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
278 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
279 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
280 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
281 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
282 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
283 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
284 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
285 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
287 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
288 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
289 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
290 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
291 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
293 OPC_LSA
= 0x05 | OPC_SPECIAL
,
294 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
297 /* Multiplication variants of the vr54xx. */
298 #define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | (op & (0x1F << 6)))
301 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
302 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
303 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
304 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
305 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
306 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
307 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
308 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
309 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
310 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
311 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
312 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
313 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
314 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
317 /* REGIMM (rt field) opcodes */
318 #define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
321 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
322 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
323 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
324 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
325 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
326 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
327 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
328 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
329 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
330 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
331 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
332 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
333 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
334 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
335 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
336 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
338 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
339 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
342 /* Special2 opcodes */
343 #define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
346 /* Multiply & xxx operations */
347 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
348 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
349 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
350 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
351 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
353 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
354 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
355 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
356 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
357 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
358 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
359 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
360 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
361 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
362 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
363 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
364 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
366 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
367 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
368 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
369 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
371 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
374 /* Special3 opcodes */
375 #define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
378 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
379 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
380 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
381 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
382 OPC_INS
= 0x04 | OPC_SPECIAL3
,
383 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
384 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
385 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
386 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
387 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
388 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
389 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
390 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
393 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
394 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
395 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
396 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
397 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
398 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
399 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
400 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
401 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
402 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
403 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
404 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
407 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
408 /* MIPS DSP Arithmetic */
409 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
410 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
411 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
412 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
413 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
414 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
415 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
416 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
417 /* MIPS DSP GPR-Based Shift Sub-class */
418 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
419 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
420 /* MIPS DSP Multiply Sub-class insns */
421 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
422 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
423 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
424 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
425 /* DSP Bit/Manipulation Sub-class */
426 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
427 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
428 /* MIPS DSP Append Sub-class */
429 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
430 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
431 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
432 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
433 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
436 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
437 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
438 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
439 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
440 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
441 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
442 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
443 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
444 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
445 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
446 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
447 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
448 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
449 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
450 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
451 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
454 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
455 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
456 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
457 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
458 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
459 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
463 #define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
466 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
467 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
468 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
469 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
470 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
471 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
472 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
473 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
477 #define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
480 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
481 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
482 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
483 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
487 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
488 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
489 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
490 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
493 /* MIPS DSP REGIMM opcodes */
495 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
496 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
499 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
502 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
503 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
504 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
505 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
508 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
510 /* MIPS DSP Arithmetic Sub-class */
511 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
522 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
525 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
526 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
527 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
529 /* MIPS DSP Multiply Sub-class insns */
530 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
533 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
534 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
535 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
538 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
539 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
541 /* MIPS DSP Arithmetic Sub-class */
542 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
551 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
552 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
554 /* MIPS DSP Multiply Sub-class insns */
555 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
556 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
557 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
558 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
561 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
563 /* MIPS DSP Arithmetic Sub-class */
564 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
574 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
575 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
577 /* DSP Bit/Manipulation Sub-class */
578 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
580 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
581 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
582 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
585 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
587 /* MIPS DSP Arithmetic Sub-class */
588 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
595 /* DSP Compare-Pick Sub-class */
596 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
608 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
609 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
610 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
613 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
615 /* MIPS DSP GPR-Based Shift Sub-class */
616 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
635 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
636 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
637 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
640 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Multiply Sub-class insns */
643 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
662 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
663 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
664 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
667 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
669 /* DSP Bit/Manipulation Sub-class */
670 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
673 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
675 /* MIPS DSP Append Sub-class */
676 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
677 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
678 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
681 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
683 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
684 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
693 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
694 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
695 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
696 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
697 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
698 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
699 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
700 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
703 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
705 /* MIPS DSP Arithmetic Sub-class */
706 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
720 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
721 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
723 /* DSP Bit/Manipulation Sub-class */
724 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
727 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
728 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
729 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
732 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
734 /* MIPS DSP Multiply Sub-class insns */
735 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
737 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
740 /* MIPS DSP Arithmetic Sub-class */
741 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
749 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
750 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
759 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
760 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
761 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
764 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Compare-Pick Sub-class */
767 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
784 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
786 /* MIPS DSP Arithmetic Sub-class */
787 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
793 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
794 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
797 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
799 /* DSP Append Sub-class */
800 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
801 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
802 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
803 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
806 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
808 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
809 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
827 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
828 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
829 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
832 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
834 /* DSP Bit/Manipulation Sub-class */
835 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
838 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
840 /* MIPS DSP Multiply Sub-class insns */
841 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
864 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
865 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
866 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
869 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
871 /* MIPS DSP GPR-Based Shift Sub-class */
872 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
895 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
896 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
897 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
900 /* Coprocessor 0 (rs field) */
901 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
904 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
905 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
906 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
907 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
908 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
909 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
910 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
911 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
912 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
913 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
914 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
915 OPC_C0
= (0x10 << 21) | OPC_CP0
,
916 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
917 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
918 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
919 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
920 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
921 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
922 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
923 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
924 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
925 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
926 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
927 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
928 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
929 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
930 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
934 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
937 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
938 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
939 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
940 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
941 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
942 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
943 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
944 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
947 /* Coprocessor 0 (with rs == C0) */
948 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
951 OPC_TLBR
= 0x01 | OPC_C0
,
952 OPC_TLBWI
= 0x02 | OPC_C0
,
953 OPC_TLBINV
= 0x03 | OPC_C0
,
954 OPC_TLBINVF
= 0x04 | OPC_C0
,
955 OPC_TLBWR
= 0x06 | OPC_C0
,
956 OPC_TLBP
= 0x08 | OPC_C0
,
957 OPC_RFE
= 0x10 | OPC_C0
,
958 OPC_ERET
= 0x18 | OPC_C0
,
959 OPC_DERET
= 0x1F | OPC_C0
,
960 OPC_WAIT
= 0x20 | OPC_C0
,
963 /* Coprocessor 1 (rs field) */
964 #define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
966 /* Values for the fmt field in FP instructions */
968 /* 0 - 15 are reserved */
969 FMT_S
= 16, /* single fp */
970 FMT_D
= 17, /* double fp */
971 FMT_E
= 18, /* extended fp */
972 FMT_Q
= 19, /* quad fp */
973 FMT_W
= 20, /* 32-bit fixed */
974 FMT_L
= 21, /* 64-bit fixed */
975 FMT_PS
= 22, /* paired single fp */
976 /* 23 - 31 are reserved */
980 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
981 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
982 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
983 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
984 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
985 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
986 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
987 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
988 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
989 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
991 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
992 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
993 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
994 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
995 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
996 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
997 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
998 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
999 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
1000 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
1001 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
1002 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1003 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1004 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1005 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1006 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1007 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1008 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1009 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1012 #define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
1013 #define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1016 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1017 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1018 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1019 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1023 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1024 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1028 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1029 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1032 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1035 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1036 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1037 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1038 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1039 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1040 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1041 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1042 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1043 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1044 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1045 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1048 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1051 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1056 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1058 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1060 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1065 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1067 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1069 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1070 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1071 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1072 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1073 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1074 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1075 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1076 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1078 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1083 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1085 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1087 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1090 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1092 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1094 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1097 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1099 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1101 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1104 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1106 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1108 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1111 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1113 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1115 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1118 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1120 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1122 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1125 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1127 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1129 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1132 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1134 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1136 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1139 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1140 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1141 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1145 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1148 OPC_LWXC1
= 0x00 | OPC_CP3
,
1149 OPC_LDXC1
= 0x01 | OPC_CP3
,
1150 OPC_LUXC1
= 0x05 | OPC_CP3
,
1151 OPC_SWXC1
= 0x08 | OPC_CP3
,
1152 OPC_SDXC1
= 0x09 | OPC_CP3
,
1153 OPC_SUXC1
= 0x0D | OPC_CP3
,
1154 OPC_PREFX
= 0x0F | OPC_CP3
,
1155 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1156 OPC_MADD_S
= 0x20 | OPC_CP3
,
1157 OPC_MADD_D
= 0x21 | OPC_CP3
,
1158 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1159 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1160 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1161 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1162 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1163 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1164 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1165 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1166 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1167 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1171 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1173 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1174 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1175 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1176 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1177 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1178 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1179 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1180 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1181 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1182 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1183 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1184 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1185 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1186 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1187 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1188 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1189 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1190 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1191 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1192 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1193 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1195 /* MI10 instruction */
1196 OPC_LD_B
= (0x20) | OPC_MSA
,
1197 OPC_LD_H
= (0x21) | OPC_MSA
,
1198 OPC_LD_W
= (0x22) | OPC_MSA
,
1199 OPC_LD_D
= (0x23) | OPC_MSA
,
1200 OPC_ST_B
= (0x24) | OPC_MSA
,
1201 OPC_ST_H
= (0x25) | OPC_MSA
,
1202 OPC_ST_W
= (0x26) | OPC_MSA
,
1203 OPC_ST_D
= (0x27) | OPC_MSA
,
1207 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1208 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1209 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1210 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1211 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1212 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1213 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1214 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1215 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1216 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1217 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1218 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1219 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1221 /* I8 instruction */
1222 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1225 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1226 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1228 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1229 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1230 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1231 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1233 /* VEC/2R/2RF instruction */
1234 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1235 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1236 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1237 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1238 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1239 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1240 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1242 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1243 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1245 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1246 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1247 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1248 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1249 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1251 /* 2RF instruction df(bit 16) = _w, _d */
1252 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1253 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1254 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1255 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1256 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1257 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1258 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1259 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1260 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1261 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1262 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1263 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1264 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1265 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1266 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1267 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1269 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1270 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1271 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1272 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1273 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1274 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1275 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1276 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1277 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1278 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1279 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1280 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1281 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1282 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1283 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1284 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1285 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1286 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1287 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1288 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1289 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1290 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1291 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1292 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1293 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1294 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1295 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1296 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1297 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1298 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1299 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1300 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1301 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1302 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1303 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1304 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1305 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1306 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1307 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1308 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1309 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1310 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1311 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1312 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1313 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1314 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1315 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1316 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1317 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1318 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1319 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1320 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1321 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1322 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1323 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1324 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1325 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1326 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1327 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1328 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1329 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1330 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1331 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1332 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1334 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1335 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1336 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1337 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1338 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1339 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1341 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1343 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1345 /* 3RF instruction _df(bit 21) = _w, _d */
1346 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1362 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1364 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1367 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1368 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1369 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1370 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1373 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1384 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1385 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1386 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1388 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1389 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1390 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1391 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1392 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1393 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1394 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1395 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1396 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1397 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1398 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1399 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1400 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1406 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1407 * ============================================
1410 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1411 * instructions set. It is designed to fit the needs of signal, graphical and
1412 * video processing applications. MXU instruction set is used in Xburst family
1413 * of microprocessors by Ingenic.
1415 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1416 * the control register.
1419 * The notation used in MXU assembler mnemonics
1420 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1422 * Register operands:
1424 * XRa, XRb, XRc, XRd - MXU registers
1425 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1427 * Non-register operands:
1429 * aptn1 - 1-bit accumulate add/subtract pattern
1430 * aptn2 - 2-bit accumulate add/subtract pattern
1431 * eptn2 - 2-bit execute add/subtract pattern
1432 * optn2 - 2-bit operand pattern
1433 * optn3 - 3-bit operand pattern
1434 * sft4 - 4-bit shift amount
1435 * strd2 - 2-bit stride amount
1439 * Level of parallelism: Operand size:
1440 * S - single operation at a time 32 - word
1441 * D - two operations in parallel 16 - half word
1442 * Q - four operations in parallel 8 - byte
1446 * ADD - Add or subtract
1447 * ADDC - Add with carry-in
1449 * ASUM - Sum together then accumulate (add or subtract)
1450 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1451 * AVG - Average between 2 operands
1452 * ABD - Absolute difference
1454 * AND - Logical bitwise 'and' operation
1456 * EXTR - Extract bits
1457 * I2M - Move from GPR register to MXU register
1458 * LDD - Load data from memory to XRF
1459 * LDI - Load data from memory to XRF (and increase the address base)
1460 * LUI - Load unsigned immediate
1462 * MULU - Unsigned multiply
1463 * MADD - 64-bit operand add 32x32 product
1464 * MSUB - 64-bit operand subtract 32x32 product
1465 * MAC - Multiply and accumulate (add or subtract)
1466 * MAD - Multiply and add or subtract
1467 * MAX - Maximum between 2 operands
1468 * MIN - Minimum between 2 operands
1469 * M2I - Move from MXU register to GPR register
1470 * MOVZ - Move if zero
1471 * MOVN - Move if non-zero
1472 * NOR - Logical bitwise 'nor' operation
1473 * OR - Logical bitwise 'or' operation
1474 * STD - Store data from XRF to memory
1475 * SDI - Store data from XRF to memory (and increase the address base)
1476 * SLT - Set of less than comparison
1477 * SAD - Sum of absolute differences
1478 * SLL - Logical shift left
1479 * SLR - Logical shift right
1480 * SAR - Arithmetic shift right
 * SCOP - Calculate x's scope (-1, means x < 0; 0, means x == 0; 1, means x > 0)
1484 * XOR - Logical bitwise 'exclusive or' operation
1488 * E - Expand results
1489 * F - Fixed point multiplication
1490 * L - Low part result
1491 * R - Doing rounding
1492 * V - Variable instead of immediate
1493 * W - Combine above L and V
1496 * The list of MXU instructions grouped by functionality
1497 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1499 * Load/Store instructions Multiplication instructions
1500 * ----------------------- ---------------------------
1502 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1503 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1504 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1505 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1506 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1507 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1508 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1509 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1510 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1511 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1513 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1514 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1515 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1516 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1517 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1518 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1519 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1520 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1521 * S16SDI XRa, Rb, s10, eptn2
1522 * S8LDD XRa, Rb, s8, eptn3
1523 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1524 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1525 * S8SDI XRa, Rb, s8, eptn3
1526 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1527 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1528 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1529 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1530 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1531 * S32CPS XRa, XRb, XRc
1532 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1533 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1534 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1535 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1536 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1537 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1538 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1539 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1540 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1541 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1542 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1543 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1544 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1545 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1546 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1547 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1548 * Q8SLT XRa, XRb, XRc
1549 * Q8SLTU XRa, XRb, XRc
1550 * Q8MOVZ XRa, XRb, XRc Shift instructions
1551 * Q8MOVN XRa, XRb, XRc ------------------
1553 * D32SLL XRa, XRb, XRc, XRd, sft4
1554 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1555 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1556 * D32SARL XRa, XRb, XRc, sft4
1557 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1558 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1559 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1560 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1561 * Q16SLL XRa, XRb, XRc, XRd, sft4
1562 * Q16SLR XRa, XRb, XRc, XRd, sft4
1563 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1564 * ------------------------- Q16SLLV XRa, XRb, Rb
1565 * Q16SLRV XRa, XRb, Rb
1566 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1567 * S32ALN XRa, XRb, XRc, Rb
1568 * S32ALNI XRa, XRb, XRc, s3
1569 * S32LUI XRa, s8, optn3 Move instructions
1570 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1571 * S32EXTRV XRa, XRb, Rs, Rt
1572 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1573 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1576 * The opcode organization of MXU instructions
1577 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1579 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1580 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1581 * other bits up to the instruction level is as follows:
1586 * ┌─ 000000 ─ OPC_MXU_S32MADD
1587 * ├─ 000001 ─ OPC_MXU_S32MADDU
1588 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1591 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1592 * │ ├─ 001 ─ OPC_MXU_S32MIN
1593 * │ ├─ 010 ─ OPC_MXU_D16MAX
1594 * │ ├─ 011 ─ OPC_MXU_D16MIN
1595 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1596 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1597 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1598 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1599 * ├─ 000100 ─ OPC_MXU_S32MSUB
1600 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1601 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1602 * │ ├─ 001 ─ OPC_MXU_D16SLT
1603 * │ ├─ 010 ─ OPC_MXU_D16AVG
1604 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1605 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1606 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1607 * │ └─ 111 ─ OPC_MXU_Q8ADD
1610 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1611 * │ ├─ 010 ─ OPC_MXU_D16CPS
1612 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1613 * │ └─ 110 ─ OPC_MXU_Q16SAT
1614 * ├─ 001000 ─ OPC_MXU_D16MUL
1616 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1617 * │ └─ 01 ─ OPC_MXU_D16MULE
1618 * ├─ 001010 ─ OPC_MXU_D16MAC
1619 * ├─ 001011 ─ OPC_MXU_D16MACF
1620 * ├─ 001100 ─ OPC_MXU_D16MADL
1621 * ├─ 001101 ─ OPC_MXU_S16MAD
1622 * ├─ 001110 ─ OPC_MXU_Q16ADD
1623 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1624 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1625 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1628 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1629 * │ └─ 1 ─ OPC_MXU_S32STDR
1632 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1633 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1636 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1637 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1640 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1641 * │ └─ 1 ─ OPC_MXU_S32LDIR
1644 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1645 * │ └─ 1 ─ OPC_MXU_S32SDIR
1648 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1649 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1652 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1653 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1654 * ├─ 011000 ─ OPC_MXU_D32ADD
1656 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1657 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1658 * │ └─ 10 ─ OPC_MXU_D32ASUM
1659 * ├─ 011010 ─ <not assigned>
1661 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1662 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1663 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1666 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1667 * │ ├─ 01 ─ OPC_MXU_D8SUM
1668 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1669 * ├─ 011110 ─ <not assigned>
1670 * ├─ 011111 ─ <not assigned>
1671 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1672 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1673 * ├─ 100010 ─ OPC_MXU_S8LDD
1674 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1675 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1676 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
1677 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1678 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1681 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1682 * │ ├─ 001 ─ OPC_MXU_S32ALN
1683 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1684 * │ ├─ 011 ─ OPC_MXU_S32LUI
1685 * │ ├─ 100 ─ OPC_MXU_S32NOR
1686 * │ ├─ 101 ─ OPC_MXU_S32AND
1687 * │ ├─ 110 ─ OPC_MXU_S32OR
1688 * │ └─ 111 ─ OPC_MXU_S32XOR
1691 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1692 * │ ├─ 001 ─ OPC_MXU_LXH
1693 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1694 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1695 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1696 * ├─ 101100 ─ OPC_MXU_S16LDI
1697 * ├─ 101101 ─ OPC_MXU_S16SDI
1698 * ├─ 101110 ─ OPC_MXU_S32M2I
1699 * ├─ 101111 ─ OPC_MXU_S32I2M
1700 * ├─ 110000 ─ OPC_MXU_D32SLL
1701 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1702 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1703 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1704 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 011 ─ OPC_MXU_D32SARV
1705 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 100 ─ OPC_MXU_Q16SLLV
1706 * │ ├─ 101 ─ OPC_MXU_Q16SLRV
1707 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 111 ─ OPC_MXU_Q16SARV
1709 * ├─ 110111 ─ OPC_MXU_Q16SAR
1711 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1712 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1715 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1716 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1717 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1718 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1719 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1720 * │ └─ 101 ─ OPC_MXU_S32MOVN
1723 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1724 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1725 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1726 * ├─ 111100 ─ OPC_MXU_Q8MADL
1727 * ├─ 111101 ─ OPC_MXU_S32SFL
1728 * ├─ 111110 ─ OPC_MXU_Q8SAD
1729 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1734 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1735 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
/*
 * MXU major opcodes, held in instruction bits 5..0 (the minor opcode
 * field under the SPECIAL2 major opcode).  OPC__MXU_MUL (note the double
 * underscore) marks the slot occupied by the non-MXU MUL instruction.
 * OPC_MXU__POOLnn entries select the sub-opcode pools decoded below.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E..0x21 (0x20/0x21 overlap with CLZ/CLO) */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F (overlaps with SDBBP) */
};
/*
 * MXU sub-opcode pools, one enum per OPC_MXU__POOLnn entry above.
 * The field widths and positions are given in the opcode tree comment.
 *
 * Fix: OPC_MXU_Q8MACSU was 0x01, but the opcode tree encodes it as
 * binary "10" (= 0x02); 0x02 also matches the Ingenic MXU manual and
 * upstream QEMU.  Pool 17 is completed with LXB/LXH/LXW so the enum
 * agrees with the tree (000/001/011).
 */

/* MXU pool 00 minor opcodes. */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU pool 01 minor opcodes (0x06 is not assigned). */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU pool 02 minor opcodes (odd values are not assigned). */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU pool 03 minor opcodes. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU pool 04 minor opcodes. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU pool 05 minor opcodes. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU pool 06 minor opcodes. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU pool 07 minor opcodes. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU pool 08 minor opcodes. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU pool 09 minor opcodes. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU pool 10 minor opcodes. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU pool 11 minor opcodes. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU pool 12 minor opcodes. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU pool 13 minor opcodes. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU pool 14 minor opcodes. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU pool 15 minor opcodes. */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU pool 16 minor opcodes. */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32LUI   = 0x03,
    OPC_MXU_S32NOR   = 0x04,
    OPC_MXU_S32AND   = 0x05,
    OPC_MXU_S32OR    = 0x06,
    OPC_MXU_S32XOR   = 0x07,
};

/* MXU pool 17 minor opcodes (0x02 is not assigned). */
enum {
    OPC_MXU_LXB      = 0x00,
    OPC_MXU_LXH      = 0x01,
    OPC_MXU_LXW      = 0x03,
    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,
};

/* MXU pool 18 minor opcodes (0x02 and 0x06 are not assigned). */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU pool 19 minor opcodes. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU pool 20 minor opcodes. */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/* MXU pool 21 minor opcodes. */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x02,   /* was 0x01; tree encodes "10" = 0x02 */
};
2018 * Overview of the TX79-specific instruction set
2019 * =============================================
2021 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2022 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2023 * instructions and certain multimedia instructions (MMIs). These MMIs
2024 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2025 * or sixteen 8-bit paths.
2029 * The Toshiba TX System RISC TX79 Core Architecture manual,
2030 * https://wiki.qemu.org/File:C790.pdf
2032 * Three-Operand Multiply and Multiply-Add (4 instructions)
2033 * --------------------------------------------------------
2034 * MADD [rd,] rs, rt Multiply/Add
2035 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2036 * MULT [rd,] rs, rt Multiply (3-operand)
2037 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2039 * Multiply Instructions for Pipeline 1 (10 instructions)
2040 * ------------------------------------------------------
2041 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2042 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2043 * DIV1 rs, rt Divide Pipeline 1
2044 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2045 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2046 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2047 * MFHI1 rd Move From HI1 Register
2048 * MFLO1 rd Move From LO1 Register
2049 * MTHI1 rs Move To HI1 Register
2050 * MTLO1 rs Move To LO1 Register
2052 * Arithmetic (19 instructions)
2053 * ----------------------------
2054 * PADDB rd, rs, rt Parallel Add Byte
2055 * PSUBB rd, rs, rt Parallel Subtract Byte
2056 * PADDH rd, rs, rt Parallel Add Halfword
2057 * PSUBH rd, rs, rt Parallel Subtract Halfword
2058 * PADDW rd, rs, rt Parallel Add Word
2059 * PSUBW rd, rs, rt Parallel Subtract Word
2060 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2061 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2062 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2063 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2064 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2065 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2066 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2067 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2068 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2069 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2070 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2071 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2072 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2074 * Min/Max (4 instructions)
2075 * ------------------------
2076 * PMAXH rd, rs, rt Parallel Maximum Halfword
2077 * PMINH rd, rs, rt Parallel Minimum Halfword
2078 * PMAXW rd, rs, rt Parallel Maximum Word
2079 * PMINW rd, rs, rt Parallel Minimum Word
2081 * Absolute (2 instructions)
2082 * -------------------------
2083 * PABSH rd, rt Parallel Absolute Halfword
2084 * PABSW rd, rt Parallel Absolute Word
2086 * Logical (4 instructions)
2087 * ------------------------
2088 * PAND rd, rs, rt Parallel AND
2089 * POR rd, rs, rt Parallel OR
2090 * PXOR rd, rs, rt Parallel XOR
2091 * PNOR rd, rs, rt Parallel NOR
2093 * Shift (9 instructions)
2094 * ----------------------
2095 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2096 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2097 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2098 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2099 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2100 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2101 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2102 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2103 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2105 * Compare (6 instructions)
2106 * ------------------------
2107 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2108 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2109 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2110 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2111 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2112 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2114 * LZC (1 instruction)
2115 * -------------------
2116 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2118 * Quadword Load and Store (2 instructions)
2119 * ----------------------------------------
2120 * LQ rt, offset(base) Load Quadword
2121 * SQ rt, offset(base) Store Quadword
2123 * Multiply and Divide (19 instructions)
2124 * -------------------------------------
2125 * PMULTW rd, rs, rt Parallel Multiply Word
2126 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2127 * PDIVW rs, rt Parallel Divide Word
2128 * PDIVUW rs, rt Parallel Divide Unsigned Word
2129 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2130 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2131 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2132 * PMULTH rd, rs, rt Parallel Multiply Halfword
2133 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2134 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2135 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2136 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2137 * PDIVBW rs, rt Parallel Divide Broadcast Word
2138 * PMFHI rd Parallel Move From HI Register
2139 * PMFLO rd Parallel Move From LO Register
2140 * PMTHI rs Parallel Move To HI Register
2141 * PMTLO rs Parallel Move To LO Register
2142 * PMFHL rd Parallel Move From HI/LO Register
2143 * PMTHL rs Parallel Move To HI/LO Register
2145 * Pack/Extend (11 instructions)
2146 * -----------------------------
2147 * PPAC5 rd, rt Parallel Pack to 5 bits
2148 * PPACB rd, rs, rt Parallel Pack to Byte
2149 * PPACH rd, rs, rt Parallel Pack to Halfword
2150 * PPACW rd, rs, rt Parallel Pack to Word
2151 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2152 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2153 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2154 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2155 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2156 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2157 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2159 * Others (16 instructions)
2160 * ------------------------
2161 * PCPYH rd, rt Parallel Copy Halfword
2162 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2163 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2164 * PREVH rd, rt Parallel Reverse Halfword
2165 * PINTH rd, rs, rt Parallel Interleave Halfword
2166 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2167 * PEXEH rd, rt Parallel Exchange Even Halfword
2168 * PEXCH rd, rt Parallel Exchange Center Halfword
2169 * PEXEW rd, rt Parallel Exchange Even Word
2170 * PEXCW rd, rt Parallel Exchange Center Word
2171 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2172 * MFSA rd Move from Shift Amount Register
2173 * MTSA rs Move to Shift Amount Register
2174 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2175 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2176 * PROT3W rd, rt Parallel Rotate 3 Words
2178 * MMI (MultiMedia Instruction) encodings
2179 * ======================================
2181 * MMI instructions encoding table keys:
2183 * * This code is reserved for future use. An attempt to execute it
2184 * causes a Reserved Instruction exception.
2185 * % This code indicates an instruction class. The instruction word
2186 * must be further decoded by examining additional tables that show
2187 * the values for other instruction fields.
2188 * # This code is reserved for the unsupported instructions DMULT,
2189 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2190 * to execute it causes a Reserved Instruction exception.
2192 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2195 * +--------+----------------------------------------+
2197 * +--------+----------------------------------------+
2199 * opcode bits 28..26
2200 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2201 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2202 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2203 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2204 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2205 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2206 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2207 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2208 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2209 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2210 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/*
 * TX79/R5900 MMI major opcode classes (see the opcode table above).
 * These reuse encodings that other MIPS variants assign differently.
 */
enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};

/*
 * MMI instructions with opcode field = MMI, selected by function
 * bits 5..0:
 *
 *  31    26                                        5      0
 * +--------+-------------------------------+--------+
 * |  MMI   |                               |function|
 * +--------+-------------------------------+--------+
 *
 *          function  bits 2..0
 *  bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *  5..3 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 * ------+-------+-------+-------+-------+-------+-------+-------+-------
 * 0 000 | MADD  | MADDU |   *   |   *   | PLZCW |   *   |   *   |   *
 * 1 001 | MMI0% | MMI2% |   *   |   *   |   *   |   *   |   *   |   *
 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 |   *   |   *   |   *   |   *
 * 3 011 | MULT1 | MULTU1| DIV1  | DIVU1 |   *   |   *   |   *   |   *
 * 4 100 | MADD1 | MADDU1|   *   |   *   |   *   |   *   |   *   |   *
 * 5 101 | MMI1% | MMI3% |   *   |   *   |   *   |   *   |   *   |   *
 * 6 110 | PMFHL | PMTHL |   *   |   *   | PSLLH |   *   | PSRLH | PSRAH
 * 7 111 |   *   |   *   |   *   |   *   | PSLLW |   *   | PSRLW | PSRAW
 */

#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))

enum {
    MMI_OPC_MADD       = 0x00 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADD */
    MMI_OPC_MADDU      = 0x01 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADDU */
    MMI_OPC_PLZCW      = 0x04 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI0 = 0x08 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI2 = 0x09 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MFHI1      = 0x10 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFHI */
    MMI_OPC_MTHI1      = 0x11 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTHI */
    MMI_OPC_MFLO1      = 0x12 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFLO */
    MMI_OPC_MTLO1      = 0x13 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTLO */
    MMI_OPC_MULT1      = 0x18 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MULT */
    MMI_OPC_MULTU1     = 0x19 | MMI_OPC_CLASS_MMI, /* Same min. as OPC_MULTU */
    MMI_OPC_DIV1       = 0x1A | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIV */
    MMI_OPC_DIVU1      = 0x1B | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIVU */
    MMI_OPC_MADD1      = 0x20 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MADDU1     = 0x21 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI1 = 0x28 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI3 = 0x29 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMFHL      = 0x30 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMTHL      = 0x31 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLH      = 0x34 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLH      = 0x36 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAH      = 0x37 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLW      = 0x3C | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLW      = 0x3E | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAW      = 0x3F | MMI_OPC_CLASS_MMI,
};
2271 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2274 * +--------+----------------------+--------+--------+
2275 * | MMI | |function| MMI0 |
2276 * +--------+----------------------+--------+--------+
2278 * function bits 7..6
2279 * bits | 0 | 1 | 2 | 3
2280 * 10..8 | 00 | 01 | 10 | 11
2281 * -------+-------+-------+-------+-------
2282 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2283 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2284 * 2 010 | PADDB | PSUBB | PCGTB | *
2285 * 3 011 | * | * | * | *
2286 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2287 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2288 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2289 * 7 111 | * | * | PEXT5 | PPAC5
2292 #define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2294 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2316 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2317 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2318 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2322 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2325 * +--------+----------------------+--------+--------+
2326 * | MMI | |function| MMI1 |
2327 * +--------+----------------------+--------+--------+
2329 * function bits 7..6
2330 * bits | 0 | 1 | 2 | 3
2331 * 10..8 | 00 | 01 | 10 | 11
2332 * -------+-------+-------+-------+-------
2333 * 0 000 | * | PABSW | PCEQW | PMINW
2334 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2335 * 2 010 | * | * | PCEQB | *
2336 * 3 011 | * | * | * | *
2337 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2338 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2339 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2340 * 7 111 | * | * | * | *
2343 #define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2345 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2360 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2361 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2362 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2366 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2369 * +--------+----------------------+--------+--------+
2370 * | MMI | |function| MMI2 |
2371 * +--------+----------------------+--------+--------+
2373 * function bits 7..6
2374 * bits | 0 | 1 | 2 | 3
2375 * 10..8 | 00 | 01 | 10 | 11
2376 * -------+-------+-------+-------+-------
2377 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2378 * 1 001 | PMSUBW| * | * | *
2379 * 2 010 | PMFHI | PMFLO | PINTH | *
2380 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2381 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2382 * 5 101 | PMSUBH| PHMSBH| * | *
2383 * 6 110 | * | * | PEXEH | PREVH
2384 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2387 #define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2389 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2408 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2409 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2410 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2414 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2417 * +--------+----------------------+--------+--------+
2418 * | MMI | |function| MMI3 |
2419 * +--------+----------------------+--------+--------+
2421 * function bits 7..6
2422 * bits | 0 | 1 | 2 | 3
2423 * 10..8 | 00 | 01 | 10 | 11
2424 * -------+-------+-------+-------+-------
2425 * 0 000 |PMADDUW| * | * | PSRAVW
2426 * 1 001 | * | * | * | *
2427 * 2 010 | PMTHI | PMTLO | PINTEH| *
2428 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2429 * 4 100 | * | * | POR | PNOR
2430 * 5 101 | * | * | * | *
2431 * 6 110 | * | * | PEXCH | PCPYH
2432 * 7 111 | * | * | PEXCW | *
2435 #define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2437 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2447 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2448 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2449 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2452 /* global register indices */
2453 static TCGv cpu_gpr
[32], cpu_PC
;
2454 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2455 static TCGv cpu_dspctrl
, btarget
, bcond
;
2456 static TCGv cpu_lladdr
, cpu_llval
;
2457 static TCGv_i32 hflags
;
2458 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2459 static TCGv_i64 fpu_f64
[32];
2460 static TCGv_i64 msa_wr_d
[64];
2462 #if defined(TARGET_MIPS64)
2463 /* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
2464 static TCGv_i64 cpu_mmr
[32];
2467 #if !defined(TARGET_MIPS64)
2469 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2473 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers for helpers that take trailing immediate
 * arguments: each immediate is materialized into a transient TCGv_i32
 * which is freed right after the call.  Naming: "<r>e<n>i" means
 * <r> return values, cpu_env, <n> TCG value arguments, immediates last.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2517 typedef struct DisasContext
{
2518 DisasContextBase base
;
2519 target_ulong saved_pc
;
2520 target_ulong page_start
;
2522 uint64_t insn_flags
;
2523 int32_t CP0_Config1
;
2524 int32_t CP0_Config2
;
2525 int32_t CP0_Config3
;
2526 int32_t CP0_Config5
;
2527 /* Routine used to access memory */
2529 TCGMemOp default_tcg_memop_mask
;
2530 uint32_t hflags
, saved_hflags
;
2531 target_ulong btarget
;
2542 int CP0_LLAddr_shift
;
2552 #define DISAS_STOP DISAS_TARGET_0
2553 #define DISAS_EXIT DISAS_TARGET_1
/* ABI names of the 32 general purpose registers, indexed by number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

/* Names of the DSP HI accumulators. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

/* Names of the DSP LO accumulators. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

/* Names of the 32 floating point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Names of the MSA vector registers, one entry per 64-bit half. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};

#if !defined(TARGET_MIPS64)
/* Names of the MXU registers XR1..XR15 plus the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
/* Emit disassembly debug output, compiled out unless MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid/unhandled opcode together with its decoded fields. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2621 /* General purpose registers moves. */
2622 static inline void gen_load_gpr(TCGv t
, int reg
)
2625 tcg_gen_movi_tl(t
, 0);
2627 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2631 static inline void gen_store_gpr(TCGv t
, int reg
)
2634 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2638 /* Moves to/from shadow registers. */
2639 static inline void gen_load_srsgpr(int from
, int to
)
2641 TCGv t0
= tcg_temp_new();
2644 tcg_gen_movi_tl(t0
, 0);
2646 TCGv_i32 t2
= tcg_temp_new_i32();
2647 TCGv_ptr addr
= tcg_temp_new_ptr();
2649 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2650 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2651 tcg_gen_andi_i32(t2
, t2
, 0xf);
2652 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2653 tcg_gen_ext_i32_ptr(addr
, t2
);
2654 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2656 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2657 tcg_temp_free_ptr(addr
);
2658 tcg_temp_free_i32(t2
);
2660 gen_store_gpr(t0
, to
);
2664 static inline void gen_store_srsgpr(int from
, int to
)
2667 TCGv t0
= tcg_temp_new();
2668 TCGv_i32 t2
= tcg_temp_new_i32();
2669 TCGv_ptr addr
= tcg_temp_new_ptr();
2671 gen_load_gpr(t0
, from
);
2672 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2673 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2674 tcg_gen_andi_i32(t2
, t2
, 0xf);
2675 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2676 tcg_gen_ext_i32_ptr(addr
, t2
);
2677 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2679 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2680 tcg_temp_free_ptr(addr
);
2681 tcg_temp_free_i32(t2
);
2686 #if !defined(TARGET_MIPS64)
2687 /* MXU General purpose registers moves. */
2688 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2691 tcg_gen_movi_tl(t
, 0);
2692 } else if (reg
<= 15) {
2693 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2697 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2699 if (reg
> 0 && reg
<= 15) {
2700 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2704 /* MXU control register moves. */
2705 static inline void gen_load_mxu_cr(TCGv t
)
2707 tcg_gen_mov_tl(t
, mxu_CR
);
2710 static inline void gen_store_mxu_cr(TCGv t
)
2712 /* TODO: Add handling of RW rules for MXU_CR. */
2713 tcg_gen_mov_tl(mxu_CR
, t
);
2719 static inline void gen_save_pc(target_ulong pc
)
2721 tcg_gen_movi_tl(cpu_PC
, pc
);
2724 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2726 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2727 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2728 gen_save_pc(ctx
->base
.pc_next
);
2729 ctx
->saved_pc
= ctx
->base
.pc_next
;
2731 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2732 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2733 ctx
->saved_hflags
= ctx
->hflags
;
2734 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2740 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2746 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2748 ctx
->saved_hflags
= ctx
->hflags
;
2749 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2755 ctx
->btarget
= env
->btarget
;
2760 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2762 TCGv_i32 texcp
= tcg_const_i32(excp
);
2763 TCGv_i32 terr
= tcg_const_i32(err
);
2764 save_cpu_state(ctx
, 1);
2765 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2766 tcg_temp_free_i32(terr
);
2767 tcg_temp_free_i32(texcp
);
2768 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2771 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2773 gen_helper_0e0i(raise_exception
, excp
);
2776 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2778 generate_exception_err(ctx
, excp
, 0);
2781 /* Floating point register moves. */
2782 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2784 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2785 generate_exception(ctx
, EXCP_RI
);
2787 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2790 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2793 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2794 generate_exception(ctx
, EXCP_RI
);
2796 t64
= tcg_temp_new_i64();
2797 tcg_gen_extu_i32_i64(t64
, t
);
2798 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2799 tcg_temp_free_i64(t64
);
2802 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2804 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2805 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2807 gen_load_fpr32(ctx
, t
, reg
| 1);
2811 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2813 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2814 TCGv_i64 t64
= tcg_temp_new_i64();
2815 tcg_gen_extu_i32_i64(t64
, t
);
2816 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2817 tcg_temp_free_i64(t64
);
2819 gen_store_fpr32(ctx
, t
, reg
| 1);
2823 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2825 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2826 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2828 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2832 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2834 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2835 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2838 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2839 t0
= tcg_temp_new_i64();
2840 tcg_gen_shri_i64(t0
, t
, 32);
2841 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2842 tcg_temp_free_i64(t0
);
/*
 * Map an FP condition code number to its bit position in FCSR:
 * cc 0 lives at bit 23, cc 1..7 at bits 25..31.
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2855 /* Addresses computation */
2856 static inline void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
,
2859 tcg_gen_add_tl(ret
, arg0
, arg1
);
2861 #if defined(TARGET_MIPS64)
2862 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2863 tcg_gen_ext32s_i64(ret
, ret
);
2868 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2871 tcg_gen_addi_tl(ret
, base
, ofs
);
2873 #if defined(TARGET_MIPS64)
2874 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2875 tcg_gen_ext32s_i64(ret
, ret
);
2880 /* Addresses computation (translation time) */
2881 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2884 target_long sum
= base
+ offset
;
2886 #if defined(TARGET_MIPS64)
2887 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2894 /* Sign-extract the low 32-bits to a target_long. */
2895 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2897 #if defined(TARGET_MIPS64)
2898 tcg_gen_ext32s_i64(ret
, arg
);
2900 tcg_gen_extrl_i64_i32(ret
, arg
);
2904 /* Sign-extract the high 32-bits to a target_long. */
2905 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2907 #if defined(TARGET_MIPS64)
2908 tcg_gen_sari_i64(ret
, arg
, 32);
2910 tcg_gen_extrh_i64_i32(ret
, arg
);
2914 static inline void check_cp0_enabled(DisasContext
*ctx
)
2916 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2917 generate_exception_err(ctx
, EXCP_CpU
, 0);
2921 static inline void check_cp1_enabled(DisasContext
*ctx
)
2923 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
2924 generate_exception_err(ctx
, EXCP_CpU
, 1);
2929 * Verify that the processor is running with COP1X instructions enabled.
2930 * This is associated with the nabla symbol in the MIPS32 and MIPS64
2933 static inline void check_cop1x(DisasContext
*ctx
)
2935 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
2936 generate_exception_end(ctx
, EXCP_RI
);
2941 * Verify that the processor is running with 64-bit floating-point
2942 * operations enabled.
2944 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2946 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
))) {
2947 generate_exception_end(ctx
, EXCP_RI
);
2952 * Verify if floating point register is valid; an operation is not defined
2953 * if bit 0 of any register specification is set and the FR bit in the
2954 * Status register equals zero, since the register numbers specify an
2955 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2956 * in the Status register equals one, both even and odd register numbers
2957 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2959 * Multiple 64 bit wide registers can be checked by calling
2960 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2962 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2964 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
2965 generate_exception_end(ctx
, EXCP_RI
);
2970 * Verify that the processor is running with DSP instructions enabled.
2971 * This is enabled by CP0 Status register MX(24) bit.
2973 static inline void check_dsp(DisasContext
*ctx
)
2975 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2976 if (ctx
->insn_flags
& ASE_DSP
) {
2977 generate_exception_end(ctx
, EXCP_DSPDIS
);
2979 generate_exception_end(ctx
, EXCP_RI
);
2984 static inline void check_dsp_r2(DisasContext
*ctx
)
2986 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2987 if (ctx
->insn_flags
& ASE_DSP
) {
2988 generate_exception_end(ctx
, EXCP_DSPDIS
);
2990 generate_exception_end(ctx
, EXCP_RI
);
2995 static inline void check_dsp_r3(DisasContext
*ctx
)
2997 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2998 if (ctx
->insn_flags
& ASE_DSP
) {
2999 generate_exception_end(ctx
, EXCP_DSPDIS
);
3001 generate_exception_end(ctx
, EXCP_RI
);
3007 * This code generates a "reserved instruction" exception if the
3008 * CPU does not support the instruction set corresponding to flags.
3010 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
3012 if (unlikely(!(ctx
->insn_flags
& flags
))) {
3013 generate_exception_end(ctx
, EXCP_RI
);
3018 * This code generates a "reserved instruction" exception if the
3019 * CPU has corresponding flag set which indicates that the instruction
3022 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
3024 if (unlikely(ctx
->insn_flags
& flags
)) {
3025 generate_exception_end(ctx
, EXCP_RI
);
3030 * The Linux kernel traps certain reserved instruction exceptions to
3031 * emulate the corresponding instructions. QEMU is the kernel in user
3032 * mode, so those traps are emulated by accepting the instructions.
3034 * A reserved instruction exception is generated for flagged CPUs if
3035 * QEMU runs in system mode.
3037 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3039 #ifndef CONFIG_USER_ONLY
3040 check_insn_opc_removed(ctx
, flags
);
3045 * This code generates a "reserved instruction" exception if the
3046 * CPU does not support 64-bit paired-single (PS) floating point data type.
3048 static inline void check_ps(DisasContext
*ctx
)
3050 if (unlikely(!ctx
->ps
)) {
3051 generate_exception(ctx
, EXCP_RI
);
3053 check_cp1_64bitmode(ctx
);
3056 #ifdef TARGET_MIPS64
3058 * This code generates a "reserved instruction" exception if 64-bit
3059 * instructions are not enabled.
3061 static inline void check_mips_64(DisasContext
*ctx
)
3063 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
))) {
3064 generate_exception_end(ctx
, EXCP_RI
);
3069 #ifndef CONFIG_USER_ONLY
3070 static inline void check_mvh(DisasContext
*ctx
)
3072 if (unlikely(!ctx
->mvh
)) {
3073 generate_exception(ctx
, EXCP_RI
);
3079 * This code generates a "reserved instruction" exception if the
3080 * Config5 XNP bit is set.
3082 static inline void check_xnp(DisasContext
*ctx
)
3084 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3085 generate_exception_end(ctx
, EXCP_RI
);
3089 #ifndef CONFIG_USER_ONLY
3091 * This code generates a "reserved instruction" exception if the
3092 * Config3 PW bit is NOT set.
3094 static inline void check_pw(DisasContext
*ctx
)
3096 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3097 generate_exception_end(ctx
, EXCP_RI
);
3103 * This code generates a "reserved instruction" exception if the
3104 * Config3 MT bit is NOT set.
3106 static inline void check_mt(DisasContext
*ctx
)
3108 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3109 generate_exception_end(ctx
, EXCP_RI
);
3113 #ifndef CONFIG_USER_ONLY
3115 * This code generates a "coprocessor unusable" exception if CP0 is not
3116 * available, and, if that is not the case, generates a "reserved instruction"
3117 * exception if the Config5 MT bit is NOT set. This is needed for availability
3118 * control of some of MT ASE instructions.
3120 static inline void check_cp0_mt(DisasContext
*ctx
)
3122 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3123 generate_exception_err(ctx
, EXCP_CpU
, 0);
3125 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3126 generate_exception_err(ctx
, EXCP_RI
, 0);
3133 * This code generates a "reserved instruction" exception if the
3134 * Config5 NMS bit is set.
3136 static inline void check_nms(DisasContext
*ctx
)
3138 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3139 generate_exception_end(ctx
, EXCP_RI
);
3144 * This code generates a "reserved instruction" exception if the
3145 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3146 * Config2 TL, and Config5 L2C are unset.
3148 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3150 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3151 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3152 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3153 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3154 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3155 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
3156 generate_exception_end(ctx
, EXCP_RI
);
3161 * This code generates a "reserved instruction" exception if the
3162 * Config5 EVA bit is NOT set.
3164 static inline void check_eva(DisasContext
*ctx
)
3166 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3167 generate_exception_end(ctx
, EXCP_RI
);
3173 * Define small wrappers for gen_load_fpr* so that we have a uniform
3174 * calling interface for 32 and 64-bit FPRs. No sense in changing
3175 * all callers for gen_load_fpr32 when we need the CTX parameter for
3178 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3179 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3180 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3181 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3182 int ft, int fs, int cc) \
3184 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
3185 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
3194 check_cp1_registers(ctx, fs | ft); \
3202 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
3203 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
3206 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
3209 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
3212 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
3215 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
3218 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
3221 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
3224 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
3227 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
3230 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
3233 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
3236 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
3239 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
3242 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
3245 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
3248 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
3251 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
3256 tcg_temp_free_i##bits(fp0); \
3257 tcg_temp_free_i##bits(fp1); \
3260 FOP_CONDS(, 0, d
, FMT_D
, 64)
3261 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3262 FOP_CONDS(, 0, s
, FMT_S
, 32)
3263 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3264 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3265 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3268 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3269 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
3270 int ft, int fs, int fd) \
3272 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3273 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3274 if (ifmt == FMT_D) { \
3275 check_cp1_registers(ctx, fs | ft | fd); \
3277 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3278 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3281 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3284 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3287 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3290 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3293 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3296 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3299 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3302 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3305 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3308 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3311 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3314 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3317 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3320 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3323 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3326 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3329 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3332 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3335 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3338 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3341 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3344 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3350 tcg_temp_free_i ## bits(fp0); \
3351 tcg_temp_free_i ## bits(fp1); \
3354 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3355 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3357 #undef gen_ldcmp_fpr32
3358 #undef gen_ldcmp_fpr64
3360 /* load/store instructions. */
3361 #ifdef CONFIG_USER_ONLY
3362 #define OP_LD_ATOMIC(insn, fname) \
3363 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3364 DisasContext *ctx) \
3366 TCGv t0 = tcg_temp_new(); \
3367 tcg_gen_mov_tl(t0, arg1); \
3368 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3369 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3370 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3371 tcg_temp_free(t0); \
3374 #define OP_LD_ATOMIC(insn, fname) \
3375 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3376 DisasContext *ctx) \
3378 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3381 OP_LD_ATOMIC(ll
, ld32s
);
3382 #if defined(TARGET_MIPS64)
3383 OP_LD_ATOMIC(lld
, ld64
);
3387 static void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
,
3388 int base
, int offset
)
3391 tcg_gen_movi_tl(addr
, offset
);
3392 } else if (offset
== 0) {
3393 gen_load_gpr(addr
, base
);
3395 tcg_gen_movi_tl(addr
, offset
);
3396 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3400 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
3402 target_ulong pc
= ctx
->base
.pc_next
;
3404 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3405 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3410 pc
&= ~(target_ulong
)3;
3415 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3416 int rt
, int base
, int offset
)
3419 int mem_idx
= ctx
->mem_idx
;
3421 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3423 * Loongson CPU uses a load to zero register for prefetch.
3424 * We emulate it as a NOP. On other CPU we must perform the
3425 * actual memory access.
3430 t0
= tcg_temp_new();
3431 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3434 #if defined(TARGET_MIPS64)
3436 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3437 ctx
->default_tcg_memop_mask
);
3438 gen_store_gpr(t0
, rt
);
3441 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3442 ctx
->default_tcg_memop_mask
);
3443 gen_store_gpr(t0
, rt
);
3447 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3448 gen_store_gpr(t0
, rt
);
3451 t1
= tcg_temp_new();
3453 * Do a byte access to possibly trigger a page
3454 * fault with the unaligned address.
3456 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3457 tcg_gen_andi_tl(t1
, t0
, 7);
3458 #ifndef TARGET_WORDS_BIGENDIAN
3459 tcg_gen_xori_tl(t1
, t1
, 7);
3461 tcg_gen_shli_tl(t1
, t1
, 3);
3462 tcg_gen_andi_tl(t0
, t0
, ~7);
3463 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3464 tcg_gen_shl_tl(t0
, t0
, t1
);
3465 t2
= tcg_const_tl(-1);
3466 tcg_gen_shl_tl(t2
, t2
, t1
);
3467 gen_load_gpr(t1
, rt
);
3468 tcg_gen_andc_tl(t1
, t1
, t2
);
3470 tcg_gen_or_tl(t0
, t0
, t1
);
3472 gen_store_gpr(t0
, rt
);
3475 t1
= tcg_temp_new();
3477 * Do a byte access to possibly trigger a page
3478 * fault with the unaligned address.
3480 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3481 tcg_gen_andi_tl(t1
, t0
, 7);
3482 #ifdef TARGET_WORDS_BIGENDIAN
3483 tcg_gen_xori_tl(t1
, t1
, 7);
3485 tcg_gen_shli_tl(t1
, t1
, 3);
3486 tcg_gen_andi_tl(t0
, t0
, ~7);
3487 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3488 tcg_gen_shr_tl(t0
, t0
, t1
);
3489 tcg_gen_xori_tl(t1
, t1
, 63);
3490 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3491 tcg_gen_shl_tl(t2
, t2
, t1
);
3492 gen_load_gpr(t1
, rt
);
3493 tcg_gen_and_tl(t1
, t1
, t2
);
3495 tcg_gen_or_tl(t0
, t0
, t1
);
3497 gen_store_gpr(t0
, rt
);
3500 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3501 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3503 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3504 gen_store_gpr(t0
, rt
);
3508 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3509 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3511 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3512 gen_store_gpr(t0
, rt
);
3515 mem_idx
= MIPS_HFLAG_UM
;
3518 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3519 ctx
->default_tcg_memop_mask
);
3520 gen_store_gpr(t0
, rt
);
3523 mem_idx
= MIPS_HFLAG_UM
;
3526 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3527 ctx
->default_tcg_memop_mask
);
3528 gen_store_gpr(t0
, rt
);
3531 mem_idx
= MIPS_HFLAG_UM
;
3534 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3535 ctx
->default_tcg_memop_mask
);
3536 gen_store_gpr(t0
, rt
);
3539 mem_idx
= MIPS_HFLAG_UM
;
3542 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3543 gen_store_gpr(t0
, rt
);
3546 mem_idx
= MIPS_HFLAG_UM
;
3549 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3550 gen_store_gpr(t0
, rt
);
3553 mem_idx
= MIPS_HFLAG_UM
;
3556 t1
= tcg_temp_new();
3558 * Do a byte access to possibly trigger a page
3559 * fault with the unaligned address.
3561 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3562 tcg_gen_andi_tl(t1
, t0
, 3);
3563 #ifndef TARGET_WORDS_BIGENDIAN
3564 tcg_gen_xori_tl(t1
, t1
, 3);
3566 tcg_gen_shli_tl(t1
, t1
, 3);
3567 tcg_gen_andi_tl(t0
, t0
, ~3);
3568 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3569 tcg_gen_shl_tl(t0
, t0
, t1
);
3570 t2
= tcg_const_tl(-1);
3571 tcg_gen_shl_tl(t2
, t2
, t1
);
3572 gen_load_gpr(t1
, rt
);
3573 tcg_gen_andc_tl(t1
, t1
, t2
);
3575 tcg_gen_or_tl(t0
, t0
, t1
);
3577 tcg_gen_ext32s_tl(t0
, t0
);
3578 gen_store_gpr(t0
, rt
);
3581 mem_idx
= MIPS_HFLAG_UM
;
3584 t1
= tcg_temp_new();
3586 * Do a byte access to possibly trigger a page
3587 * fault with the unaligned address.
3589 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3590 tcg_gen_andi_tl(t1
, t0
, 3);
3591 #ifdef TARGET_WORDS_BIGENDIAN
3592 tcg_gen_xori_tl(t1
, t1
, 3);
3594 tcg_gen_shli_tl(t1
, t1
, 3);
3595 tcg_gen_andi_tl(t0
, t0
, ~3);
3596 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3597 tcg_gen_shr_tl(t0
, t0
, t1
);
3598 tcg_gen_xori_tl(t1
, t1
, 31);
3599 t2
= tcg_const_tl(0xfffffffeull
);
3600 tcg_gen_shl_tl(t2
, t2
, t1
);
3601 gen_load_gpr(t1
, rt
);
3602 tcg_gen_and_tl(t1
, t1
, t2
);
3604 tcg_gen_or_tl(t0
, t0
, t1
);
3606 tcg_gen_ext32s_tl(t0
, t0
);
3607 gen_store_gpr(t0
, rt
);
3610 mem_idx
= MIPS_HFLAG_UM
;
3614 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3615 gen_store_gpr(t0
, rt
);
3621 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3622 uint32_t reg1
, uint32_t reg2
)
3624 TCGv taddr
= tcg_temp_new();
3625 TCGv_i64 tval
= tcg_temp_new_i64();
3626 TCGv tmp1
= tcg_temp_new();
3627 TCGv tmp2
= tcg_temp_new();
3629 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3630 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3631 #ifdef TARGET_WORDS_BIGENDIAN
3632 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3634 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3636 gen_store_gpr(tmp1
, reg1
);
3637 tcg_temp_free(tmp1
);
3638 gen_store_gpr(tmp2
, reg2
);
3639 tcg_temp_free(tmp2
);
3640 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3641 tcg_temp_free_i64(tval
);
3642 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3643 tcg_temp_free(taddr
);
3647 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
3648 int base
, int offset
)
3650 TCGv t0
= tcg_temp_new();
3651 TCGv t1
= tcg_temp_new();
3652 int mem_idx
= ctx
->mem_idx
;
3654 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3655 gen_load_gpr(t1
, rt
);
3657 #if defined(TARGET_MIPS64)
3659 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3660 ctx
->default_tcg_memop_mask
);
3663 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3666 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3670 mem_idx
= MIPS_HFLAG_UM
;
3673 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3674 ctx
->default_tcg_memop_mask
);
3677 mem_idx
= MIPS_HFLAG_UM
;
3680 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3681 ctx
->default_tcg_memop_mask
);
3684 mem_idx
= MIPS_HFLAG_UM
;
3687 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3690 mem_idx
= MIPS_HFLAG_UM
;
3693 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3696 mem_idx
= MIPS_HFLAG_UM
;
3699 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3707 /* Store conditional */
3708 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
3709 TCGMemOp tcg_mo
, bool eva
)
3712 TCGLabel
*l1
= gen_new_label();
3713 TCGLabel
*done
= gen_new_label();
3715 t0
= tcg_temp_new();
3716 addr
= tcg_temp_new();
3717 /* compare the address against that of the preceeding LL */
3718 gen_base_offset_addr(ctx
, addr
, base
, offset
);
3719 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
3720 tcg_temp_free(addr
);
3721 tcg_gen_movi_tl(t0
, 0);
3722 gen_store_gpr(t0
, rt
);
3726 /* generate cmpxchg */
3727 val
= tcg_temp_new();
3728 gen_load_gpr(val
, rt
);
3729 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
3730 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
3731 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
3732 gen_store_gpr(t0
, rt
);
3735 gen_set_label(done
);
3740 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3741 uint32_t reg1
, uint32_t reg2
, bool eva
)
3743 TCGv taddr
= tcg_temp_local_new();
3744 TCGv lladdr
= tcg_temp_local_new();
3745 TCGv_i64 tval
= tcg_temp_new_i64();
3746 TCGv_i64 llval
= tcg_temp_new_i64();
3747 TCGv_i64 val
= tcg_temp_new_i64();
3748 TCGv tmp1
= tcg_temp_new();
3749 TCGv tmp2
= tcg_temp_new();
3750 TCGLabel
*lab_fail
= gen_new_label();
3751 TCGLabel
*lab_done
= gen_new_label();
3753 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3755 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3756 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3758 gen_load_gpr(tmp1
, reg1
);
3759 gen_load_gpr(tmp2
, reg2
);
3761 #ifdef TARGET_WORDS_BIGENDIAN
3762 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3764 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3767 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3768 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3769 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3771 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3773 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3775 gen_set_label(lab_fail
);
3778 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3780 gen_set_label(lab_done
);
3781 tcg_gen_movi_tl(lladdr
, -1);
3782 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3785 /* Load and store */
3786 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
3790 * Don't do NOP if destination is zero: we must perform the actual
3796 TCGv_i32 fp0
= tcg_temp_new_i32();
3797 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3798 ctx
->default_tcg_memop_mask
);
3799 gen_store_fpr32(ctx
, fp0
, ft
);
3800 tcg_temp_free_i32(fp0
);
3805 TCGv_i32 fp0
= tcg_temp_new_i32();
3806 gen_load_fpr32(ctx
, fp0
, ft
);
3807 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3808 ctx
->default_tcg_memop_mask
);
3809 tcg_temp_free_i32(fp0
);
3814 TCGv_i64 fp0
= tcg_temp_new_i64();
3815 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3816 ctx
->default_tcg_memop_mask
);
3817 gen_store_fpr64(ctx
, fp0
, ft
);
3818 tcg_temp_free_i64(fp0
);
3823 TCGv_i64 fp0
= tcg_temp_new_i64();
3824 gen_load_fpr64(ctx
, fp0
, ft
);
3825 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3826 ctx
->default_tcg_memop_mask
);
3827 tcg_temp_free_i64(fp0
);
3831 MIPS_INVAL("flt_ldst");
3832 generate_exception_end(ctx
, EXCP_RI
);
3837 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3838 int rs
, int16_t imm
)
3840 TCGv t0
= tcg_temp_new();
3842 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3843 check_cp1_enabled(ctx
);
3847 check_insn(ctx
, ISA_MIPS2
);
3850 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3851 gen_flt_ldst(ctx
, op
, rt
, t0
);
3854 generate_exception_err(ctx
, EXCP_CpU
, 1);
3859 /* Arithmetic with immediate operand */
3860 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3861 int rt
, int rs
, int imm
)
3863 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3865 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3867 * If no destination, treat it as a NOP.
3868 * For addi, we must generate the overflow exception when needed.
3875 TCGv t0
= tcg_temp_local_new();
3876 TCGv t1
= tcg_temp_new();
3877 TCGv t2
= tcg_temp_new();
3878 TCGLabel
*l1
= gen_new_label();
3880 gen_load_gpr(t1
, rs
);
3881 tcg_gen_addi_tl(t0
, t1
, uimm
);
3882 tcg_gen_ext32s_tl(t0
, t0
);
3884 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3885 tcg_gen_xori_tl(t2
, t0
, uimm
);
3886 tcg_gen_and_tl(t1
, t1
, t2
);
3888 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3890 /* operands of same sign, result different sign */
3891 generate_exception(ctx
, EXCP_OVERFLOW
);
3893 tcg_gen_ext32s_tl(t0
, t0
);
3894 gen_store_gpr(t0
, rt
);
3900 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3901 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3903 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3906 #if defined(TARGET_MIPS64)
3909 TCGv t0
= tcg_temp_local_new();
3910 TCGv t1
= tcg_temp_new();
3911 TCGv t2
= tcg_temp_new();
3912 TCGLabel
*l1
= gen_new_label();
3914 gen_load_gpr(t1
, rs
);
3915 tcg_gen_addi_tl(t0
, t1
, uimm
);
3917 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3918 tcg_gen_xori_tl(t2
, t0
, uimm
);
3919 tcg_gen_and_tl(t1
, t1
, t2
);
3921 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3923 /* operands of same sign, result different sign */
3924 generate_exception(ctx
, EXCP_OVERFLOW
);
3926 gen_store_gpr(t0
, rt
);
3932 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3934 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3941 /* Logic with immediate operand */
3942 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3943 int rt
, int rs
, int16_t imm
)
3948 /* If no destination, treat it as a NOP. */
3951 uimm
= (uint16_t)imm
;
3954 if (likely(rs
!= 0)) {
3955 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3957 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3962 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3964 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3968 if (likely(rs
!= 0)) {
3969 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3971 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3975 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3977 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3978 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3980 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3989 /* Set on less than with immediate operand */
3990 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3991 int rt
, int rs
, int16_t imm
)
3993 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3997 /* If no destination, treat it as a NOP. */
4000 t0
= tcg_temp_new();
4001 gen_load_gpr(t0
, rs
);
4004 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
4007 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
4013 /* Shifts with immediate operand */
4014 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
4015 int rt
, int rs
, int16_t imm
)
4017 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
4021 /* If no destination, treat it as a NOP. */
4025 t0
= tcg_temp_new();
4026 gen_load_gpr(t0
, rs
);
4029 tcg_gen_shli_tl(t0
, t0
, uimm
);
4030 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4033 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4037 tcg_gen_ext32u_tl(t0
, t0
);
4038 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4040 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4045 TCGv_i32 t1
= tcg_temp_new_i32();
4047 tcg_gen_trunc_tl_i32(t1
, t0
);
4048 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4049 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4050 tcg_temp_free_i32(t1
);
4052 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4055 #if defined(TARGET_MIPS64)
4057 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4060 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4063 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4067 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4069 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4073 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4076 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4079 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4082 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4090 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4091 int rd
, int rs
, int rt
)
4093 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4094 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4096 * If no destination, treat it as a NOP.
4097 * For add & sub, we must generate the overflow exception when needed.
4105 TCGv t0
= tcg_temp_local_new();
4106 TCGv t1
= tcg_temp_new();
4107 TCGv t2
= tcg_temp_new();
4108 TCGLabel
*l1
= gen_new_label();
4110 gen_load_gpr(t1
, rs
);
4111 gen_load_gpr(t2
, rt
);
4112 tcg_gen_add_tl(t0
, t1
, t2
);
4113 tcg_gen_ext32s_tl(t0
, t0
);
4114 tcg_gen_xor_tl(t1
, t1
, t2
);
4115 tcg_gen_xor_tl(t2
, t0
, t2
);
4116 tcg_gen_andc_tl(t1
, t2
, t1
);
4118 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4120 /* operands of same sign, result different sign */
4121 generate_exception(ctx
, EXCP_OVERFLOW
);
4123 gen_store_gpr(t0
, rd
);
4128 if (rs
!= 0 && rt
!= 0) {
4129 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4130 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4131 } else if (rs
== 0 && rt
!= 0) {
4132 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4133 } else if (rs
!= 0 && rt
== 0) {
4134 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4136 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4141 TCGv t0
= tcg_temp_local_new();
4142 TCGv t1
= tcg_temp_new();
4143 TCGv t2
= tcg_temp_new();
4144 TCGLabel
*l1
= gen_new_label();
4146 gen_load_gpr(t1
, rs
);
4147 gen_load_gpr(t2
, rt
);
4148 tcg_gen_sub_tl(t0
, t1
, t2
);
4149 tcg_gen_ext32s_tl(t0
, t0
);
4150 tcg_gen_xor_tl(t2
, t1
, t2
);
4151 tcg_gen_xor_tl(t1
, t0
, t1
);
4152 tcg_gen_and_tl(t1
, t1
, t2
);
4154 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4157 * operands of different sign, first operand and the result
4160 generate_exception(ctx
, EXCP_OVERFLOW
);
4162 gen_store_gpr(t0
, rd
);
4167 if (rs
!= 0 && rt
!= 0) {
4168 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4169 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4170 } else if (rs
== 0 && rt
!= 0) {
4171 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4172 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4173 } else if (rs
!= 0 && rt
== 0) {
4174 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4176 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4179 #if defined(TARGET_MIPS64)
4182 TCGv t0
= tcg_temp_local_new();
4183 TCGv t1
= tcg_temp_new();
4184 TCGv t2
= tcg_temp_new();
4185 TCGLabel
*l1
= gen_new_label();
4187 gen_load_gpr(t1
, rs
);
4188 gen_load_gpr(t2
, rt
);
4189 tcg_gen_add_tl(t0
, t1
, t2
);
4190 tcg_gen_xor_tl(t1
, t1
, t2
);
4191 tcg_gen_xor_tl(t2
, t0
, t2
);
4192 tcg_gen_andc_tl(t1
, t2
, t1
);
4194 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4196 /* operands of same sign, result different sign */
4197 generate_exception(ctx
, EXCP_OVERFLOW
);
4199 gen_store_gpr(t0
, rd
);
4204 if (rs
!= 0 && rt
!= 0) {
4205 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4206 } else if (rs
== 0 && rt
!= 0) {
4207 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4208 } else if (rs
!= 0 && rt
== 0) {
4209 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4211 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4216 TCGv t0
= tcg_temp_local_new();
4217 TCGv t1
= tcg_temp_new();
4218 TCGv t2
= tcg_temp_new();
4219 TCGLabel
*l1
= gen_new_label();
4221 gen_load_gpr(t1
, rs
);
4222 gen_load_gpr(t2
, rt
);
4223 tcg_gen_sub_tl(t0
, t1
, t2
);
4224 tcg_gen_xor_tl(t2
, t1
, t2
);
4225 tcg_gen_xor_tl(t1
, t0
, t1
);
4226 tcg_gen_and_tl(t1
, t1
, t2
);
4228 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4231 * Operands of different sign, first operand and result different
4234 generate_exception(ctx
, EXCP_OVERFLOW
);
4236 gen_store_gpr(t0
, rd
);
4241 if (rs
!= 0 && rt
!= 0) {
4242 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4243 } else if (rs
== 0 && rt
!= 0) {
4244 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4245 } else if (rs
!= 0 && rt
== 0) {
4246 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4248 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4253 if (likely(rs
!= 0 && rt
!= 0)) {
4254 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4255 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4257 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4263 /* Conditional move */
4264 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4265 int rd
, int rs
, int rt
)
4270 /* If no destination, treat it as a NOP. */
4274 t0
= tcg_temp_new();
4275 gen_load_gpr(t0
, rt
);
4276 t1
= tcg_const_tl(0);
4277 t2
= tcg_temp_new();
4278 gen_load_gpr(t2
, rs
);
4281 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4284 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4287 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4290 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4299 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4300 int rd
, int rs
, int rt
)
4303 /* If no destination, treat it as a NOP. */
4309 if (likely(rs
!= 0 && rt
!= 0)) {
4310 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4312 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4316 if (rs
!= 0 && rt
!= 0) {
4317 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4318 } else if (rs
== 0 && rt
!= 0) {
4319 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4320 } else if (rs
!= 0 && rt
== 0) {
4321 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4323 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4327 if (likely(rs
!= 0 && rt
!= 0)) {
4328 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4329 } else if (rs
== 0 && rt
!= 0) {
4330 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4331 } else if (rs
!= 0 && rt
== 0) {
4332 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4334 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4338 if (likely(rs
!= 0 && rt
!= 0)) {
4339 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4340 } else if (rs
== 0 && rt
!= 0) {
4341 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4342 } else if (rs
!= 0 && rt
== 0) {
4343 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4345 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4351 /* Set on lower than */
4352 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4353 int rd
, int rs
, int rt
)
4358 /* If no destination, treat it as a NOP. */
4362 t0
= tcg_temp_new();
4363 t1
= tcg_temp_new();
4364 gen_load_gpr(t0
, rs
);
4365 gen_load_gpr(t1
, rt
);
4368 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4371 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4379 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4380 int rd
, int rs
, int rt
)
4386 * If no destination, treat it as a NOP.
4387 * For add & sub, we must generate the overflow exception when needed.
4392 t0
= tcg_temp_new();
4393 t1
= tcg_temp_new();
4394 gen_load_gpr(t0
, rs
);
4395 gen_load_gpr(t1
, rt
);
4398 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4399 tcg_gen_shl_tl(t0
, t1
, t0
);
4400 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4403 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4404 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4407 tcg_gen_ext32u_tl(t1
, t1
);
4408 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4409 tcg_gen_shr_tl(t0
, t1
, t0
);
4410 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4414 TCGv_i32 t2
= tcg_temp_new_i32();
4415 TCGv_i32 t3
= tcg_temp_new_i32();
4417 tcg_gen_trunc_tl_i32(t2
, t0
);
4418 tcg_gen_trunc_tl_i32(t3
, t1
);
4419 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4420 tcg_gen_rotr_i32(t2
, t3
, t2
);
4421 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4422 tcg_temp_free_i32(t2
);
4423 tcg_temp_free_i32(t3
);
4426 #if defined(TARGET_MIPS64)
4428 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4429 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4432 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4433 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4436 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4437 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4440 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4441 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4449 #if defined(TARGET_MIPS64)
4450 /* Copy GPR to and from TX79 HI1/LO1 register. */
4451 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4453 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4460 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4463 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4467 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4469 tcg_gen_movi_tl(cpu_HI
[1], 0);
4474 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4476 tcg_gen_movi_tl(cpu_LO
[1], 0);
4480 MIPS_INVAL("mfthilo1 TX79");
4481 generate_exception_end(ctx
, EXCP_RI
);
4487 /* Arithmetic on HI/LO registers */
4488 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4490 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4501 #if defined(TARGET_MIPS64)
4503 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4507 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4511 #if defined(TARGET_MIPS64)
4513 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4517 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4522 #if defined(TARGET_MIPS64)
4524 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4528 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4531 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4536 #if defined(TARGET_MIPS64)
4538 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4542 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4545 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4551 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4554 TCGv t0
= tcg_const_tl(addr
);
4555 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4556 gen_store_gpr(t0
, reg
);
4560 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4566 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4569 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4570 addr
= addr_add(ctx
, pc
, offset
);
4571 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4575 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4576 addr
= addr_add(ctx
, pc
, offset
);
4577 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4579 #if defined(TARGET_MIPS64)
4582 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4583 addr
= addr_add(ctx
, pc
, offset
);
4584 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4588 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4591 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4592 addr
= addr_add(ctx
, pc
, offset
);
4593 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4598 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4599 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4600 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4603 #if defined(TARGET_MIPS64)
4604 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4605 case R6_OPC_LDPC
+ (1 << 16):
4606 case R6_OPC_LDPC
+ (2 << 16):
4607 case R6_OPC_LDPC
+ (3 << 16):
4609 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4610 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4611 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4615 MIPS_INVAL("OPC_PCREL");
4616 generate_exception_end(ctx
, EXCP_RI
);
4623 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4632 t0
= tcg_temp_new();
4633 t1
= tcg_temp_new();
4635 gen_load_gpr(t0
, rs
);
4636 gen_load_gpr(t1
, rt
);
4641 TCGv t2
= tcg_temp_new();
4642 TCGv t3
= tcg_temp_new();
4643 tcg_gen_ext32s_tl(t0
, t0
);
4644 tcg_gen_ext32s_tl(t1
, t1
);
4645 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4646 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4647 tcg_gen_and_tl(t2
, t2
, t3
);
4648 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4649 tcg_gen_or_tl(t2
, t2
, t3
);
4650 tcg_gen_movi_tl(t3
, 0);
4651 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4652 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4653 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4660 TCGv t2
= tcg_temp_new();
4661 TCGv t3
= tcg_temp_new();
4662 tcg_gen_ext32s_tl(t0
, t0
);
4663 tcg_gen_ext32s_tl(t1
, t1
);
4664 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4665 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4666 tcg_gen_and_tl(t2
, t2
, t3
);
4667 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4668 tcg_gen_or_tl(t2
, t2
, t3
);
4669 tcg_gen_movi_tl(t3
, 0);
4670 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4671 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4672 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4679 TCGv t2
= tcg_const_tl(0);
4680 TCGv t3
= tcg_const_tl(1);
4681 tcg_gen_ext32u_tl(t0
, t0
);
4682 tcg_gen_ext32u_tl(t1
, t1
);
4683 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4684 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4685 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4692 TCGv t2
= tcg_const_tl(0);
4693 TCGv t3
= tcg_const_tl(1);
4694 tcg_gen_ext32u_tl(t0
, t0
);
4695 tcg_gen_ext32u_tl(t1
, t1
);
4696 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4697 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4698 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4705 TCGv_i32 t2
= tcg_temp_new_i32();
4706 TCGv_i32 t3
= tcg_temp_new_i32();
4707 tcg_gen_trunc_tl_i32(t2
, t0
);
4708 tcg_gen_trunc_tl_i32(t3
, t1
);
4709 tcg_gen_mul_i32(t2
, t2
, t3
);
4710 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4711 tcg_temp_free_i32(t2
);
4712 tcg_temp_free_i32(t3
);
4717 TCGv_i32 t2
= tcg_temp_new_i32();
4718 TCGv_i32 t3
= tcg_temp_new_i32();
4719 tcg_gen_trunc_tl_i32(t2
, t0
);
4720 tcg_gen_trunc_tl_i32(t3
, t1
);
4721 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4722 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4723 tcg_temp_free_i32(t2
);
4724 tcg_temp_free_i32(t3
);
4729 TCGv_i32 t2
= tcg_temp_new_i32();
4730 TCGv_i32 t3
= tcg_temp_new_i32();
4731 tcg_gen_trunc_tl_i32(t2
, t0
);
4732 tcg_gen_trunc_tl_i32(t3
, t1
);
4733 tcg_gen_mul_i32(t2
, t2
, t3
);
4734 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4735 tcg_temp_free_i32(t2
);
4736 tcg_temp_free_i32(t3
);
4741 TCGv_i32 t2
= tcg_temp_new_i32();
4742 TCGv_i32 t3
= tcg_temp_new_i32();
4743 tcg_gen_trunc_tl_i32(t2
, t0
);
4744 tcg_gen_trunc_tl_i32(t3
, t1
);
4745 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4746 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4747 tcg_temp_free_i32(t2
);
4748 tcg_temp_free_i32(t3
);
4751 #if defined(TARGET_MIPS64)
4754 TCGv t2
= tcg_temp_new();
4755 TCGv t3
= tcg_temp_new();
4756 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4757 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4758 tcg_gen_and_tl(t2
, t2
, t3
);
4759 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4760 tcg_gen_or_tl(t2
, t2
, t3
);
4761 tcg_gen_movi_tl(t3
, 0);
4762 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4763 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4770 TCGv t2
= tcg_temp_new();
4771 TCGv t3
= tcg_temp_new();
4772 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4773 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4774 tcg_gen_and_tl(t2
, t2
, t3
);
4775 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4776 tcg_gen_or_tl(t2
, t2
, t3
);
4777 tcg_gen_movi_tl(t3
, 0);
4778 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4779 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4786 TCGv t2
= tcg_const_tl(0);
4787 TCGv t3
= tcg_const_tl(1);
4788 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4789 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4796 TCGv t2
= tcg_const_tl(0);
4797 TCGv t3
= tcg_const_tl(1);
4798 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4799 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4805 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4809 TCGv t2
= tcg_temp_new();
4810 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4815 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4819 TCGv t2
= tcg_temp_new();
4820 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4826 MIPS_INVAL("r6 mul/div");
4827 generate_exception_end(ctx
, EXCP_RI
);
4835 #if defined(TARGET_MIPS64)
4836 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4840 t0
= tcg_temp_new();
4841 t1
= tcg_temp_new();
4843 gen_load_gpr(t0
, rs
);
4844 gen_load_gpr(t1
, rt
);
4849 TCGv t2
= tcg_temp_new();
4850 TCGv t3
= tcg_temp_new();
4851 tcg_gen_ext32s_tl(t0
, t0
);
4852 tcg_gen_ext32s_tl(t1
, t1
);
4853 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4854 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4855 tcg_gen_and_tl(t2
, t2
, t3
);
4856 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4857 tcg_gen_or_tl(t2
, t2
, t3
);
4858 tcg_gen_movi_tl(t3
, 0);
4859 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4860 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4861 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4862 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4863 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4870 TCGv t2
= tcg_const_tl(0);
4871 TCGv t3
= tcg_const_tl(1);
4872 tcg_gen_ext32u_tl(t0
, t0
);
4873 tcg_gen_ext32u_tl(t1
, t1
);
4874 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4875 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4876 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4877 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4878 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4884 MIPS_INVAL("div1 TX79");
4885 generate_exception_end(ctx
, EXCP_RI
);
4894 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4895 int acc
, int rs
, int rt
)
4899 t0
= tcg_temp_new();
4900 t1
= tcg_temp_new();
4902 gen_load_gpr(t0
, rs
);
4903 gen_load_gpr(t1
, rt
);
4912 TCGv t2
= tcg_temp_new();
4913 TCGv t3
= tcg_temp_new();
4914 tcg_gen_ext32s_tl(t0
, t0
);
4915 tcg_gen_ext32s_tl(t1
, t1
);
4916 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4917 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4918 tcg_gen_and_tl(t2
, t2
, t3
);
4919 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4920 tcg_gen_or_tl(t2
, t2
, t3
);
4921 tcg_gen_movi_tl(t3
, 0);
4922 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4923 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4924 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4925 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4926 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4933 TCGv t2
= tcg_const_tl(0);
4934 TCGv t3
= tcg_const_tl(1);
4935 tcg_gen_ext32u_tl(t0
, t0
);
4936 tcg_gen_ext32u_tl(t1
, t1
);
4937 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4938 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4939 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4940 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4941 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4948 TCGv_i32 t2
= tcg_temp_new_i32();
4949 TCGv_i32 t3
= tcg_temp_new_i32();
4950 tcg_gen_trunc_tl_i32(t2
, t0
);
4951 tcg_gen_trunc_tl_i32(t3
, t1
);
4952 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4953 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4954 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4955 tcg_temp_free_i32(t2
);
4956 tcg_temp_free_i32(t3
);
4961 TCGv_i32 t2
= tcg_temp_new_i32();
4962 TCGv_i32 t3
= tcg_temp_new_i32();
4963 tcg_gen_trunc_tl_i32(t2
, t0
);
4964 tcg_gen_trunc_tl_i32(t3
, t1
);
4965 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4966 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4967 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4968 tcg_temp_free_i32(t2
);
4969 tcg_temp_free_i32(t3
);
4972 #if defined(TARGET_MIPS64)
4975 TCGv t2
= tcg_temp_new();
4976 TCGv t3
= tcg_temp_new();
4977 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4978 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4979 tcg_gen_and_tl(t2
, t2
, t3
);
4980 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4981 tcg_gen_or_tl(t2
, t2
, t3
);
4982 tcg_gen_movi_tl(t3
, 0);
4983 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4984 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4985 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4992 TCGv t2
= tcg_const_tl(0);
4993 TCGv t3
= tcg_const_tl(1);
4994 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4995 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4996 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
5002 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5005 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5010 TCGv_i64 t2
= tcg_temp_new_i64();
5011 TCGv_i64 t3
= tcg_temp_new_i64();
5013 tcg_gen_ext_tl_i64(t2
, t0
);
5014 tcg_gen_ext_tl_i64(t3
, t1
);
5015 tcg_gen_mul_i64(t2
, t2
, t3
);
5016 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5017 tcg_gen_add_i64(t2
, t2
, t3
);
5018 tcg_temp_free_i64(t3
);
5019 gen_move_low32(cpu_LO
[acc
], t2
);
5020 gen_move_high32(cpu_HI
[acc
], t2
);
5021 tcg_temp_free_i64(t2
);
5026 TCGv_i64 t2
= tcg_temp_new_i64();
5027 TCGv_i64 t3
= tcg_temp_new_i64();
5029 tcg_gen_ext32u_tl(t0
, t0
);
5030 tcg_gen_ext32u_tl(t1
, t1
);
5031 tcg_gen_extu_tl_i64(t2
, t0
);
5032 tcg_gen_extu_tl_i64(t3
, t1
);
5033 tcg_gen_mul_i64(t2
, t2
, t3
);
5034 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5035 tcg_gen_add_i64(t2
, t2
, t3
);
5036 tcg_temp_free_i64(t3
);
5037 gen_move_low32(cpu_LO
[acc
], t2
);
5038 gen_move_high32(cpu_HI
[acc
], t2
);
5039 tcg_temp_free_i64(t2
);
5044 TCGv_i64 t2
= tcg_temp_new_i64();
5045 TCGv_i64 t3
= tcg_temp_new_i64();
5047 tcg_gen_ext_tl_i64(t2
, t0
);
5048 tcg_gen_ext_tl_i64(t3
, t1
);
5049 tcg_gen_mul_i64(t2
, t2
, t3
);
5050 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5051 tcg_gen_sub_i64(t2
, t3
, t2
);
5052 tcg_temp_free_i64(t3
);
5053 gen_move_low32(cpu_LO
[acc
], t2
);
5054 gen_move_high32(cpu_HI
[acc
], t2
);
5055 tcg_temp_free_i64(t2
);
5060 TCGv_i64 t2
= tcg_temp_new_i64();
5061 TCGv_i64 t3
= tcg_temp_new_i64();
5063 tcg_gen_ext32u_tl(t0
, t0
);
5064 tcg_gen_ext32u_tl(t1
, t1
);
5065 tcg_gen_extu_tl_i64(t2
, t0
);
5066 tcg_gen_extu_tl_i64(t3
, t1
);
5067 tcg_gen_mul_i64(t2
, t2
, t3
);
5068 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5069 tcg_gen_sub_i64(t2
, t3
, t2
);
5070 tcg_temp_free_i64(t3
);
5071 gen_move_low32(cpu_LO
[acc
], t2
);
5072 gen_move_high32(cpu_HI
[acc
], t2
);
5073 tcg_temp_free_i64(t2
);
5077 MIPS_INVAL("mul/div");
5078 generate_exception_end(ctx
, EXCP_RI
);
5087 * These MULT[U] and MADD[U] instructions implemented in for example
5088 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5089 * architectures are special three-operand variants with the syntax
5091 * MULT[U][1] rd, rs, rt
5095 * (rd, LO, HI) <- rs * rt
5099 * MADD[U][1] rd, rs, rt
5103 * (rd, LO, HI) <- (LO, HI) + rs * rt
5105 * where the low-order 32-bits of the result is placed into both the
5106 * GPR rd and the special register LO. The high-order 32-bits of the
5107 * result is placed into the special register HI.
5109 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5110 * which is the zero register that always reads as 0.
5112 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5113 int rd
, int rs
, int rt
)
5115 TCGv t0
= tcg_temp_new();
5116 TCGv t1
= tcg_temp_new();
5119 gen_load_gpr(t0
, rs
);
5120 gen_load_gpr(t1
, rt
);
5128 TCGv_i32 t2
= tcg_temp_new_i32();
5129 TCGv_i32 t3
= tcg_temp_new_i32();
5130 tcg_gen_trunc_tl_i32(t2
, t0
);
5131 tcg_gen_trunc_tl_i32(t3
, t1
);
5132 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5134 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5136 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5137 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5138 tcg_temp_free_i32(t2
);
5139 tcg_temp_free_i32(t3
);
5142 case MMI_OPC_MULTU1
:
5147 TCGv_i32 t2
= tcg_temp_new_i32();
5148 TCGv_i32 t3
= tcg_temp_new_i32();
5149 tcg_gen_trunc_tl_i32(t2
, t0
);
5150 tcg_gen_trunc_tl_i32(t3
, t1
);
5151 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5153 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5155 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5156 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5157 tcg_temp_free_i32(t2
);
5158 tcg_temp_free_i32(t3
);
5166 TCGv_i64 t2
= tcg_temp_new_i64();
5167 TCGv_i64 t3
= tcg_temp_new_i64();
5169 tcg_gen_ext_tl_i64(t2
, t0
);
5170 tcg_gen_ext_tl_i64(t3
, t1
);
5171 tcg_gen_mul_i64(t2
, t2
, t3
);
5172 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5173 tcg_gen_add_i64(t2
, t2
, t3
);
5174 tcg_temp_free_i64(t3
);
5175 gen_move_low32(cpu_LO
[acc
], t2
);
5176 gen_move_high32(cpu_HI
[acc
], t2
);
5178 gen_move_low32(cpu_gpr
[rd
], t2
);
5180 tcg_temp_free_i64(t2
);
5183 case MMI_OPC_MADDU1
:
5188 TCGv_i64 t2
= tcg_temp_new_i64();
5189 TCGv_i64 t3
= tcg_temp_new_i64();
5191 tcg_gen_ext32u_tl(t0
, t0
);
5192 tcg_gen_ext32u_tl(t1
, t1
);
5193 tcg_gen_extu_tl_i64(t2
, t0
);
5194 tcg_gen_extu_tl_i64(t3
, t1
);
5195 tcg_gen_mul_i64(t2
, t2
, t3
);
5196 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5197 tcg_gen_add_i64(t2
, t2
, t3
);
5198 tcg_temp_free_i64(t3
);
5199 gen_move_low32(cpu_LO
[acc
], t2
);
5200 gen_move_high32(cpu_HI
[acc
], t2
);
5202 gen_move_low32(cpu_gpr
[rd
], t2
);
5204 tcg_temp_free_i64(t2
);
5208 MIPS_INVAL("mul/madd TXx9");
5209 generate_exception_end(ctx
, EXCP_RI
);
5218 static void gen_mul_vr54xx(DisasContext
*ctx
, uint32_t opc
,
5219 int rd
, int rs
, int rt
)
5221 TCGv t0
= tcg_temp_new();
5222 TCGv t1
= tcg_temp_new();
5224 gen_load_gpr(t0
, rs
);
5225 gen_load_gpr(t1
, rt
);
5228 case OPC_VR54XX_MULS
:
5229 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5231 case OPC_VR54XX_MULSU
:
5232 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5234 case OPC_VR54XX_MACC
:
5235 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5237 case OPC_VR54XX_MACCU
:
5238 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5240 case OPC_VR54XX_MSAC
:
5241 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5243 case OPC_VR54XX_MSACU
:
5244 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5246 case OPC_VR54XX_MULHI
:
5247 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5249 case OPC_VR54XX_MULHIU
:
5250 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5252 case OPC_VR54XX_MULSHI
:
5253 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5255 case OPC_VR54XX_MULSHIU
:
5256 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5258 case OPC_VR54XX_MACCHI
:
5259 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5261 case OPC_VR54XX_MACCHIU
:
5262 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5264 case OPC_VR54XX_MSACHI
:
5265 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5267 case OPC_VR54XX_MSACHIU
:
5268 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5271 MIPS_INVAL("mul vr54xx");
5272 generate_exception_end(ctx
, EXCP_RI
);
5275 gen_store_gpr(t0
, rd
);
5282 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
5292 gen_load_gpr(t0
, rs
);
5297 #if defined(TARGET_MIPS64)
5301 tcg_gen_not_tl(t0
, t0
);
5310 tcg_gen_ext32u_tl(t0
, t0
);
5311 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5312 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5314 #if defined(TARGET_MIPS64)
5319 tcg_gen_clzi_i64(t0
, t0
, 64);
5325 /* Godson integer instructions */
5326 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5327 int rd
, int rs
, int rt
)
5339 case OPC_MULTU_G_2E
:
5340 case OPC_MULTU_G_2F
:
5341 #if defined(TARGET_MIPS64)
5342 case OPC_DMULT_G_2E
:
5343 case OPC_DMULT_G_2F
:
5344 case OPC_DMULTU_G_2E
:
5345 case OPC_DMULTU_G_2F
:
5347 t0
= tcg_temp_new();
5348 t1
= tcg_temp_new();
5351 t0
= tcg_temp_local_new();
5352 t1
= tcg_temp_local_new();
5356 gen_load_gpr(t0
, rs
);
5357 gen_load_gpr(t1
, rt
);
5362 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5363 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5365 case OPC_MULTU_G_2E
:
5366 case OPC_MULTU_G_2F
:
5367 tcg_gen_ext32u_tl(t0
, t0
);
5368 tcg_gen_ext32u_tl(t1
, t1
);
5369 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5370 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5375 TCGLabel
*l1
= gen_new_label();
5376 TCGLabel
*l2
= gen_new_label();
5377 TCGLabel
*l3
= gen_new_label();
5378 tcg_gen_ext32s_tl(t0
, t0
);
5379 tcg_gen_ext32s_tl(t1
, t1
);
5380 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5381 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5384 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5385 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5386 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5389 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5390 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5397 TCGLabel
*l1
= gen_new_label();
5398 TCGLabel
*l2
= gen_new_label();
5399 tcg_gen_ext32u_tl(t0
, t0
);
5400 tcg_gen_ext32u_tl(t1
, t1
);
5401 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5402 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5405 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5406 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5413 TCGLabel
*l1
= gen_new_label();
5414 TCGLabel
*l2
= gen_new_label();
5415 TCGLabel
*l3
= gen_new_label();
5416 tcg_gen_ext32u_tl(t0
, t0
);
5417 tcg_gen_ext32u_tl(t1
, t1
);
5418 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5419 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5420 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5422 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5425 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5426 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5433 TCGLabel
*l1
= gen_new_label();
5434 TCGLabel
*l2
= gen_new_label();
5435 tcg_gen_ext32u_tl(t0
, t0
);
5436 tcg_gen_ext32u_tl(t1
, t1
);
5437 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5438 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5441 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5442 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5446 #if defined(TARGET_MIPS64)
5447 case OPC_DMULT_G_2E
:
5448 case OPC_DMULT_G_2F
:
5449 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5451 case OPC_DMULTU_G_2E
:
5452 case OPC_DMULTU_G_2F
:
5453 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5458 TCGLabel
*l1
= gen_new_label();
5459 TCGLabel
*l2
= gen_new_label();
5460 TCGLabel
*l3
= gen_new_label();
5461 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5462 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5465 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5466 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5467 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5470 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5474 case OPC_DDIVU_G_2E
:
5475 case OPC_DDIVU_G_2F
:
5477 TCGLabel
*l1
= gen_new_label();
5478 TCGLabel
*l2
= gen_new_label();
5479 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5480 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5483 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5490 TCGLabel
*l1
= gen_new_label();
5491 TCGLabel
*l2
= gen_new_label();
5492 TCGLabel
*l3
= gen_new_label();
5493 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5494 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5495 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5497 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5500 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5504 case OPC_DMODU_G_2E
:
5505 case OPC_DMODU_G_2F
:
5507 TCGLabel
*l1
= gen_new_label();
5508 TCGLabel
*l2
= gen_new_label();
5509 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5510 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5513 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5524 /* Loongson multimedia instructions */
5525 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5527 uint32_t opc
, shift_max
;
5530 opc
= MASK_LMI(ctx
->opcode
);
5536 t0
= tcg_temp_local_new_i64();
5537 t1
= tcg_temp_local_new_i64();
5540 t0
= tcg_temp_new_i64();
5541 t1
= tcg_temp_new_i64();
5545 check_cp1_enabled(ctx
);
5546 gen_load_fpr64(ctx
, t0
, rs
);
5547 gen_load_fpr64(ctx
, t1
, rt
);
5549 #define LMI_HELPER(UP, LO) \
5550 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5551 #define LMI_HELPER_1(UP, LO) \
5552 case OPC_##UP: gen_helper_##LO(t0, t0); break
5553 #define LMI_DIRECT(UP, LO, OP) \
5554 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5557 LMI_HELPER(PADDSH
, paddsh
);
5558 LMI_HELPER(PADDUSH
, paddush
);
5559 LMI_HELPER(PADDH
, paddh
);
5560 LMI_HELPER(PADDW
, paddw
);
5561 LMI_HELPER(PADDSB
, paddsb
);
5562 LMI_HELPER(PADDUSB
, paddusb
);
5563 LMI_HELPER(PADDB
, paddb
);
5565 LMI_HELPER(PSUBSH
, psubsh
);
5566 LMI_HELPER(PSUBUSH
, psubush
);
5567 LMI_HELPER(PSUBH
, psubh
);
5568 LMI_HELPER(PSUBW
, psubw
);
5569 LMI_HELPER(PSUBSB
, psubsb
);
5570 LMI_HELPER(PSUBUSB
, psubusb
);
5571 LMI_HELPER(PSUBB
, psubb
);
5573 LMI_HELPER(PSHUFH
, pshufh
);
5574 LMI_HELPER(PACKSSWH
, packsswh
);
5575 LMI_HELPER(PACKSSHB
, packsshb
);
5576 LMI_HELPER(PACKUSHB
, packushb
);
5578 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5579 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5580 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5581 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5582 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5583 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5585 LMI_HELPER(PAVGH
, pavgh
);
5586 LMI_HELPER(PAVGB
, pavgb
);
5587 LMI_HELPER(PMAXSH
, pmaxsh
);
5588 LMI_HELPER(PMINSH
, pminsh
);
5589 LMI_HELPER(PMAXUB
, pmaxub
);
5590 LMI_HELPER(PMINUB
, pminub
);
5592 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5593 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5594 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5595 LMI_HELPER(PCMPGTH
, pcmpgth
);
5596 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5597 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5599 LMI_HELPER(PSLLW
, psllw
);
5600 LMI_HELPER(PSLLH
, psllh
);
5601 LMI_HELPER(PSRLW
, psrlw
);
5602 LMI_HELPER(PSRLH
, psrlh
);
5603 LMI_HELPER(PSRAW
, psraw
);
5604 LMI_HELPER(PSRAH
, psrah
);
5606 LMI_HELPER(PMULLH
, pmullh
);
5607 LMI_HELPER(PMULHH
, pmulhh
);
5608 LMI_HELPER(PMULHUH
, pmulhuh
);
5609 LMI_HELPER(PMADDHW
, pmaddhw
);
5611 LMI_HELPER(PASUBUB
, pasubub
);
5612 LMI_HELPER_1(BIADD
, biadd
);
5613 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5615 LMI_DIRECT(PADDD
, paddd
, add
);
5616 LMI_DIRECT(PSUBD
, psubd
, sub
);
5617 LMI_DIRECT(XOR_CP2
, xor, xor);
5618 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5619 LMI_DIRECT(AND_CP2
, and, and);
5620 LMI_DIRECT(OR_CP2
, or, or);
5623 tcg_gen_andc_i64(t0
, t1
, t0
);
5627 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5630 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5633 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5636 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5640 tcg_gen_andi_i64(t1
, t1
, 3);
5641 tcg_gen_shli_i64(t1
, t1
, 4);
5642 tcg_gen_shr_i64(t0
, t0
, t1
);
5643 tcg_gen_ext16u_i64(t0
, t0
);
5647 tcg_gen_add_i64(t0
, t0
, t1
);
5648 tcg_gen_ext32s_i64(t0
, t0
);
5651 tcg_gen_sub_i64(t0
, t0
, t1
);
5652 tcg_gen_ext32s_i64(t0
, t0
);
5674 /* Make sure shift count isn't TCG undefined behaviour. */
5675 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5680 tcg_gen_shl_i64(t0
, t0
, t1
);
5685 * Since SRA is UndefinedResult without sign-extended inputs,
5686 * we can treat SRA and DSRA the same.
5688 tcg_gen_sar_i64(t0
, t0
, t1
);
5691 /* We want to shift in zeros for SRL; zero-extend first. */
5692 tcg_gen_ext32u_i64(t0
, t0
);
5695 tcg_gen_shr_i64(t0
, t0
, t1
);
5699 if (shift_max
== 32) {
5700 tcg_gen_ext32s_i64(t0
, t0
);
5703 /* Shifts larger than MAX produce zero. */
5704 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5705 tcg_gen_neg_i64(t1
, t1
);
5706 tcg_gen_and_i64(t0
, t0
, t1
);
5712 TCGv_i64 t2
= tcg_temp_new_i64();
5713 TCGLabel
*lab
= gen_new_label();
5715 tcg_gen_mov_i64(t2
, t0
);
5716 tcg_gen_add_i64(t0
, t1
, t2
);
5717 if (opc
== OPC_ADD_CP2
) {
5718 tcg_gen_ext32s_i64(t0
, t0
);
5720 tcg_gen_xor_i64(t1
, t1
, t2
);
5721 tcg_gen_xor_i64(t2
, t2
, t0
);
5722 tcg_gen_andc_i64(t1
, t2
, t1
);
5723 tcg_temp_free_i64(t2
);
5724 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5725 generate_exception(ctx
, EXCP_OVERFLOW
);
5733 TCGv_i64 t2
= tcg_temp_new_i64();
5734 TCGLabel
*lab
= gen_new_label();
5736 tcg_gen_mov_i64(t2
, t0
);
5737 tcg_gen_sub_i64(t0
, t1
, t2
);
5738 if (opc
== OPC_SUB_CP2
) {
5739 tcg_gen_ext32s_i64(t0
, t0
);
5741 tcg_gen_xor_i64(t1
, t1
, t2
);
5742 tcg_gen_xor_i64(t2
, t2
, t0
);
5743 tcg_gen_and_i64(t1
, t1
, t2
);
5744 tcg_temp_free_i64(t2
);
5745 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5746 generate_exception(ctx
, EXCP_OVERFLOW
);
5752 tcg_gen_ext32u_i64(t0
, t0
);
5753 tcg_gen_ext32u_i64(t1
, t1
);
5754 tcg_gen_mul_i64(t0
, t0
, t1
);
5764 * ??? Document is unclear: Set FCC[CC]. Does that mean the
5765 * FD field is the CC field?
5768 MIPS_INVAL("loongson_cp2");
5769 generate_exception_end(ctx
, EXCP_RI
);
5776 gen_store_fpr64(ctx
, t0
, rd
);
5778 tcg_temp_free_i64(t0
);
5779 tcg_temp_free_i64(t1
);
5783 static void gen_trap(DisasContext
*ctx
, uint32_t opc
,
5784 int rs
, int rt
, int16_t imm
)
5787 TCGv t0
= tcg_temp_new();
5788 TCGv t1
= tcg_temp_new();
5791 /* Load needed operands */
5799 /* Compare two registers */
5801 gen_load_gpr(t0
, rs
);
5802 gen_load_gpr(t1
, rt
);
5812 /* Compare register to immediate */
5813 if (rs
!= 0 || imm
!= 0) {
5814 gen_load_gpr(t0
, rs
);
5815 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5822 case OPC_TEQ
: /* rs == rs */
5823 case OPC_TEQI
: /* r0 == 0 */
5824 case OPC_TGE
: /* rs >= rs */
5825 case OPC_TGEI
: /* r0 >= 0 */
5826 case OPC_TGEU
: /* rs >= rs unsigned */
5827 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5829 generate_exception_end(ctx
, EXCP_TRAP
);
5831 case OPC_TLT
: /* rs < rs */
5832 case OPC_TLTI
: /* r0 < 0 */
5833 case OPC_TLTU
: /* rs < rs unsigned */
5834 case OPC_TLTIU
: /* r0 < 0 unsigned */
5835 case OPC_TNE
: /* rs != rs */
5836 case OPC_TNEI
: /* r0 != 0 */
5837 /* Never trap: treat as NOP. */
5841 TCGLabel
*l1
= gen_new_label();
5846 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5850 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5854 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5858 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5862 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5866 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5869 generate_exception(ctx
, EXCP_TRAP
);
5876 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5878 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5882 #ifndef CONFIG_USER_ONLY
5883 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5889 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5891 if (use_goto_tb(ctx
, dest
)) {
5894 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5897 if (ctx
->base
.singlestep_enabled
) {
5898 save_cpu_state(ctx
, 0);
5899 gen_helper_raise_exception_debug(cpu_env
);
5901 tcg_gen_lookup_and_goto_ptr();
5905 /* Branches (before delay slot) */
5906 static void gen_compute_branch(DisasContext
*ctx
, uint32_t opc
,
5908 int rs
, int rt
, int32_t offset
,
5911 target_ulong btgt
= -1;
5913 int bcond_compute
= 0;
5914 TCGv t0
= tcg_temp_new();
5915 TCGv t1
= tcg_temp_new();
5917 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5918 #ifdef MIPS_DEBUG_DISAS
5919 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5920 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5922 generate_exception_end(ctx
, EXCP_RI
);
5926 /* Load needed operands */
5932 /* Compare two registers */
5934 gen_load_gpr(t0
, rs
);
5935 gen_load_gpr(t1
, rt
);
5938 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5952 /* Compare to zero */
5954 gen_load_gpr(t0
, rs
);
5957 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5960 #if defined(TARGET_MIPS64)
5962 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5964 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5967 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5972 /* Jump to immediate */
5973 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5978 /* Jump to register */
5979 if (offset
!= 0 && offset
!= 16) {
5981 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5982 * others are reserved.
5984 MIPS_INVAL("jump hint");
5985 generate_exception_end(ctx
, EXCP_RI
);
5988 gen_load_gpr(btarget
, rs
);
5991 MIPS_INVAL("branch/jump");
5992 generate_exception_end(ctx
, EXCP_RI
);
5995 if (bcond_compute
== 0) {
5996 /* No condition to be computed */
5998 case OPC_BEQ
: /* rx == rx */
5999 case OPC_BEQL
: /* rx == rx likely */
6000 case OPC_BGEZ
: /* 0 >= 0 */
6001 case OPC_BGEZL
: /* 0 >= 0 likely */
6002 case OPC_BLEZ
: /* 0 <= 0 */
6003 case OPC_BLEZL
: /* 0 <= 0 likely */
6005 ctx
->hflags
|= MIPS_HFLAG_B
;
6007 case OPC_BGEZAL
: /* 0 >= 0 */
6008 case OPC_BGEZALL
: /* 0 >= 0 likely */
6009 /* Always take and link */
6011 ctx
->hflags
|= MIPS_HFLAG_B
;
6013 case OPC_BNE
: /* rx != rx */
6014 case OPC_BGTZ
: /* 0 > 0 */
6015 case OPC_BLTZ
: /* 0 < 0 */
6018 case OPC_BLTZAL
: /* 0 < 0 */
6020 * Handle as an unconditional branch to get correct delay
6024 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
6025 ctx
->hflags
|= MIPS_HFLAG_B
;
6027 case OPC_BLTZALL
: /* 0 < 0 likely */
6028 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6029 /* Skip the instruction in the delay slot */
6030 ctx
->base
.pc_next
+= 4;
6032 case OPC_BNEL
: /* rx != rx likely */
6033 case OPC_BGTZL
: /* 0 > 0 likely */
6034 case OPC_BLTZL
: /* 0 < 0 likely */
6035 /* Skip the instruction in the delay slot */
6036 ctx
->base
.pc_next
+= 4;
6039 ctx
->hflags
|= MIPS_HFLAG_B
;
6042 ctx
->hflags
|= MIPS_HFLAG_BX
;
6046 ctx
->hflags
|= MIPS_HFLAG_B
;
6049 ctx
->hflags
|= MIPS_HFLAG_BR
;
6053 ctx
->hflags
|= MIPS_HFLAG_BR
;
6056 MIPS_INVAL("branch/jump");
6057 generate_exception_end(ctx
, EXCP_RI
);
6063 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6066 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6069 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6072 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6075 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6078 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6081 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6085 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6089 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6092 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6095 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6098 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6101 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6104 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6107 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6109 #if defined(TARGET_MIPS64)
6111 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6115 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6118 ctx
->hflags
|= MIPS_HFLAG_BC
;
6121 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6124 ctx
->hflags
|= MIPS_HFLAG_BL
;
6127 MIPS_INVAL("conditional branch/jump");
6128 generate_exception_end(ctx
, EXCP_RI
);
6133 ctx
->btarget
= btgt
;
6135 switch (delayslot_size
) {
6137 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6140 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6145 int post_delay
= insn_bytes
+ delayslot_size
;
6146 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6148 tcg_gen_movi_tl(cpu_gpr
[blink
],
6149 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6153 if (insn_bytes
== 2) {
6154 ctx
->hflags
|= MIPS_HFLAG_B16
;
6161 /* nanoMIPS Branches */
6162 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6164 int rs
, int rt
, int32_t offset
)
6166 target_ulong btgt
= -1;
6167 int bcond_compute
= 0;
6168 TCGv t0
= tcg_temp_new();
6169 TCGv t1
= tcg_temp_new();
6171 /* Load needed operands */
6175 /* Compare two registers */
6177 gen_load_gpr(t0
, rs
);
6178 gen_load_gpr(t1
, rt
);
6181 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6184 /* Compare to zero */
6186 gen_load_gpr(t0
, rs
);
6189 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6192 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6194 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6198 /* Jump to register */
6199 if (offset
!= 0 && offset
!= 16) {
6201 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6202 * others are reserved.
6204 MIPS_INVAL("jump hint");
6205 generate_exception_end(ctx
, EXCP_RI
);
6208 gen_load_gpr(btarget
, rs
);
6211 MIPS_INVAL("branch/jump");
6212 generate_exception_end(ctx
, EXCP_RI
);
6215 if (bcond_compute
== 0) {
6216 /* No condition to be computed */
6218 case OPC_BEQ
: /* rx == rx */
6220 ctx
->hflags
|= MIPS_HFLAG_B
;
6222 case OPC_BGEZAL
: /* 0 >= 0 */
6223 /* Always take and link */
6224 tcg_gen_movi_tl(cpu_gpr
[31],
6225 ctx
->base
.pc_next
+ insn_bytes
);
6226 ctx
->hflags
|= MIPS_HFLAG_B
;
6228 case OPC_BNE
: /* rx != rx */
6229 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6230 /* Skip the instruction in the delay slot */
6231 ctx
->base
.pc_next
+= 4;
6234 ctx
->hflags
|= MIPS_HFLAG_BR
;
6238 tcg_gen_movi_tl(cpu_gpr
[rt
],
6239 ctx
->base
.pc_next
+ insn_bytes
);
6241 ctx
->hflags
|= MIPS_HFLAG_BR
;
6244 MIPS_INVAL("branch/jump");
6245 generate_exception_end(ctx
, EXCP_RI
);
6251 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6254 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6257 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6258 tcg_gen_movi_tl(cpu_gpr
[31],
6259 ctx
->base
.pc_next
+ insn_bytes
);
6262 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6264 ctx
->hflags
|= MIPS_HFLAG_BC
;
6267 MIPS_INVAL("conditional branch/jump");
6268 generate_exception_end(ctx
, EXCP_RI
);
6273 ctx
->btarget
= btgt
;
6276 if (insn_bytes
== 2) {
6277 ctx
->hflags
|= MIPS_HFLAG_B16
;
6284 /* special3 bitfield operations */
6285 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
6286 int rs
, int lsb
, int msb
)
6288 TCGv t0
= tcg_temp_new();
6289 TCGv t1
= tcg_temp_new();
6291 gen_load_gpr(t1
, rs
);
6294 if (lsb
+ msb
> 31) {
6298 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6301 * The two checks together imply that lsb == 0,
6302 * so this is a simple sign-extension.
6304 tcg_gen_ext32s_tl(t0
, t1
);
6307 #if defined(TARGET_MIPS64)
6316 if (lsb
+ msb
> 63) {
6319 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6326 gen_load_gpr(t0
, rt
);
6327 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6328 tcg_gen_ext32s_tl(t0
, t0
);
6330 #if defined(TARGET_MIPS64)
6341 gen_load_gpr(t0
, rt
);
6342 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6347 MIPS_INVAL("bitops");
6348 generate_exception_end(ctx
, EXCP_RI
);
6353 gen_store_gpr(t0
, rt
);
6358 static void gen_bshfl(DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6363 /* If no destination, treat it as a NOP. */
6367 t0
= tcg_temp_new();
6368 gen_load_gpr(t0
, rt
);
6372 TCGv t1
= tcg_temp_new();
6373 TCGv t2
= tcg_const_tl(0x00FF00FF);
6375 tcg_gen_shri_tl(t1
, t0
, 8);
6376 tcg_gen_and_tl(t1
, t1
, t2
);
6377 tcg_gen_and_tl(t0
, t0
, t2
);
6378 tcg_gen_shli_tl(t0
, t0
, 8);
6379 tcg_gen_or_tl(t0
, t0
, t1
);
6382 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6386 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6389 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6391 #if defined(TARGET_MIPS64)
6394 TCGv t1
= tcg_temp_new();
6395 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6397 tcg_gen_shri_tl(t1
, t0
, 8);
6398 tcg_gen_and_tl(t1
, t1
, t2
);
6399 tcg_gen_and_tl(t0
, t0
, t2
);
6400 tcg_gen_shli_tl(t0
, t0
, 8);
6401 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6408 TCGv t1
= tcg_temp_new();
6409 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6411 tcg_gen_shri_tl(t1
, t0
, 16);
6412 tcg_gen_and_tl(t1
, t1
, t2
);
6413 tcg_gen_and_tl(t0
, t0
, t2
);
6414 tcg_gen_shli_tl(t0
, t0
, 16);
6415 tcg_gen_or_tl(t0
, t0
, t1
);
6416 tcg_gen_shri_tl(t1
, t0
, 32);
6417 tcg_gen_shli_tl(t0
, t0
, 32);
6418 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6425 MIPS_INVAL("bsfhl");
6426 generate_exception_end(ctx
, EXCP_RI
);
6433 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6442 t0
= tcg_temp_new();
6443 t1
= tcg_temp_new();
6444 gen_load_gpr(t0
, rs
);
6445 gen_load_gpr(t1
, rt
);
6446 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6447 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6448 if (opc
== OPC_LSA
) {
6449 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6458 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6466 t0
= tcg_temp_new();
6467 if (bits
== 0 || bits
== wordsz
) {
6469 gen_load_gpr(t0
, rt
);
6471 gen_load_gpr(t0
, rs
);
6475 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6477 #if defined(TARGET_MIPS64)
6479 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6484 TCGv t1
= tcg_temp_new();
6485 gen_load_gpr(t0
, rt
);
6486 gen_load_gpr(t1
, rs
);
6490 TCGv_i64 t2
= tcg_temp_new_i64();
6491 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6492 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6493 gen_move_low32(cpu_gpr
[rd
], t2
);
6494 tcg_temp_free_i64(t2
);
6497 #if defined(TARGET_MIPS64)
6499 tcg_gen_shli_tl(t0
, t0
, bits
);
6500 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6501 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6511 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6514 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6517 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6520 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6523 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6530 t0
= tcg_temp_new();
6531 gen_load_gpr(t0
, rt
);
6534 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6536 #if defined(TARGET_MIPS64)
6538 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6545 #ifndef CONFIG_USER_ONLY
6546 /* CP0 (MMU and control) */
6547 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6549 TCGv_i64 t0
= tcg_temp_new_i64();
6550 TCGv_i64 t1
= tcg_temp_new_i64();
6552 tcg_gen_ext_tl_i64(t0
, arg
);
6553 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6554 #if defined(TARGET_MIPS64)
6555 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6557 tcg_gen_concat32_i64(t1
, t1
, t0
);
6559 tcg_gen_st_i64(t1
, cpu_env
, off
);
6560 tcg_temp_free_i64(t1
);
6561 tcg_temp_free_i64(t0
);
6564 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6566 TCGv_i64 t0
= tcg_temp_new_i64();
6567 TCGv_i64 t1
= tcg_temp_new_i64();
6569 tcg_gen_ext_tl_i64(t0
, arg
);
6570 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6571 tcg_gen_concat32_i64(t1
, t1
, t0
);
6572 tcg_gen_st_i64(t1
, cpu_env
, off
);
6573 tcg_temp_free_i64(t1
);
6574 tcg_temp_free_i64(t0
);
6577 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6579 TCGv_i64 t0
= tcg_temp_new_i64();
6581 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6582 #if defined(TARGET_MIPS64)
6583 tcg_gen_shri_i64(t0
, t0
, 30);
6585 tcg_gen_shri_i64(t0
, t0
, 32);
6587 gen_move_low32(arg
, t0
);
6588 tcg_temp_free_i64(t0
);
6591 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6593 TCGv_i64 t0
= tcg_temp_new_i64();
6595 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6596 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6597 gen_move_low32(arg
, t0
);
6598 tcg_temp_free_i64(t0
);
6601 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
6603 TCGv_i32 t0
= tcg_temp_new_i32();
6605 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6606 tcg_gen_ext_i32_tl(arg
, t0
);
6607 tcg_temp_free_i32(t0
);
6610 static inline void gen_mfc0_load64(TCGv arg
, target_ulong off
)
6612 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6613 tcg_gen_ext32s_tl(arg
, arg
);
6616 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
6618 TCGv_i32 t0
= tcg_temp_new_i32();
6620 tcg_gen_trunc_tl_i32(t0
, arg
);
6621 tcg_gen_st_i32(t0
, cpu_env
, off
);
6622 tcg_temp_free_i32(t0
);
/*
 * Bail out to the function-local cp0_unimplemented label when the
 * feature predicate 'c' is false.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
6632 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6634 const char *register_name
= "invalid";
6637 case CP0_REGISTER_02
:
6640 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6641 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6642 register_name
= "EntryLo0";
6645 goto cp0_unimplemented
;
6648 case CP0_REGISTER_03
:
6651 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6652 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6653 register_name
= "EntryLo1";
6656 goto cp0_unimplemented
;
6659 case CP0_REGISTER_09
:
6662 CP0_CHECK(ctx
->saar
);
6663 gen_helper_mfhc0_saar(arg
, cpu_env
);
6664 register_name
= "SAAR";
6667 goto cp0_unimplemented
;
6670 case CP0_REGISTER_17
:
6673 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
6674 ctx
->CP0_LLAddr_shift
);
6675 register_name
= "LLAddr";
6678 CP0_CHECK(ctx
->mrp
);
6679 gen_helper_mfhc0_maar(arg
, cpu_env
);
6680 register_name
= "MAAR";
6683 goto cp0_unimplemented
;
6686 case CP0_REGISTER_28
:
6692 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6693 register_name
= "TagLo";
6696 goto cp0_unimplemented
;
6700 goto cp0_unimplemented
;
6702 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
6706 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
6707 register_name
, reg
, sel
);
6708 tcg_gen_movi_tl(arg
, 0);
6711 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6713 const char *register_name
= "invalid";
6714 uint64_t mask
= ctx
->PAMask
>> 36;
6717 case CP0_REGISTER_02
:
6720 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6721 tcg_gen_andi_tl(arg
, arg
, mask
);
6722 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6723 register_name
= "EntryLo0";
6726 goto cp0_unimplemented
;
6729 case CP0_REGISTER_03
:
6732 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6733 tcg_gen_andi_tl(arg
, arg
, mask
);
6734 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6735 register_name
= "EntryLo1";
6738 goto cp0_unimplemented
;
6741 case CP0_REGISTER_09
:
6744 CP0_CHECK(ctx
->saar
);
6745 gen_helper_mthc0_saar(cpu_env
, arg
);
6746 register_name
= "SAAR";
6749 goto cp0_unimplemented
;
6752 case CP0_REGISTER_17
:
6756 * LLAddr is read-only (the only exception is bit 0 if LLB is
6757 * supported); the CP0_LLAddr_rw_bitmask does not seem to be
6758 * relevant for modern MIPS cores supporting MTHC0, therefore
6759 * treating MTHC0 to LLAddr as NOP.
6761 register_name
= "LLAddr";
6764 CP0_CHECK(ctx
->mrp
);
6765 gen_helper_mthc0_maar(cpu_env
, arg
);
6766 register_name
= "MAAR";
6769 goto cp0_unimplemented
;
6772 case CP0_REGISTER_28
:
6778 tcg_gen_andi_tl(arg
, arg
, mask
);
6779 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6780 register_name
= "TagLo";
6783 goto cp0_unimplemented
;
6787 goto cp0_unimplemented
;
6789 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
6792 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6793 register_name
, reg
, sel
);
6796 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6798 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6799 tcg_gen_movi_tl(arg
, 0);
6801 tcg_gen_movi_tl(arg
, ~0);
6805 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6807 const char *register_name
= "invalid";
6810 check_insn(ctx
, ISA_MIPS32
);
6814 case CP0_REGISTER_00
:
6816 case CP0_REG00__INDEX
:
6817 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6818 register_name
= "Index";
6820 case CP0_REG00__MVPCONTROL
:
6821 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6822 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6823 register_name
= "MVPControl";
6825 case CP0_REG00__MVPCONF0
:
6826 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6827 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6828 register_name
= "MVPConf0";
6830 case CP0_REG00__MVPCONF1
:
6831 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6832 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6833 register_name
= "MVPConf1";
6835 case CP0_REG00__VPCONTROL
:
6837 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6838 register_name
= "VPControl";
6841 goto cp0_unimplemented
;
6844 case CP0_REGISTER_01
:
6847 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6848 gen_helper_mfc0_random(arg
, cpu_env
);
6849 register_name
= "Random";
6852 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6853 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6854 register_name
= "VPEControl";
6857 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6858 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6859 register_name
= "VPEConf0";
6862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6863 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6864 register_name
= "VPEConf1";
6867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6868 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6869 register_name
= "YQMask";
6872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6873 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6874 register_name
= "VPESchedule";
6877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6878 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6879 register_name
= "VPEScheFBack";
6882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6883 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6884 register_name
= "VPEOpt";
6887 goto cp0_unimplemented
;
6890 case CP0_REGISTER_02
:
6894 TCGv_i64 tmp
= tcg_temp_new_i64();
6895 tcg_gen_ld_i64(tmp
, cpu_env
,
6896 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6897 #if defined(TARGET_MIPS64)
6899 /* Move RI/XI fields to bits 31:30 */
6900 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6901 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6904 gen_move_low32(arg
, tmp
);
6905 tcg_temp_free_i64(tmp
);
6907 register_name
= "EntryLo0";
6910 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6911 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6912 register_name
= "TCStatus";
6915 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6916 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6917 register_name
= "TCBind";
6920 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6921 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6922 register_name
= "TCRestart";
6925 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6926 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6927 register_name
= "TCHalt";
6930 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6931 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6932 register_name
= "TCContext";
6935 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6936 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6937 register_name
= "TCSchedule";
6940 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6941 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6942 register_name
= "TCScheFBack";
6945 goto cp0_unimplemented
;
6948 case CP0_REGISTER_03
:
6952 TCGv_i64 tmp
= tcg_temp_new_i64();
6953 tcg_gen_ld_i64(tmp
, cpu_env
,
6954 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6955 #if defined(TARGET_MIPS64)
6957 /* Move RI/XI fields to bits 31:30 */
6958 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6959 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6962 gen_move_low32(arg
, tmp
);
6963 tcg_temp_free_i64(tmp
);
6965 register_name
= "EntryLo1";
6969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6970 register_name
= "GlobalNumber";
6973 goto cp0_unimplemented
;
6976 case CP0_REGISTER_04
:
6979 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6980 tcg_gen_ext32s_tl(arg
, arg
);
6981 register_name
= "Context";
6984 /* gen_helper_mfc0_contextconfig(arg); - SmartMIPS ASE */
6985 register_name
= "ContextConfig";
6986 goto cp0_unimplemented
;
6988 CP0_CHECK(ctx
->ulri
);
6989 tcg_gen_ld_tl(arg
, cpu_env
,
6990 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6991 tcg_gen_ext32s_tl(arg
, arg
);
6992 register_name
= "UserLocal";
6995 goto cp0_unimplemented
;
6998 case CP0_REGISTER_05
:
7001 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7002 register_name
= "PageMask";
7005 check_insn(ctx
, ISA_MIPS32R2
);
7006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7007 register_name
= "PageGrain";
7011 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7012 tcg_gen_ext32s_tl(arg
, arg
);
7013 register_name
= "SegCtl0";
7017 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7018 tcg_gen_ext32s_tl(arg
, arg
);
7019 register_name
= "SegCtl1";
7023 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7024 tcg_gen_ext32s_tl(arg
, arg
);
7025 register_name
= "SegCtl2";
7029 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7030 register_name
= "PWBase";
7034 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
7035 register_name
= "PWField";
7039 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
7040 register_name
= "PWSize";
7043 goto cp0_unimplemented
;
7046 case CP0_REGISTER_06
:
7049 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7050 register_name
= "Wired";
7053 check_insn(ctx
, ISA_MIPS32R2
);
7054 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7055 register_name
= "SRSConf0";
7058 check_insn(ctx
, ISA_MIPS32R2
);
7059 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7060 register_name
= "SRSConf1";
7063 check_insn(ctx
, ISA_MIPS32R2
);
7064 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7065 register_name
= "SRSConf2";
7068 check_insn(ctx
, ISA_MIPS32R2
);
7069 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7070 register_name
= "SRSConf3";
7073 check_insn(ctx
, ISA_MIPS32R2
);
7074 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7075 register_name
= "SRSConf4";
7079 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7080 register_name
= "PWCtl";
7083 goto cp0_unimplemented
;
7086 case CP0_REGISTER_07
:
7089 check_insn(ctx
, ISA_MIPS32R2
);
7090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7091 register_name
= "HWREna";
7094 goto cp0_unimplemented
;
7097 case CP0_REGISTER_08
:
7100 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7101 tcg_gen_ext32s_tl(arg
, arg
);
7102 register_name
= "BadVAddr";
7106 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7107 register_name
= "BadInstr";
7111 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7112 register_name
= "BadInstrP";
7116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7117 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7118 register_name
= "BadInstrX";
7121 goto cp0_unimplemented
;
7124 case CP0_REGISTER_09
:
7127 /* Mark as an IO operation because we read the time. */
7128 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7131 gen_helper_mfc0_count(arg
, cpu_env
);
7133 * Break the TB to be able to take timer interrupts immediately
7134 * after reading count. DISAS_STOP isn't sufficient, we need to
7135 * ensure we break completely out of translated code.
7137 gen_save_pc(ctx
->base
.pc_next
+ 4);
7138 ctx
->base
.is_jmp
= DISAS_EXIT
;
7139 register_name
= "Count";
7142 CP0_CHECK(ctx
->saar
);
7143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7144 register_name
= "SAARI";
7147 CP0_CHECK(ctx
->saar
);
7148 gen_helper_mfc0_saar(arg
, cpu_env
);
7149 register_name
= "SAAR";
7152 goto cp0_unimplemented
;
7155 case CP0_REGISTER_10
:
7158 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7159 tcg_gen_ext32s_tl(arg
, arg
);
7160 register_name
= "EntryHi";
7163 goto cp0_unimplemented
;
7166 case CP0_REGISTER_11
:
7169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7170 register_name
= "Compare";
7172 /* 6,7 are implementation dependent */
7174 goto cp0_unimplemented
;
7177 case CP0_REGISTER_12
:
7180 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7181 register_name
= "Status";
7184 check_insn(ctx
, ISA_MIPS32R2
);
7185 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7186 register_name
= "IntCtl";
7189 check_insn(ctx
, ISA_MIPS32R2
);
7190 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7191 register_name
= "SRSCtl";
7194 check_insn(ctx
, ISA_MIPS32R2
);
7195 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7196 register_name
= "SRSMap";
7199 goto cp0_unimplemented
;
7202 case CP0_REGISTER_13
:
7205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7206 register_name
= "Cause";
7209 goto cp0_unimplemented
;
7212 case CP0_REGISTER_14
:
7215 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7216 tcg_gen_ext32s_tl(arg
, arg
);
7217 register_name
= "EPC";
7220 goto cp0_unimplemented
;
7223 case CP0_REGISTER_15
:
7226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7227 register_name
= "PRid";
7230 check_insn(ctx
, ISA_MIPS32R2
);
7231 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7232 tcg_gen_ext32s_tl(arg
, arg
);
7233 register_name
= "EBase";
7236 check_insn(ctx
, ISA_MIPS32R2
);
7237 CP0_CHECK(ctx
->cmgcr
);
7238 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7239 tcg_gen_ext32s_tl(arg
, arg
);
7240 register_name
= "CMGCRBase";
7243 goto cp0_unimplemented
;
7246 case CP0_REGISTER_16
:
7249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7250 register_name
= "Config";
7253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7254 register_name
= "Config1";
7257 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7258 register_name
= "Config2";
7261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7262 register_name
= "Config3";
7265 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7266 register_name
= "Config4";
7269 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7270 register_name
= "Config5";
7272 /* 6,7 are implementation dependent */
7274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7275 register_name
= "Config6";
7278 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7279 register_name
= "Config7";
7282 goto cp0_unimplemented
;
7285 case CP0_REGISTER_17
:
7288 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7289 register_name
= "LLAddr";
7292 CP0_CHECK(ctx
->mrp
);
7293 gen_helper_mfc0_maar(arg
, cpu_env
);
7294 register_name
= "MAAR";
7297 CP0_CHECK(ctx
->mrp
);
7298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7299 register_name
= "MAARI";
7302 goto cp0_unimplemented
;
7305 case CP0_REGISTER_18
:
7315 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7316 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7317 register_name
= "WatchLo";
7320 goto cp0_unimplemented
;
7323 case CP0_REGISTER_19
:
7333 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7334 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7335 register_name
= "WatchHi";
7338 goto cp0_unimplemented
;
7341 case CP0_REGISTER_20
:
7344 #if defined(TARGET_MIPS64)
7345 check_insn(ctx
, ISA_MIPS3
);
7346 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7347 tcg_gen_ext32s_tl(arg
, arg
);
7348 register_name
= "XContext";
7352 goto cp0_unimplemented
;
7355 case CP0_REGISTER_21
:
7356 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7357 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7361 register_name
= "Framemask";
7364 goto cp0_unimplemented
;
7367 case CP0_REGISTER_22
:
7368 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7369 register_name
= "'Diagnostic"; /* implementation dependent */
7371 case CP0_REGISTER_23
:
7374 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7375 register_name
= "Debug";
7378 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7379 register_name
= "TraceControl";
7380 goto cp0_unimplemented
;
7382 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7383 register_name
= "TraceControl2";
7384 goto cp0_unimplemented
;
7386 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7387 register_name
= "UserTraceData";
7388 goto cp0_unimplemented
;
7390 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7391 register_name
= "TraceBPC";
7392 goto cp0_unimplemented
;
7394 goto cp0_unimplemented
;
7397 case CP0_REGISTER_24
:
7401 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7402 tcg_gen_ext32s_tl(arg
, arg
);
7403 register_name
= "DEPC";
7406 goto cp0_unimplemented
;
7409 case CP0_REGISTER_25
:
7412 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7413 register_name
= "Performance0";
7416 /* gen_helper_mfc0_performance1(arg); */
7417 register_name
= "Performance1";
7418 goto cp0_unimplemented
;
7420 /* gen_helper_mfc0_performance2(arg); */
7421 register_name
= "Performance2";
7422 goto cp0_unimplemented
;
7424 /* gen_helper_mfc0_performance3(arg); */
7425 register_name
= "Performance3";
7426 goto cp0_unimplemented
;
7428 /* gen_helper_mfc0_performance4(arg); */
7429 register_name
= "Performance4";
7430 goto cp0_unimplemented
;
7432 /* gen_helper_mfc0_performance5(arg); */
7433 register_name
= "Performance5";
7434 goto cp0_unimplemented
;
7436 /* gen_helper_mfc0_performance6(arg); */
7437 register_name
= "Performance6";
7438 goto cp0_unimplemented
;
7440 /* gen_helper_mfc0_performance7(arg); */
7441 register_name
= "Performance7";
7442 goto cp0_unimplemented
;
7444 goto cp0_unimplemented
;
7447 case CP0_REGISTER_26
:
7450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7451 register_name
= "ErrCtl";
7454 goto cp0_unimplemented
;
7457 case CP0_REGISTER_27
:
7463 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7464 register_name
= "CacheErr";
7467 goto cp0_unimplemented
;
7470 case CP0_REGISTER_28
:
7477 TCGv_i64 tmp
= tcg_temp_new_i64();
7478 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7479 gen_move_low32(arg
, tmp
);
7480 tcg_temp_free_i64(tmp
);
7482 register_name
= "TagLo";
7488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7489 register_name
= "DataLo";
7492 goto cp0_unimplemented
;
7495 case CP0_REGISTER_29
:
7501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7502 register_name
= "TagHi";
7508 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7509 register_name
= "DataHi";
7512 goto cp0_unimplemented
;
7515 case CP0_REGISTER_30
:
7518 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7519 tcg_gen_ext32s_tl(arg
, arg
);
7520 register_name
= "ErrorEPC";
7523 goto cp0_unimplemented
;
7526 case CP0_REGISTER_31
:
7530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7531 register_name
= "DESAVE";
7539 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7540 tcg_gen_ld_tl(arg
, cpu_env
,
7541 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7542 tcg_gen_ext32s_tl(arg
, arg
);
7543 register_name
= "KScratch";
7546 goto cp0_unimplemented
;
7550 goto cp0_unimplemented
;
7552 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
7556 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
7557 register_name
, reg
, sel
);
7558 gen_mfc0_unimplemented(ctx
, arg
);
7561 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7563 const char *register_name
= "invalid";
7566 check_insn(ctx
, ISA_MIPS32
);
7569 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7574 case CP0_REGISTER_00
:
7576 case CP0_REG00__INDEX
:
7577 gen_helper_mtc0_index(cpu_env
, arg
);
7578 register_name
= "Index";
7580 case CP0_REG00__MVPCONTROL
:
7581 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7582 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7583 register_name
= "MVPControl";
7585 case CP0_REG00__MVPCONF0
:
7586 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7588 register_name
= "MVPConf0";
7590 case CP0_REG00__MVPCONF1
:
7591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7593 register_name
= "MVPConf1";
7595 case CP0_REG00__VPCONTROL
:
7598 register_name
= "VPControl";
7601 goto cp0_unimplemented
;
7604 case CP0_REGISTER_01
:
7608 register_name
= "Random";
7611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7612 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7613 register_name
= "VPEControl";
7616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7617 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7618 register_name
= "VPEConf0";
7621 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7622 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7623 register_name
= "VPEConf1";
7626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7627 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7628 register_name
= "YQMask";
7631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7632 tcg_gen_st_tl(arg
, cpu_env
,
7633 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7634 register_name
= "VPESchedule";
7637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7638 tcg_gen_st_tl(arg
, cpu_env
,
7639 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7640 register_name
= "VPEScheFBack";
7643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7644 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7645 register_name
= "VPEOpt";
7648 goto cp0_unimplemented
;
7651 case CP0_REGISTER_02
:
7654 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7655 register_name
= "EntryLo0";
7658 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7659 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7660 register_name
= "TCStatus";
7663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7664 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7665 register_name
= "TCBind";
7668 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7669 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7670 register_name
= "TCRestart";
7673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7674 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7675 register_name
= "TCHalt";
7678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7679 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7680 register_name
= "TCContext";
7683 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7684 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7685 register_name
= "TCSchedule";
7688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7689 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7690 register_name
= "TCScheFBack";
7693 goto cp0_unimplemented
;
7696 case CP0_REGISTER_03
:
7699 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7700 register_name
= "EntryLo1";
7705 register_name
= "GlobalNumber";
7708 goto cp0_unimplemented
;
7711 case CP0_REGISTER_04
:
7714 gen_helper_mtc0_context(cpu_env
, arg
);
7715 register_name
= "Context";
7718 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7719 register_name
= "ContextConfig";
7720 goto cp0_unimplemented
;
7722 CP0_CHECK(ctx
->ulri
);
7723 tcg_gen_st_tl(arg
, cpu_env
,
7724 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7725 register_name
= "UserLocal";
7728 goto cp0_unimplemented
;
7731 case CP0_REGISTER_05
:
7734 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7735 register_name
= "PageMask";
7738 check_insn(ctx
, ISA_MIPS32R2
);
7739 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7740 register_name
= "PageGrain";
7741 ctx
->base
.is_jmp
= DISAS_STOP
;
7745 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7746 register_name
= "SegCtl0";
7750 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7751 register_name
= "SegCtl1";
7755 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7756 register_name
= "SegCtl2";
7760 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7761 register_name
= "PWBase";
7765 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7766 register_name
= "PWField";
7770 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7771 register_name
= "PWSize";
7774 goto cp0_unimplemented
;
7777 case CP0_REGISTER_06
:
7780 gen_helper_mtc0_wired(cpu_env
, arg
);
7781 register_name
= "Wired";
7784 check_insn(ctx
, ISA_MIPS32R2
);
7785 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7786 register_name
= "SRSConf0";
7789 check_insn(ctx
, ISA_MIPS32R2
);
7790 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7791 register_name
= "SRSConf1";
7794 check_insn(ctx
, ISA_MIPS32R2
);
7795 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7796 register_name
= "SRSConf2";
7799 check_insn(ctx
, ISA_MIPS32R2
);
7800 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7801 register_name
= "SRSConf3";
7804 check_insn(ctx
, ISA_MIPS32R2
);
7805 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7806 register_name
= "SRSConf4";
7810 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7811 register_name
= "PWCtl";
7814 goto cp0_unimplemented
;
7817 case CP0_REGISTER_07
:
7820 check_insn(ctx
, ISA_MIPS32R2
);
7821 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7822 ctx
->base
.is_jmp
= DISAS_STOP
;
7823 register_name
= "HWREna";
7826 goto cp0_unimplemented
;
7829 case CP0_REGISTER_08
:
7833 register_name
= "BadVAddr";
7837 register_name
= "BadInstr";
7841 register_name
= "BadInstrP";
7845 register_name
= "BadInstrX";
7848 goto cp0_unimplemented
;
7851 case CP0_REGISTER_09
:
7854 gen_helper_mtc0_count(cpu_env
, arg
);
7855 register_name
= "Count";
7858 CP0_CHECK(ctx
->saar
);
7859 gen_helper_mtc0_saari(cpu_env
, arg
);
7860 register_name
= "SAARI";
7863 CP0_CHECK(ctx
->saar
);
7864 gen_helper_mtc0_saar(cpu_env
, arg
);
7865 register_name
= "SAAR";
7868 goto cp0_unimplemented
;
7871 case CP0_REGISTER_10
:
7874 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7875 register_name
= "EntryHi";
7878 goto cp0_unimplemented
;
7881 case CP0_REGISTER_11
:
7884 gen_helper_mtc0_compare(cpu_env
, arg
);
7885 register_name
= "Compare";
7887 /* 6,7 are implementation dependent */
7889 goto cp0_unimplemented
;
7892 case CP0_REGISTER_12
:
7895 save_cpu_state(ctx
, 1);
7896 gen_helper_mtc0_status(cpu_env
, arg
);
7897 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7898 gen_save_pc(ctx
->base
.pc_next
+ 4);
7899 ctx
->base
.is_jmp
= DISAS_EXIT
;
7900 register_name
= "Status";
7903 check_insn(ctx
, ISA_MIPS32R2
);
7904 gen_helper_mtc0_intctl(cpu_env
, arg
);
7905 /* Stop translation as we may have switched the execution mode */
7906 ctx
->base
.is_jmp
= DISAS_STOP
;
7907 register_name
= "IntCtl";
7910 check_insn(ctx
, ISA_MIPS32R2
);
7911 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7912 /* Stop translation as we may have switched the execution mode */
7913 ctx
->base
.is_jmp
= DISAS_STOP
;
7914 register_name
= "SRSCtl";
7917 check_insn(ctx
, ISA_MIPS32R2
);
7918 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7919 /* Stop translation as we may have switched the execution mode */
7920 ctx
->base
.is_jmp
= DISAS_STOP
;
7921 register_name
= "SRSMap";
7924 goto cp0_unimplemented
;
7927 case CP0_REGISTER_13
:
7930 save_cpu_state(ctx
, 1);
7931 gen_helper_mtc0_cause(cpu_env
, arg
);
7933 * Stop translation as we may have triggered an interrupt.
7934 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7935 * translated code to check for pending interrupts.
7937 gen_save_pc(ctx
->base
.pc_next
+ 4);
7938 ctx
->base
.is_jmp
= DISAS_EXIT
;
7939 register_name
= "Cause";
7942 goto cp0_unimplemented
;
7945 case CP0_REGISTER_14
:
7948 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7949 register_name
= "EPC";
7952 goto cp0_unimplemented
;
7955 case CP0_REGISTER_15
:
7959 register_name
= "PRid";
7962 check_insn(ctx
, ISA_MIPS32R2
);
7963 gen_helper_mtc0_ebase(cpu_env
, arg
);
7964 register_name
= "EBase";
7967 goto cp0_unimplemented
;
7970 case CP0_REGISTER_16
:
7973 gen_helper_mtc0_config0(cpu_env
, arg
);
7974 register_name
= "Config";
7975 /* Stop translation as we may have switched the execution mode */
7976 ctx
->base
.is_jmp
= DISAS_STOP
;
7979 /* ignored, read only */
7980 register_name
= "Config1";
7983 gen_helper_mtc0_config2(cpu_env
, arg
);
7984 register_name
= "Config2";
7985 /* Stop translation as we may have switched the execution mode */
7986 ctx
->base
.is_jmp
= DISAS_STOP
;
7989 gen_helper_mtc0_config3(cpu_env
, arg
);
7990 register_name
= "Config3";
7991 /* Stop translation as we may have switched the execution mode */
7992 ctx
->base
.is_jmp
= DISAS_STOP
;
7995 gen_helper_mtc0_config4(cpu_env
, arg
);
7996 register_name
= "Config4";
7997 ctx
->base
.is_jmp
= DISAS_STOP
;
8000 gen_helper_mtc0_config5(cpu_env
, arg
);
8001 register_name
= "Config5";
8002 /* Stop translation as we may have switched the execution mode */
8003 ctx
->base
.is_jmp
= DISAS_STOP
;
8005 /* 6,7 are implementation dependent */
8008 register_name
= "Config6";
8012 register_name
= "Config7";
8015 register_name
= "Invalid config selector";
8016 goto cp0_unimplemented
;
8019 case CP0_REGISTER_17
:
8022 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8023 register_name
= "LLAddr";
8026 CP0_CHECK(ctx
->mrp
);
8027 gen_helper_mtc0_maar(cpu_env
, arg
);
8028 register_name
= "MAAR";
8031 CP0_CHECK(ctx
->mrp
);
8032 gen_helper_mtc0_maari(cpu_env
, arg
);
8033 register_name
= "MAARI";
8036 goto cp0_unimplemented
;
8039 case CP0_REGISTER_18
:
8049 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8050 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8051 register_name
= "WatchLo";
8054 goto cp0_unimplemented
;
8057 case CP0_REGISTER_19
:
8067 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8068 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8069 register_name
= "WatchHi";
8072 goto cp0_unimplemented
;
8075 case CP0_REGISTER_20
:
8078 #if defined(TARGET_MIPS64)
8079 check_insn(ctx
, ISA_MIPS3
);
8080 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8081 register_name
= "XContext";
8085 goto cp0_unimplemented
;
8088 case CP0_REGISTER_21
:
8089 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8090 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8093 gen_helper_mtc0_framemask(cpu_env
, arg
);
8094 register_name
= "Framemask";
8097 goto cp0_unimplemented
;
8100 case CP0_REGISTER_22
:
8102 register_name
= "Diagnostic"; /* implementation dependent */
8104 case CP0_REGISTER_23
:
8107 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8108 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8109 gen_save_pc(ctx
->base
.pc_next
+ 4);
8110 ctx
->base
.is_jmp
= DISAS_EXIT
;
8111 register_name
= "Debug";
8114 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8115 register_name
= "TraceControl";
8116 /* Stop translation as we may have switched the execution mode */
8117 ctx
->base
.is_jmp
= DISAS_STOP
;
8118 goto cp0_unimplemented
;
8120 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8121 register_name
= "TraceControl2";
8122 /* Stop translation as we may have switched the execution mode */
8123 ctx
->base
.is_jmp
= DISAS_STOP
;
8124 goto cp0_unimplemented
;
8126 /* Stop translation as we may have switched the execution mode */
8127 ctx
->base
.is_jmp
= DISAS_STOP
;
8128 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8129 register_name
= "UserTraceData";
8130 /* Stop translation as we may have switched the execution mode */
8131 ctx
->base
.is_jmp
= DISAS_STOP
;
8132 goto cp0_unimplemented
;
8134 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8135 /* Stop translation as we may have switched the execution mode */
8136 ctx
->base
.is_jmp
= DISAS_STOP
;
8137 register_name
= "TraceBPC";
8138 goto cp0_unimplemented
;
8140 goto cp0_unimplemented
;
8143 case CP0_REGISTER_24
:
8147 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8148 register_name
= "DEPC";
8151 goto cp0_unimplemented
;
8154 case CP0_REGISTER_25
:
8157 gen_helper_mtc0_performance0(cpu_env
, arg
);
8158 register_name
= "Performance0";
8161 /* gen_helper_mtc0_performance1(arg); */
8162 register_name
= "Performance1";
8163 goto cp0_unimplemented
;
8165 /* gen_helper_mtc0_performance2(arg); */
8166 register_name
= "Performance2";
8167 goto cp0_unimplemented
;
8169 /* gen_helper_mtc0_performance3(arg); */
8170 register_name
= "Performance3";
8171 goto cp0_unimplemented
;
8173 /* gen_helper_mtc0_performance4(arg); */
8174 register_name
= "Performance4";
8175 goto cp0_unimplemented
;
8177 /* gen_helper_mtc0_performance5(arg); */
8178 register_name
= "Performance5";
8179 goto cp0_unimplemented
;
8181 /* gen_helper_mtc0_performance6(arg); */
8182 register_name
= "Performance6";
8183 goto cp0_unimplemented
;
8185 /* gen_helper_mtc0_performance7(arg); */
8186 register_name
= "Performance7";
8187 goto cp0_unimplemented
;
8189 goto cp0_unimplemented
;
8192 case CP0_REGISTER_26
:
8195 gen_helper_mtc0_errctl(cpu_env
, arg
);
8196 ctx
->base
.is_jmp
= DISAS_STOP
;
8197 register_name
= "ErrCtl";
8200 goto cp0_unimplemented
;
8203 case CP0_REGISTER_27
:
8210 register_name
= "CacheErr";
8213 goto cp0_unimplemented
;
8216 case CP0_REGISTER_28
:
8222 gen_helper_mtc0_taglo(cpu_env
, arg
);
8223 register_name
= "TagLo";
8229 gen_helper_mtc0_datalo(cpu_env
, arg
);
8230 register_name
= "DataLo";
8233 goto cp0_unimplemented
;
8236 case CP0_REGISTER_29
:
8242 gen_helper_mtc0_taghi(cpu_env
, arg
);
8243 register_name
= "TagHi";
8249 gen_helper_mtc0_datahi(cpu_env
, arg
);
8250 register_name
= "DataHi";
8253 register_name
= "invalid sel";
8254 goto cp0_unimplemented
;
8257 case CP0_REGISTER_30
:
8260 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8261 register_name
= "ErrorEPC";
8264 goto cp0_unimplemented
;
8267 case CP0_REGISTER_31
:
8271 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8272 register_name
= "DESAVE";
8280 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8281 tcg_gen_st_tl(arg
, cpu_env
,
8282 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8283 register_name
= "KScratch";
8286 goto cp0_unimplemented
;
8290 goto cp0_unimplemented
;
8292 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8294 /* For simplicity assume that all writes can cause interrupts. */
8295 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8297 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8298 * translated code to check for pending interrupts.
8300 gen_save_pc(ctx
->base
.pc_next
+ 4);
8301 ctx
->base
.is_jmp
= DISAS_EXIT
;
8306 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8307 register_name
, reg
, sel
);
8310 #if defined(TARGET_MIPS64)
8311 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8313 const char *register_name
= "invalid";
8316 check_insn(ctx
, ISA_MIPS64
);
8320 case CP0_REGISTER_00
:
8322 case CP0_REG00__INDEX
:
8323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8324 register_name
= "Index";
8326 case CP0_REG00__MVPCONTROL
:
8327 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8328 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8329 register_name
= "MVPControl";
8331 case CP0_REG00__MVPCONF0
:
8332 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8333 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8334 register_name
= "MVPConf0";
8336 case CP0_REG00__MVPCONF1
:
8337 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8338 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8339 register_name
= "MVPConf1";
8341 case CP0_REG00__VPCONTROL
:
8343 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8344 register_name
= "VPControl";
8347 goto cp0_unimplemented
;
8350 case CP0_REGISTER_01
:
8353 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8354 gen_helper_mfc0_random(arg
, cpu_env
);
8355 register_name
= "Random";
8358 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8360 register_name
= "VPEControl";
8363 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8365 register_name
= "VPEConf0";
8368 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8370 register_name
= "VPEConf1";
8373 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8374 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8375 register_name
= "YQMask";
8378 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8380 register_name
= "VPESchedule";
8383 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8384 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8385 register_name
= "VPEScheFBack";
8388 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8390 register_name
= "VPEOpt";
8393 goto cp0_unimplemented
;
8396 case CP0_REGISTER_02
:
8399 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8400 register_name
= "EntryLo0";
8403 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8404 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8405 register_name
= "TCStatus";
8408 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8409 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8410 register_name
= "TCBind";
8413 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8414 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8415 register_name
= "TCRestart";
8418 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8419 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8420 register_name
= "TCHalt";
8423 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8424 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8425 register_name
= "TCContext";
8428 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8429 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8430 register_name
= "TCSchedule";
8433 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8434 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8435 register_name
= "TCScheFBack";
8438 goto cp0_unimplemented
;
8441 case CP0_REGISTER_03
:
8444 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8445 register_name
= "EntryLo1";
8449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8450 register_name
= "GlobalNumber";
8453 goto cp0_unimplemented
;
8456 case CP0_REGISTER_04
:
8459 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8460 register_name
= "Context";
8463 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8464 register_name
= "ContextConfig";
8465 goto cp0_unimplemented
;
8467 CP0_CHECK(ctx
->ulri
);
8468 tcg_gen_ld_tl(arg
, cpu_env
,
8469 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8470 register_name
= "UserLocal";
8473 goto cp0_unimplemented
;
8476 case CP0_REGISTER_05
:
8479 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8480 register_name
= "PageMask";
8483 check_insn(ctx
, ISA_MIPS32R2
);
8484 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8485 register_name
= "PageGrain";
8489 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8490 register_name
= "SegCtl0";
8494 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8495 register_name
= "SegCtl1";
8499 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8500 register_name
= "SegCtl2";
8504 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8505 register_name
= "PWBase";
8509 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8510 register_name
= "PWField";
8514 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8515 register_name
= "PWSize";
8518 goto cp0_unimplemented
;
8521 case CP0_REGISTER_06
:
8524 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8525 register_name
= "Wired";
8528 check_insn(ctx
, ISA_MIPS32R2
);
8529 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8530 register_name
= "SRSConf0";
8533 check_insn(ctx
, ISA_MIPS32R2
);
8534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8535 register_name
= "SRSConf1";
8538 check_insn(ctx
, ISA_MIPS32R2
);
8539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8540 register_name
= "SRSConf2";
8543 check_insn(ctx
, ISA_MIPS32R2
);
8544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8545 register_name
= "SRSConf3";
8548 check_insn(ctx
, ISA_MIPS32R2
);
8549 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8550 register_name
= "SRSConf4";
8554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8555 register_name
= "PWCtl";
8558 goto cp0_unimplemented
;
8561 case CP0_REGISTER_07
:
8564 check_insn(ctx
, ISA_MIPS32R2
);
8565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8566 register_name
= "HWREna";
8569 goto cp0_unimplemented
;
8572 case CP0_REGISTER_08
:
8575 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8576 register_name
= "BadVAddr";
8580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8581 register_name
= "BadInstr";
8585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8586 register_name
= "BadInstrP";
8590 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8591 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8592 register_name
= "BadInstrX";
8595 goto cp0_unimplemented
;
8598 case CP0_REGISTER_09
:
8601 /* Mark as an IO operation because we read the time. */
8602 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8605 gen_helper_mfc0_count(arg
, cpu_env
);
8607 * Break the TB to be able to take timer interrupts immediately
8608 * after reading count. DISAS_STOP isn't sufficient, we need to
8609 * ensure we break completely out of translated code.
8611 gen_save_pc(ctx
->base
.pc_next
+ 4);
8612 ctx
->base
.is_jmp
= DISAS_EXIT
;
8613 register_name
= "Count";
8616 CP0_CHECK(ctx
->saar
);
8617 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8618 register_name
= "SAARI";
8621 CP0_CHECK(ctx
->saar
);
8622 gen_helper_dmfc0_saar(arg
, cpu_env
);
8623 register_name
= "SAAR";
8626 goto cp0_unimplemented
;
8629 case CP0_REGISTER_10
:
8632 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8633 register_name
= "EntryHi";
8636 goto cp0_unimplemented
;
8639 case CP0_REGISTER_11
:
8642 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8643 register_name
= "Compare";
8645 /* 6,7 are implementation dependent */
8647 goto cp0_unimplemented
;
8650 case CP0_REGISTER_12
:
8653 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8654 register_name
= "Status";
8657 check_insn(ctx
, ISA_MIPS32R2
);
8658 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8659 register_name
= "IntCtl";
8662 check_insn(ctx
, ISA_MIPS32R2
);
8663 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8664 register_name
= "SRSCtl";
8667 check_insn(ctx
, ISA_MIPS32R2
);
8668 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8669 register_name
= "SRSMap";
8672 goto cp0_unimplemented
;
8675 case CP0_REGISTER_13
:
8678 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8679 register_name
= "Cause";
8682 goto cp0_unimplemented
;
8685 case CP0_REGISTER_14
:
8688 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8689 register_name
= "EPC";
8692 goto cp0_unimplemented
;
8695 case CP0_REGISTER_15
:
8698 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8699 register_name
= "PRid";
8702 check_insn(ctx
, ISA_MIPS32R2
);
8703 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8704 register_name
= "EBase";
8707 check_insn(ctx
, ISA_MIPS32R2
);
8708 CP0_CHECK(ctx
->cmgcr
);
8709 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8710 register_name
= "CMGCRBase";
8713 goto cp0_unimplemented
;
8716 case CP0_REGISTER_16
:
8719 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8720 register_name
= "Config";
8723 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8724 register_name
= "Config1";
8727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8728 register_name
= "Config2";
8731 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8732 register_name
= "Config3";
8735 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8736 register_name
= "Config4";
8739 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8740 register_name
= "Config5";
8742 /* 6,7 are implementation dependent */
8744 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8745 register_name
= "Config6";
8748 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8749 register_name
= "Config7";
8752 goto cp0_unimplemented
;
8755 case CP0_REGISTER_17
:
8758 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8759 register_name
= "LLAddr";
8762 CP0_CHECK(ctx
->mrp
);
8763 gen_helper_dmfc0_maar(arg
, cpu_env
);
8764 register_name
= "MAAR";
8767 CP0_CHECK(ctx
->mrp
);
8768 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8769 register_name
= "MAARI";
8772 goto cp0_unimplemented
;
8775 case CP0_REGISTER_18
:
8785 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8786 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8787 register_name
= "WatchLo";
8790 goto cp0_unimplemented
;
8793 case CP0_REGISTER_19
:
8803 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8804 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8805 register_name
= "WatchHi";
8808 goto cp0_unimplemented
;
8811 case CP0_REGISTER_20
:
8814 check_insn(ctx
, ISA_MIPS3
);
8815 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8816 register_name
= "XContext";
8819 goto cp0_unimplemented
;
8822 case CP0_REGISTER_21
:
8823 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8824 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8827 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8828 register_name
= "Framemask";
8831 goto cp0_unimplemented
;
8834 case CP0_REGISTER_22
:
8835 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8836 register_name
= "'Diagnostic"; /* implementation dependent */
8838 case CP0_REGISTER_23
:
8841 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8842 register_name
= "Debug";
8845 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8846 register_name
= "TraceControl";
8847 goto cp0_unimplemented
;
8849 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8850 register_name
= "TraceControl2";
8851 goto cp0_unimplemented
;
8853 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8854 register_name
= "UserTraceData";
8855 goto cp0_unimplemented
;
8857 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8858 register_name
= "TraceBPC";
8859 goto cp0_unimplemented
;
8861 goto cp0_unimplemented
;
8864 case CP0_REGISTER_24
:
8868 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8869 register_name
= "DEPC";
8872 goto cp0_unimplemented
;
8875 case CP0_REGISTER_25
:
8878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8879 register_name
= "Performance0";
8882 /* gen_helper_dmfc0_performance1(arg); */
8883 register_name
= "Performance1";
8884 goto cp0_unimplemented
;
8886 /* gen_helper_dmfc0_performance2(arg); */
8887 register_name
= "Performance2";
8888 goto cp0_unimplemented
;
8890 /* gen_helper_dmfc0_performance3(arg); */
8891 register_name
= "Performance3";
8892 goto cp0_unimplemented
;
8894 /* gen_helper_dmfc0_performance4(arg); */
8895 register_name
= "Performance4";
8896 goto cp0_unimplemented
;
8898 /* gen_helper_dmfc0_performance5(arg); */
8899 register_name
= "Performance5";
8900 goto cp0_unimplemented
;
8902 /* gen_helper_dmfc0_performance6(arg); */
8903 register_name
= "Performance6";
8904 goto cp0_unimplemented
;
8906 /* gen_helper_dmfc0_performance7(arg); */
8907 register_name
= "Performance7";
8908 goto cp0_unimplemented
;
8910 goto cp0_unimplemented
;
8913 case CP0_REGISTER_26
:
8916 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8917 register_name
= "ErrCtl";
8920 goto cp0_unimplemented
;
8923 case CP0_REGISTER_27
:
8930 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8931 register_name
= "CacheErr";
8934 goto cp0_unimplemented
;
8937 case CP0_REGISTER_28
:
8943 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8944 register_name
= "TagLo";
8950 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8951 register_name
= "DataLo";
8954 goto cp0_unimplemented
;
8957 case CP0_REGISTER_29
:
8963 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8964 register_name
= "TagHi";
8970 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8971 register_name
= "DataHi";
8974 goto cp0_unimplemented
;
8977 case CP0_REGISTER_30
:
8980 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8981 register_name
= "ErrorEPC";
8984 goto cp0_unimplemented
;
8987 case CP0_REGISTER_31
:
8991 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8992 register_name
= "DESAVE";
9000 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9001 tcg_gen_ld_tl(arg
, cpu_env
,
9002 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9003 register_name
= "KScratch";
9006 goto cp0_unimplemented
;
9010 goto cp0_unimplemented
;
9012 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
9016 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
9017 register_name
, reg
, sel
);
9018 gen_mfc0_unimplemented(ctx
, arg
);
/*
 * NOTE(review): this text is a damaged extraction — each source line is split
 * across several physical lines, the original file's line numbers are fused
 * into the content, and whole lines (switch headers, case labels, break
 * statements, closing braces) were dropped by the extraction; the embedded
 * numbering jumps (e.g. 9029->9034, 9043->9045) mark the gaps. The code below
 * is preserved byte-for-byte; only comments were added.
 *
 * Purpose (grounded in the visible calls): translate a doubleword move to a
 * CP0 (system control coprocessor) register. Dispatches on the CP0 register
 * number (CP0_REGISTER_00 .. CP0_REGISTER_31) and the select field `sel`,
 * emitting the matching mtc0 helper call or a direct tcg_gen_st_tl store into
 * CPUMIPSState; unsupported selectors funnel to the cp0_unimplemented label.
 */
9021 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
9023 const char *register_name
= "invalid";
/* DMTC0 is a MIPS64 instruction; reject it on narrower ISAs. */
9026 check_insn(ctx
, ISA_MIPS64
);
/* icount mode: lines between here and the first case were lost (gap 9029->9034). */
9029 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9034 case CP0_REGISTER_00
:
9036 case CP0_REG00__INDEX
:
9037 gen_helper_mtc0_index(cpu_env
, arg
);
9038 register_name
= "Index";
9040 case CP0_REG00__MVPCONTROL
:
9041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9042 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
9043 register_name
= "MVPControl";
9045 case CP0_REG00__MVPCONF0
:
9046 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9048 register_name
= "MVPConf0";
9050 case CP0_REG00__MVPCONF1
:
9051 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9053 register_name
= "MVPConf1";
9055 case CP0_REG00__VPCONTROL
:
9058 register_name
= "VPControl";
9061 goto cp0_unimplemented
;
9064 case CP0_REGISTER_01
:
9068 register_name
= "Random";
9071 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9072 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9073 register_name
= "VPEControl";
9076 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9077 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9078 register_name
= "VPEConf0";
9081 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9082 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9083 register_name
= "VPEConf1";
9086 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9087 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9088 register_name
= "YQMask";
9091 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9092 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
9093 register_name
= "VPESchedule";
9096 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9097 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9098 register_name
= "VPEScheFBack";
9101 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9102 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9103 register_name
= "VPEOpt";
9106 goto cp0_unimplemented
;
9109 case CP0_REGISTER_02
:
9112 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9113 register_name
= "EntryLo0";
9116 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9117 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9118 register_name
= "TCStatus";
9121 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9122 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9123 register_name
= "TCBind";
9126 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9127 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9128 register_name
= "TCRestart";
9131 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9132 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9133 register_name
= "TCHalt";
9136 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9137 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9138 register_name
= "TCContext";
9141 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9142 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9143 register_name
= "TCSchedule";
9146 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9147 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9148 register_name
= "TCScheFBack";
9151 goto cp0_unimplemented
;
9154 case CP0_REGISTER_03
:
9157 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9158 register_name
= "EntryLo1";
9163 register_name
= "GlobalNumber";
9166 goto cp0_unimplemented
;
9169 case CP0_REGISTER_04
:
9172 gen_helper_mtc0_context(cpu_env
, arg
);
9173 register_name
= "Context";
9176 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9177 register_name
= "ContextConfig";
9178 goto cp0_unimplemented
;
9180 CP0_CHECK(ctx
->ulri
);
9181 tcg_gen_st_tl(arg
, cpu_env
,
9182 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9183 register_name
= "UserLocal";
9186 goto cp0_unimplemented
;
9189 case CP0_REGISTER_05
:
9192 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9193 register_name
= "PageMask";
9196 check_insn(ctx
, ISA_MIPS32R2
);
9197 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9198 register_name
= "PageGrain";
9202 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9203 register_name
= "SegCtl0";
9207 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9208 register_name
= "SegCtl1";
9212 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9213 register_name
= "SegCtl2";
9217 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9218 register_name
= "PWBase";
9222 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9223 register_name
= "PWField";
9227 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9228 register_name
= "PWSize";
9231 goto cp0_unimplemented
;
9234 case CP0_REGISTER_06
:
9237 gen_helper_mtc0_wired(cpu_env
, arg
);
9238 register_name
= "Wired";
9241 check_insn(ctx
, ISA_MIPS32R2
);
9242 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9243 register_name
= "SRSConf0";
9246 check_insn(ctx
, ISA_MIPS32R2
);
9247 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9248 register_name
= "SRSConf1";
9251 check_insn(ctx
, ISA_MIPS32R2
);
9252 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9253 register_name
= "SRSConf2";
9256 check_insn(ctx
, ISA_MIPS32R2
);
9257 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9258 register_name
= "SRSConf3";
9261 check_insn(ctx
, ISA_MIPS32R2
);
9262 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9263 register_name
= "SRSConf4";
9267 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9268 register_name
= "PWCtl";
9271 goto cp0_unimplemented
;
9274 case CP0_REGISTER_07
:
9277 check_insn(ctx
, ISA_MIPS32R2
);
9278 gen_helper_mtc0_hwrena(cpu_env
, arg
);
/* HWREna gates RDHWR; end the TB so the new setting takes effect. */
9279 ctx
->base
.is_jmp
= DISAS_STOP
;
9280 register_name
= "HWREna";
9283 goto cp0_unimplemented
;
9286 case CP0_REGISTER_08
:
/* BadVAddr / BadInstr* are read-only: only the trace name is set here. */
9290 register_name
= "BadVAddr";
9294 register_name
= "BadInstr";
9298 register_name
= "BadInstrP";
9302 register_name
= "BadInstrX";
9305 goto cp0_unimplemented
;
9308 case CP0_REGISTER_09
:
9311 gen_helper_mtc0_count(cpu_env
, arg
);
9312 register_name
= "Count";
9315 CP0_CHECK(ctx
->saar
);
9316 gen_helper_mtc0_saari(cpu_env
, arg
);
9317 register_name
= "SAARI";
9320 CP0_CHECK(ctx
->saar
);
9321 gen_helper_mtc0_saar(cpu_env
, arg
);
9322 register_name
= "SAAR";
9325 goto cp0_unimplemented
;
9327 /* Stop translation as we may have switched the execution mode */
9328 ctx
->base
.is_jmp
= DISAS_STOP
;
9330 case CP0_REGISTER_10
:
9333 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9334 register_name
= "EntryHi";
9337 goto cp0_unimplemented
;
9340 case CP0_REGISTER_11
:
9343 gen_helper_mtc0_compare(cpu_env
, arg
);
9344 register_name
= "Compare";
9346 /* 6,7 are implementation dependent */
9348 goto cp0_unimplemented
;
9350 /* Stop translation as we may have switched the execution mode */
9351 ctx
->base
.is_jmp
= DISAS_STOP
;
9353 case CP0_REGISTER_12
:
9356 save_cpu_state(ctx
, 1);
9357 gen_helper_mtc0_status(cpu_env
, arg
);
9358 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9359 gen_save_pc(ctx
->base
.pc_next
+ 4);
9360 ctx
->base
.is_jmp
= DISAS_EXIT
;
9361 register_name
= "Status";
9364 check_insn(ctx
, ISA_MIPS32R2
);
9365 gen_helper_mtc0_intctl(cpu_env
, arg
);
9366 /* Stop translation as we may have switched the execution mode */
9367 ctx
->base
.is_jmp
= DISAS_STOP
;
9368 register_name
= "IntCtl";
9371 check_insn(ctx
, ISA_MIPS32R2
);
9372 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9373 /* Stop translation as we may have switched the execution mode */
9374 ctx
->base
.is_jmp
= DISAS_STOP
;
9375 register_name
= "SRSCtl";
9378 check_insn(ctx
, ISA_MIPS32R2
);
9379 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9380 /* Stop translation as we may have switched the execution mode */
9381 ctx
->base
.is_jmp
= DISAS_STOP
;
9382 register_name
= "SRSMap";
9385 goto cp0_unimplemented
;
9388 case CP0_REGISTER_13
:
9391 save_cpu_state(ctx
, 1);
9392 gen_helper_mtc0_cause(cpu_env
, arg
);
9394 * Stop translation as we may have triggered an interrupt.
9395 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9396 * translated code to check for pending interrupts.
9398 gen_save_pc(ctx
->base
.pc_next
+ 4);
9399 ctx
->base
.is_jmp
= DISAS_EXIT
;
9400 register_name
= "Cause";
9403 goto cp0_unimplemented
;
9406 case CP0_REGISTER_14
:
9409 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9410 register_name
= "EPC";
9413 goto cp0_unimplemented
;
9416 case CP0_REGISTER_15
:
9420 register_name
= "PRid";
9423 check_insn(ctx
, ISA_MIPS32R2
);
9424 gen_helper_mtc0_ebase(cpu_env
, arg
);
9425 register_name
= "EBase";
9428 goto cp0_unimplemented
;
9431 case CP0_REGISTER_16
:
9434 gen_helper_mtc0_config0(cpu_env
, arg
);
9435 register_name
= "Config";
9436 /* Stop translation as we may have switched the execution mode */
9437 ctx
->base
.is_jmp
= DISAS_STOP
;
9440 /* ignored, read only */
9441 register_name
= "Config1";
9444 gen_helper_mtc0_config2(cpu_env
, arg
);
9445 register_name
= "Config2";
9446 /* Stop translation as we may have switched the execution mode */
9447 ctx
->base
.is_jmp
= DISAS_STOP
;
9450 gen_helper_mtc0_config3(cpu_env
, arg
);
9451 register_name
= "Config3";
9452 /* Stop translation as we may have switched the execution mode */
9453 ctx
->base
.is_jmp
= DISAS_STOP
;
9456 /* currently ignored */
9457 register_name
= "Config4";
9460 gen_helper_mtc0_config5(cpu_env
, arg
);
9461 register_name
= "Config5";
9462 /* Stop translation as we may have switched the execution mode */
9463 ctx
->base
.is_jmp
= DISAS_STOP
;
9465 /* 6,7 are implementation dependent */
9467 register_name
= "Invalid config selector";
9468 goto cp0_unimplemented
;
9471 case CP0_REGISTER_17
:
9474 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9475 register_name
= "LLAddr";
9478 CP0_CHECK(ctx
->mrp
);
9479 gen_helper_mtc0_maar(cpu_env
, arg
);
9480 register_name
= "MAAR";
9483 CP0_CHECK(ctx
->mrp
);
9484 gen_helper_mtc0_maari(cpu_env
, arg
);
9485 register_name
= "MAARI";
9488 goto cp0_unimplemented
;
9491 case CP0_REGISTER_18
:
9501 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9502 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9503 register_name
= "WatchLo";
9506 goto cp0_unimplemented
;
9509 case CP0_REGISTER_19
:
9519 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9520 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9521 register_name
= "WatchHi";
9524 goto cp0_unimplemented
;
9527 case CP0_REGISTER_20
:
9530 check_insn(ctx
, ISA_MIPS3
);
9531 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9532 register_name
= "XContext";
9535 goto cp0_unimplemented
;
9538 case CP0_REGISTER_21
:
9539 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9540 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9543 gen_helper_mtc0_framemask(cpu_env
, arg
);
9544 register_name
= "Framemask";
9547 goto cp0_unimplemented
;
9550 case CP0_REGISTER_22
:
9552 register_name
= "Diagnostic"; /* implementation dependent */
9554 case CP0_REGISTER_23
:
9557 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9558 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9559 gen_save_pc(ctx
->base
.pc_next
+ 4);
9560 ctx
->base
.is_jmp
= DISAS_EXIT
;
9561 register_name
= "Debug";
9564 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9565 /* Stop translation as we may have switched the execution mode */
9566 ctx
->base
.is_jmp
= DISAS_STOP
;
9567 register_name
= "TraceControl";
9568 goto cp0_unimplemented
;
9570 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9571 /* Stop translation as we may have switched the execution mode */
9572 ctx
->base
.is_jmp
= DISAS_STOP
;
9573 register_name
= "TraceControl2";
9574 goto cp0_unimplemented
;
9576 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9577 /* Stop translation as we may have switched the execution mode */
9578 ctx
->base
.is_jmp
= DISAS_STOP
;
9579 register_name
= "UserTraceData";
9580 goto cp0_unimplemented
;
9582 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9583 /* Stop translation as we may have switched the execution mode */
9584 ctx
->base
.is_jmp
= DISAS_STOP
;
9585 register_name
= "TraceBPC";
9586 goto cp0_unimplemented
;
9588 goto cp0_unimplemented
;
9591 case CP0_REGISTER_24
:
9595 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9596 register_name
= "DEPC";
9599 goto cp0_unimplemented
;
9602 case CP0_REGISTER_25
:
9605 gen_helper_mtc0_performance0(cpu_env
, arg
);
9606 register_name
= "Performance0";
9609 /* gen_helper_mtc0_performance1(cpu_env, arg); */
9610 register_name
= "Performance1";
9611 goto cp0_unimplemented
;
9613 /* gen_helper_mtc0_performance2(cpu_env, arg); */
9614 register_name
= "Performance2";
9615 goto cp0_unimplemented
;
9617 /* gen_helper_mtc0_performance3(cpu_env, arg); */
9618 register_name
= "Performance3";
9619 goto cp0_unimplemented
;
9621 /* gen_helper_mtc0_performance4(cpu_env, arg); */
9622 register_name
= "Performance4";
9623 goto cp0_unimplemented
;
9625 /* gen_helper_mtc0_performance5(cpu_env, arg); */
9626 register_name
= "Performance5";
9627 goto cp0_unimplemented
;
9629 /* gen_helper_mtc0_performance6(cpu_env, arg); */
9630 register_name
= "Performance6";
9631 goto cp0_unimplemented
;
9633 /* gen_helper_mtc0_performance7(cpu_env, arg); */
9634 register_name
= "Performance7";
9635 goto cp0_unimplemented
;
9637 goto cp0_unimplemented
;
9640 case CP0_REGISTER_26
:
9643 gen_helper_mtc0_errctl(cpu_env
, arg
);
9644 ctx
->base
.is_jmp
= DISAS_STOP
;
9645 register_name
= "ErrCtl";
9648 goto cp0_unimplemented
;
9651 case CP0_REGISTER_27
:
9658 register_name
= "CacheErr";
9661 goto cp0_unimplemented
;
9664 case CP0_REGISTER_28
:
9670 gen_helper_mtc0_taglo(cpu_env
, arg
);
9671 register_name
= "TagLo";
9677 gen_helper_mtc0_datalo(cpu_env
, arg
);
9678 register_name
= "DataLo";
9681 goto cp0_unimplemented
;
9684 case CP0_REGISTER_29
:
9690 gen_helper_mtc0_taghi(cpu_env
, arg
);
9691 register_name
= "TagHi";
9697 gen_helper_mtc0_datahi(cpu_env
, arg
);
9698 register_name
= "DataHi";
9701 register_name
= "invalid sel";
9702 goto cp0_unimplemented
;
9705 case CP0_REGISTER_30
:
9708 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9709 register_name
= "ErrorEPC";
9712 goto cp0_unimplemented
;
9715 case CP0_REGISTER_31
:
9719 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9720 register_name
= "DESAVE";
/* KScratch: only selectors whose bit is set in kscrexist exist (sel 2..7). */
9728 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9729 tcg_gen_st_tl(arg
, cpu_env
,
9730 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
9731 register_name
= "KScratch";
9734 goto cp0_unimplemented
;
9738 goto cp0_unimplemented
;
9740 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9742 /* For simplicity assume that all writes can cause interrupts. */
9743 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9745 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9746 * translated code to check for pending interrupts.
9748 gen_save_pc(ctx
->base
.pc_next
+ 4);
9749 ctx
->base
.is_jmp
= DISAS_EXIT
;
9754 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9755 register_name
, reg
, sel
);
9757 #endif /* TARGET_MIPS64 */
/*
 * NOTE(review): damaged extraction — lines are shattered and the case labels
 * of the inner switches were dropped (see embedded-number gaps, e.g.
 * 9772->9777). Preserved byte-for-byte; only comments added.
 *
 * Purpose (grounded in visible calls): translate MFTR (MT ASE "move from
 * thread register"): read a register belonging to another TC/VPE into t0 and
 * store it to GPR rd. If the target TC is on another VPE without MVP
 * privilege, or the TC index exceeds MVPConf0.PTC, -1 is returned instead.
 * u==0 selects CP0 registers (mftc0_* helpers or plain gen_mfc0 when the
 * target is the current TC); otherwise GPRs, LO/HI/ACX accumulators, DSP
 * control, FPU registers (h selects the high half), or FPU control via cfc1.
 */
9759 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9760 int u
, int sel
, int h
)
9762 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
/* local temp is tcg_temp_local_new(): value must survive brcond boundaries */
9763 TCGv t0
= tcg_temp_local_new();
9765 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9766 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9767 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9768 tcg_gen_movi_tl(t0
, -1);
9769 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9770 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
9771 tcg_gen_movi_tl(t0
, -1);
9772 } else if (u
== 0) {
9777 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9780 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9790 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9793 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9796 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9799 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9802 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9805 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9808 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9811 gen_mfc0(ctx
, t0
, rt
, sel
);
9818 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9821 gen_mfc0(ctx
, t0
, rt
, sel
);
9828 gen_helper_mftc0_status(t0
, cpu_env
);
9831 gen_mfc0(ctx
, t0
, rt
, sel
);
9838 gen_helper_mftc0_cause(t0
, cpu_env
);
9848 gen_helper_mftc0_epc(t0
, cpu_env
);
9858 gen_helper_mftc0_ebase(t0
, cpu_env
);
9875 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9885 gen_helper_mftc0_debug(t0
, cpu_env
);
9888 gen_mfc0(ctx
, t0
, rt
, sel
);
9893 gen_mfc0(ctx
, t0
, rt
, sel
);
9897 /* GPR registers. */
9899 gen_helper_1e0i(mftgpr
, t0
, rt
);
9901 /* Auxiliary CPU registers */
9905 gen_helper_1e0i(mftlo
, t0
, 0);
9908 gen_helper_1e0i(mfthi
, t0
, 0);
9911 gen_helper_1e0i(mftacx
, t0
, 0);
9914 gen_helper_1e0i(mftlo
, t0
, 1);
9917 gen_helper_1e0i(mfthi
, t0
, 1);
9920 gen_helper_1e0i(mftacx
, t0
, 1);
9923 gen_helper_1e0i(mftlo
, t0
, 2);
9926 gen_helper_1e0i(mfthi
, t0
, 2);
9929 gen_helper_1e0i(mftacx
, t0
, 2);
9932 gen_helper_1e0i(mftlo
, t0
, 3);
9935 gen_helper_1e0i(mfthi
, t0
, 3);
9938 gen_helper_1e0i(mftacx
, t0
, 3);
9941 gen_helper_mftdsp(t0
, cpu_env
);
9947 /* Floating point (COP1). */
9949 /* XXX: For now we support only a single FPU context. */
9951 TCGv_i32 fp0
= tcg_temp_new_i32();
9953 gen_load_fpr32(ctx
, fp0
, rt
);
9954 tcg_gen_ext_i32_tl(t0
, fp0
);
9955 tcg_temp_free_i32(fp0
);
9957 TCGv_i32 fp0
= tcg_temp_new_i32();
/* h != 0 path: read the high 32 bits of the 64-bit FPR */
9959 gen_load_fpr32h(ctx
, fp0
, rt
);
9960 tcg_gen_ext_i32_tl(t0
, fp0
);
9961 tcg_temp_free_i32(fp0
);
9965 /* XXX: For now we support only a single FPU context. */
9966 gen_helper_1e0i(cfc1
, t0
, rt
);
9968 /* COP2: Not implemented. */
9976 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9977 gen_store_gpr(t0
, rd
);
9983 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9984 generate_exception_end(ctx
, EXCP_RI
);
/*
 * NOTE(review): damaged extraction — lines shattered, case labels of the
 * inner switches dropped (number gaps, e.g. 10003->10008). Preserved
 * byte-for-byte; only comments added.
 *
 * Purpose (grounded in visible calls): translate MTTR (MT ASE "move to
 * thread register"): mirror image of gen_mftr. Loads GPR rt into t0 and
 * writes it to the target TC/VPE register selected by (rd, u, sel, h) —
 * CP0 via mttc0_* helpers / gen_mtc0, GPRs via mttgpr, LO/HI/ACX, DSP
 * control, FPU data registers (h selects the high half), or FPU control
 * via ctc1 (which ends the TB since hflags may change).
 */
9987 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9988 int u
, int sel
, int h
)
9990 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9991 TCGv t0
= tcg_temp_local_new();
9993 gen_load_gpr(t0
, rt
);
/* Inaccessible target TC: the write is silently dropped (empty branches). */
9994 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9995 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9996 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9999 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
10000 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
10003 } else if (u
== 0) {
10008 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
10011 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
10021 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
10024 gen_helper_mttc0_tcbind(cpu_env
, t0
);
10027 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
10030 gen_helper_mttc0_tchalt(cpu_env
, t0
);
10033 gen_helper_mttc0_tccontext(cpu_env
, t0
);
10036 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
10039 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
10042 gen_mtc0(ctx
, t0
, rd
, sel
);
10049 gen_helper_mttc0_entryhi(cpu_env
, t0
);
10052 gen_mtc0(ctx
, t0
, rd
, sel
);
10059 gen_helper_mttc0_status(cpu_env
, t0
);
10062 gen_mtc0(ctx
, t0
, rd
, sel
);
10069 gen_helper_mttc0_cause(cpu_env
, t0
);
10079 gen_helper_mttc0_ebase(cpu_env
, t0
);
10089 gen_helper_mttc0_debug(cpu_env
, t0
);
10092 gen_mtc0(ctx
, t0
, rd
, sel
);
10097 gen_mtc0(ctx
, t0
, rd
, sel
);
10101 /* GPR registers. */
10103 gen_helper_0e1i(mttgpr
, t0
, rd
);
10105 /* Auxiliary CPU registers */
10109 gen_helper_0e1i(mttlo
, t0
, 0);
10112 gen_helper_0e1i(mtthi
, t0
, 0);
10115 gen_helper_0e1i(mttacx
, t0
, 0);
10118 gen_helper_0e1i(mttlo
, t0
, 1);
10121 gen_helper_0e1i(mtthi
, t0
, 1);
10124 gen_helper_0e1i(mttacx
, t0
, 1);
10127 gen_helper_0e1i(mttlo
, t0
, 2);
10130 gen_helper_0e1i(mtthi
, t0
, 2);
10133 gen_helper_0e1i(mttacx
, t0
, 2);
10136 gen_helper_0e1i(mttlo
, t0
, 3);
10139 gen_helper_0e1i(mtthi
, t0
, 3);
10142 gen_helper_0e1i(mttacx
, t0
, 3);
10145 gen_helper_mttdsp(cpu_env
, t0
);
10151 /* Floating point (COP1). */
10153 /* XXX: For now we support only a single FPU context. */
10155 TCGv_i32 fp0
= tcg_temp_new_i32();
10157 tcg_gen_trunc_tl_i32(fp0
, t0
);
10158 gen_store_fpr32(ctx
, fp0
, rd
);
10159 tcg_temp_free_i32(fp0
);
10161 TCGv_i32 fp0
= tcg_temp_new_i32();
10163 tcg_gen_trunc_tl_i32(fp0
, t0
);
/* h != 0 path: write the high 32 bits of the 64-bit FPR */
10164 gen_store_fpr32h(ctx
, fp0
, rd
);
10165 tcg_temp_free_i32(fp0
);
10169 /* XXX: For now we support only a single FPU context. */
10171 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10173 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10174 tcg_temp_free_i32(fs_tmp
);
10176 /* Stop translation as we may have changed hflags */
10177 ctx
->base
.is_jmp
= DISAS_STOP
;
10179 /* COP2: Not implemented. */
10187 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10193 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10194 generate_exception_end(ctx
, EXCP_RI
);
/*
 * NOTE(review): damaged extraction — the opcode case labels (MFC0/MTC0/
 * DMFC0/DMTC0/MFHC0/MTHC0/MFTR/MTTR/TLB ops/ERET/DERET/WAIT) and many
 * break/brace lines were dropped (number gaps, e.g. 10202->10206).
 * Preserved byte-for-byte; only comments added.
 *
 * Purpose (grounded in visible calls): top-level dispatcher for CP0
 * (coprocessor 0) instructions: register moves (delegating to gen_mfc0/
 * gen_mtc0 and the 64-bit / high-half / MT-ASE variants), TLB maintenance
 * (guarded by the per-CPU env->tlb->helper_* function pointers), and the
 * exception-return / wait instructions that terminate the TB.
 */
10197 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
10200 const char *opn
= "ldst";
10202 check_cp0_enabled(ctx
);
/* Moves with rt == 0 read into $zero and are treated as NOPs below. */
10206 /* Treat as NOP. */
10209 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10214 TCGv t0
= tcg_temp_new();
10216 gen_load_gpr(t0
, rt
);
10217 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10222 #if defined(TARGET_MIPS64)
10224 check_insn(ctx
, ISA_MIPS3
);
10226 /* Treat as NOP. */
10229 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10233 check_insn(ctx
, ISA_MIPS3
);
10235 TCGv t0
= tcg_temp_new();
10237 gen_load_gpr(t0
, rt
);
10238 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10247 /* Treat as NOP. */
10250 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10256 TCGv t0
= tcg_temp_new();
10257 gen_load_gpr(t0
, rt
);
10258 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10264 check_cp0_enabled(ctx
);
10266 /* Treat as NOP. */
/* MFTR/MTTR: u = opcode bit 5, sel = low 3 bits, h = opcode bit 4 */
10269 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10270 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10274 check_cp0_enabled(ctx
);
10275 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10276 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
/* TLB ops: each is a NOP unless the CPU model supplies the helper. */
10281 if (!env
->tlb
->helper_tlbwi
) {
10284 gen_helper_tlbwi(cpu_env
);
10288 if (ctx
->ie
>= 2) {
10289 if (!env
->tlb
->helper_tlbinv
) {
10292 gen_helper_tlbinv(cpu_env
);
10293 } /* treat as nop if TLBINV not supported */
10297 if (ctx
->ie
>= 2) {
10298 if (!env
->tlb
->helper_tlbinvf
) {
10301 gen_helper_tlbinvf(cpu_env
);
10302 } /* treat as nop if TLBINV not supported */
10306 if (!env
->tlb
->helper_tlbwr
) {
10309 gen_helper_tlbwr(cpu_env
);
10313 if (!env
->tlb
->helper_tlbp
) {
10316 gen_helper_tlbp(cpu_env
);
10320 if (!env
->tlb
->helper_tlbr
) {
10323 gen_helper_tlbr(cpu_env
);
10325 case OPC_ERET
: /* OPC_ERETNC */
10326 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10327 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
/* ERET vs ERETNC is distinguished by one opcode bit whose position
   depends on the MIPS16/microMIPS encoding (bit 16) vs classic (bit 6). */
10330 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10331 if (ctx
->opcode
& (1 << bit_shift
)) {
10334 check_insn(ctx
, ISA_MIPS32R5
);
10335 gen_helper_eretnc(cpu_env
);
10339 check_insn(ctx
, ISA_MIPS2
);
10340 gen_helper_eret(cpu_env
);
10342 ctx
->base
.is_jmp
= DISAS_EXIT
;
10347 check_insn(ctx
, ISA_MIPS32
);
10348 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10349 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10352 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10354 generate_exception_end(ctx
, EXCP_RI
);
10356 gen_helper_deret(cpu_env
);
10357 ctx
->base
.is_jmp
= DISAS_EXIT
;
10362 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10363 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10364 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10367 /* If we get an exception, we want to restart at next instruction */
10368 ctx
->base
.pc_next
+= 4;
10369 save_cpu_state(ctx
, 1);
10370 ctx
->base
.pc_next
-= 4;
10371 gen_helper_wait(cpu_env
);
10372 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10377 generate_exception_end(ctx
, EXCP_RI
);
10380 (void)opn
; /* avoid a compiler warning */
10382 #endif /* !CONFIG_USER_ONLY */
/*
 * NOTE(review): damaged extraction — the switch header and the case labels
 * (the BC1F/BC1T/BC1FL/BC1TL and BC1xANY2/BC1xANY4 opcodes, per the
 * condition patterns below) were dropped. Preserved byte-for-byte; only
 * comments added.
 *
 * Purpose (grounded in visible calls): translate pre-R6 CP1 (FPU)
 * conditional branches. Extracts FCSR condition bit(s) selected by `cc`
 * from fpu_fcr31 into the global `bcond`, sets the branch hflags
 * (MIPS_HFLAG_BL for "likely" forms, MIPS_HFLAG_BC otherwise) and records
 * the branch target for the delay-slot machinery.
 */
10384 /* CP1 Branches (before delay slot) */
10385 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10386 int32_t cc
, int32_t offset
)
10388 target_ulong btarget
;
10389 TCGv_i32 t0
= tcg_temp_new_i32();
/* R6 forbids these opcodes in a delay/forbidden slot. */
10391 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10392 generate_exception_end(ctx
, EXCP_RI
);
10397 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
10400 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
/* single-bit tests: not+and -> branch on condition false */
10404 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10405 tcg_gen_not_i32(t0
, t0
);
10406 tcg_gen_andi_i32(t0
, t0
, 1);
10407 tcg_gen_extu_i32_tl(bcond
, t0
);
10410 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10411 tcg_gen_not_i32(t0
, t0
);
10412 tcg_gen_andi_i32(t0
, t0
, 1);
10413 tcg_gen_extu_i32_tl(bcond
, t0
);
/* and -> branch on condition true */
10416 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10417 tcg_gen_andi_i32(t0
, t0
, 1);
10418 tcg_gen_extu_i32_tl(bcond
, t0
);
10421 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10422 tcg_gen_andi_i32(t0
, t0
, 1);
10423 tcg_gen_extu_i32_tl(bcond
, t0
);
10425 ctx
->hflags
|= MIPS_HFLAG_BL
;
/* any-of-two bits false: NAND of bits cc and cc+1 */
10429 TCGv_i32 t1
= tcg_temp_new_i32();
10430 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10431 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10432 tcg_gen_nand_i32(t0
, t0
, t1
);
10433 tcg_temp_free_i32(t1
);
10434 tcg_gen_andi_i32(t0
, t0
, 1);
10435 tcg_gen_extu_i32_tl(bcond
, t0
);
/* any-of-two bits true: OR of bits cc and cc+1 */
10440 TCGv_i32 t1
= tcg_temp_new_i32();
10441 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10442 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10443 tcg_gen_or_i32(t0
, t0
, t1
);
10444 tcg_temp_free_i32(t1
);
10445 tcg_gen_andi_i32(t0
, t0
, 1);
10446 tcg_gen_extu_i32_tl(bcond
, t0
);
/* any-of-four bits false: NAND of the AND of bits cc..cc+3 */
10451 TCGv_i32 t1
= tcg_temp_new_i32();
10452 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10453 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10454 tcg_gen_and_i32(t0
, t0
, t1
);
10455 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10456 tcg_gen_and_i32(t0
, t0
, t1
);
10457 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10458 tcg_gen_nand_i32(t0
, t0
, t1
);
10459 tcg_temp_free_i32(t1
);
10460 tcg_gen_andi_i32(t0
, t0
, 1);
10461 tcg_gen_extu_i32_tl(bcond
, t0
);
/* any-of-four bits true: OR of bits cc..cc+3 */
10466 TCGv_i32 t1
= tcg_temp_new_i32();
10467 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10468 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10469 tcg_gen_or_i32(t0
, t0
, t1
);
10470 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10471 tcg_gen_or_i32(t0
, t0
, t1
);
10472 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10473 tcg_gen_or_i32(t0
, t0
, t1
);
10474 tcg_temp_free_i32(t1
);
10475 tcg_gen_andi_i32(t0
, t0
, 1);
10476 tcg_gen_extu_i32_tl(bcond
, t0
);
10479 ctx
->hflags
|= MIPS_HFLAG_BC
;
10482 MIPS_INVAL("cp1 cond branch");
10483 generate_exception_end(ctx
, EXCP_RI
);
10486 ctx
->btarget
= btarget
;
10487 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10489 tcg_temp_free_i32(t0
);
/*
 * NOTE(review): damaged extraction — the switch headers, the opcode case
 * labels (BC1EQZ/BC1NEZ per the xori vs pass-through condition handling),
 * an #endif and intermediate braces were dropped. Preserved byte-for-byte;
 * only comments added.
 *
 * Purpose (grounded in visible calls): translate R6 CP1 branches that test
 * bit 0 of FPR `ft`: the bit is loaded and masked, optionally inverted
 * (the xori path, i.e. the "equal zero" form), truncated into the global
 * `bcond`, and the delay-slot size (2 or 4 bytes) selects the BDS16/BDS32
 * hflag for the branch machinery.
 */
10492 /* R6 CP1 Branches */
10493 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10494 int32_t ft
, int32_t offset
,
10495 int delayslot_size
)
10497 target_ulong btarget
;
10498 TCGv_i64 t0
= tcg_temp_new_i64();
/* These branches themselves may not sit in a delay/forbidden slot. */
10500 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10501 #ifdef MIPS_DEBUG_DISAS
10502 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10503 "\n", ctx
->base
.pc_next
);
10505 generate_exception_end(ctx
, EXCP_RI
);
/* Condition = bit 0 of the 64-bit view of FPR ft. */
10509 gen_load_fpr64(ctx
, t0
, ft
);
10510 tcg_gen_andi_i64(t0
, t0
, 1);
10512 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10516 tcg_gen_xori_i64(t0
, t0
, 1);
10517 ctx
->hflags
|= MIPS_HFLAG_BC
;
10520 /* t0 already set */
10521 ctx
->hflags
|= MIPS_HFLAG_BC
;
10524 MIPS_INVAL("cp1 cond branch");
10525 generate_exception_end(ctx
, EXCP_RI
);
10529 tcg_gen_trunc_i64_tl(bcond
, t0
);
10531 ctx
->btarget
= btarget
;
10533 switch (delayslot_size
) {
10535 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10538 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10543 tcg_temp_free_i64(t0
);
10546 /* Coprocessor 1 (FPU) */
10548 #define FOP(func, fmt) (((fmt) << 21) | (func))
10551 OPC_ADD_S
= FOP(0, FMT_S
),
10552 OPC_SUB_S
= FOP(1, FMT_S
),
10553 OPC_MUL_S
= FOP(2, FMT_S
),
10554 OPC_DIV_S
= FOP(3, FMT_S
),
10555 OPC_SQRT_S
= FOP(4, FMT_S
),
10556 OPC_ABS_S
= FOP(5, FMT_S
),
10557 OPC_MOV_S
= FOP(6, FMT_S
),
10558 OPC_NEG_S
= FOP(7, FMT_S
),
10559 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10560 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10561 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10562 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10563 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10564 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10565 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10566 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10567 OPC_SEL_S
= FOP(16, FMT_S
),
10568 OPC_MOVCF_S
= FOP(17, FMT_S
),
10569 OPC_MOVZ_S
= FOP(18, FMT_S
),
10570 OPC_MOVN_S
= FOP(19, FMT_S
),
10571 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10572 OPC_RECIP_S
= FOP(21, FMT_S
),
10573 OPC_RSQRT_S
= FOP(22, FMT_S
),
10574 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10575 OPC_MADDF_S
= FOP(24, FMT_S
),
10576 OPC_MSUBF_S
= FOP(25, FMT_S
),
10577 OPC_RINT_S
= FOP(26, FMT_S
),
10578 OPC_CLASS_S
= FOP(27, FMT_S
),
10579 OPC_MIN_S
= FOP(28, FMT_S
),
10580 OPC_RECIP2_S
= FOP(28, FMT_S
),
10581 OPC_MINA_S
= FOP(29, FMT_S
),
10582 OPC_RECIP1_S
= FOP(29, FMT_S
),
10583 OPC_MAX_S
= FOP(30, FMT_S
),
10584 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10585 OPC_MAXA_S
= FOP(31, FMT_S
),
10586 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10587 OPC_CVT_D_S
= FOP(33, FMT_S
),
10588 OPC_CVT_W_S
= FOP(36, FMT_S
),
10589 OPC_CVT_L_S
= FOP(37, FMT_S
),
10590 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10591 OPC_CMP_F_S
= FOP(48, FMT_S
),
10592 OPC_CMP_UN_S
= FOP(49, FMT_S
),
10593 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
10594 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
10595 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
10596 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
10597 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
10598 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
10599 OPC_CMP_SF_S
= FOP(56, FMT_S
),
10600 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
10601 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
10602 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
10603 OPC_CMP_LT_S
= FOP(60, FMT_S
),
10604 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
10605 OPC_CMP_LE_S
= FOP(62, FMT_S
),
10606 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
10608 OPC_ADD_D
= FOP(0, FMT_D
),
10609 OPC_SUB_D
= FOP(1, FMT_D
),
10610 OPC_MUL_D
= FOP(2, FMT_D
),
10611 OPC_DIV_D
= FOP(3, FMT_D
),
10612 OPC_SQRT_D
= FOP(4, FMT_D
),
10613 OPC_ABS_D
= FOP(5, FMT_D
),
10614 OPC_MOV_D
= FOP(6, FMT_D
),
10615 OPC_NEG_D
= FOP(7, FMT_D
),
10616 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10617 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10618 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10619 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10620 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10621 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10622 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10623 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10624 OPC_SEL_D
= FOP(16, FMT_D
),
10625 OPC_MOVCF_D
= FOP(17, FMT_D
),
10626 OPC_MOVZ_D
= FOP(18, FMT_D
),
10627 OPC_MOVN_D
= FOP(19, FMT_D
),
10628 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10629 OPC_RECIP_D
= FOP(21, FMT_D
),
10630 OPC_RSQRT_D
= FOP(22, FMT_D
),
10631 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10632 OPC_MADDF_D
= FOP(24, FMT_D
),
10633 OPC_MSUBF_D
= FOP(25, FMT_D
),
10634 OPC_RINT_D
= FOP(26, FMT_D
),
10635 OPC_CLASS_D
= FOP(27, FMT_D
),
10636 OPC_MIN_D
= FOP(28, FMT_D
),
10637 OPC_RECIP2_D
= FOP(28, FMT_D
),
10638 OPC_MINA_D
= FOP(29, FMT_D
),
10639 OPC_RECIP1_D
= FOP(29, FMT_D
),
10640 OPC_MAX_D
= FOP(30, FMT_D
),
10641 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10642 OPC_MAXA_D
= FOP(31, FMT_D
),
10643 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10644 OPC_CVT_S_D
= FOP(32, FMT_D
),
10645 OPC_CVT_W_D
= FOP(36, FMT_D
),
10646 OPC_CVT_L_D
= FOP(37, FMT_D
),
10647 OPC_CMP_F_D
= FOP(48, FMT_D
),
10648 OPC_CMP_UN_D
= FOP(49, FMT_D
),
10649 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
10650 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
10651 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
10652 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
10653 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
10654 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
10655 OPC_CMP_SF_D
= FOP(56, FMT_D
),
10656 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
10657 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
10658 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
10659 OPC_CMP_LT_D
= FOP(60, FMT_D
),
10660 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
10661 OPC_CMP_LE_D
= FOP(62, FMT_D
),
10662 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
10664 OPC_CVT_S_W
= FOP(32, FMT_W
),
10665 OPC_CVT_D_W
= FOP(33, FMT_W
),
10666 OPC_CVT_S_L
= FOP(32, FMT_L
),
10667 OPC_CVT_D_L
= FOP(33, FMT_L
),
10668 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10670 OPC_ADD_PS
= FOP(0, FMT_PS
),
10671 OPC_SUB_PS
= FOP(1, FMT_PS
),
10672 OPC_MUL_PS
= FOP(2, FMT_PS
),
10673 OPC_DIV_PS
= FOP(3, FMT_PS
),
10674 OPC_ABS_PS
= FOP(5, FMT_PS
),
10675 OPC_MOV_PS
= FOP(6, FMT_PS
),
10676 OPC_NEG_PS
= FOP(7, FMT_PS
),
10677 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10678 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10679 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10680 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10681 OPC_MULR_PS
= FOP(26, FMT_PS
),
10682 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10683 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10684 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10685 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10687 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10688 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10689 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10690 OPC_PLL_PS
= FOP(44, FMT_PS
),
10691 OPC_PLU_PS
= FOP(45, FMT_PS
),
10692 OPC_PUL_PS
= FOP(46, FMT_PS
),
10693 OPC_PUU_PS
= FOP(47, FMT_PS
),
10694 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
10695 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
10696 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
10697 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
10698 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
10699 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
10700 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
10701 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
10702 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
10703 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
10704 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
10705 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
10706 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
10707 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
10708 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
10709 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
10713 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10714 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10715 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10716 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10717 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10718 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10719 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10720 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10721 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10722 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10723 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10724 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10725 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10726 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10727 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10728 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10729 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10730 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10731 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10732 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10733 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10734 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10736 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10737 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10738 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10739 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10740 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10741 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10742 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10743 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10744 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10745 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10746 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10747 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10748 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10749 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10750 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10751 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10752 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10753 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10754 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10755 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10756 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10757 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10760 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10762 TCGv t0
= tcg_temp_new();
10767 TCGv_i32 fp0
= tcg_temp_new_i32();
10769 gen_load_fpr32(ctx
, fp0
, fs
);
10770 tcg_gen_ext_i32_tl(t0
, fp0
);
10771 tcg_temp_free_i32(fp0
);
10773 gen_store_gpr(t0
, rt
);
10776 gen_load_gpr(t0
, rt
);
10778 TCGv_i32 fp0
= tcg_temp_new_i32();
10780 tcg_gen_trunc_tl_i32(fp0
, t0
);
10781 gen_store_fpr32(ctx
, fp0
, fs
);
10782 tcg_temp_free_i32(fp0
);
10786 gen_helper_1e0i(cfc1
, t0
, fs
);
10787 gen_store_gpr(t0
, rt
);
10790 gen_load_gpr(t0
, rt
);
10791 save_cpu_state(ctx
, 0);
10793 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10795 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10796 tcg_temp_free_i32(fs_tmp
);
10798 /* Stop translation as we may have changed hflags */
10799 ctx
->base
.is_jmp
= DISAS_STOP
;
10801 #if defined(TARGET_MIPS64)
10803 gen_load_fpr64(ctx
, t0
, fs
);
10804 gen_store_gpr(t0
, rt
);
10807 gen_load_gpr(t0
, rt
);
10808 gen_store_fpr64(ctx
, t0
, fs
);
10813 TCGv_i32 fp0
= tcg_temp_new_i32();
10815 gen_load_fpr32h(ctx
, fp0
, fs
);
10816 tcg_gen_ext_i32_tl(t0
, fp0
);
10817 tcg_temp_free_i32(fp0
);
10819 gen_store_gpr(t0
, rt
);
10822 gen_load_gpr(t0
, rt
);
10824 TCGv_i32 fp0
= tcg_temp_new_i32();
10826 tcg_gen_trunc_tl_i32(fp0
, t0
);
10827 gen_store_fpr32h(ctx
, fp0
, fs
);
10828 tcg_temp_free_i32(fp0
);
10832 MIPS_INVAL("cp1 move");
10833 generate_exception_end(ctx
, EXCP_RI
);
10841 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10848 /* Treat as NOP. */
10853 cond
= TCG_COND_EQ
;
10855 cond
= TCG_COND_NE
;
10858 l1
= gen_new_label();
10859 t0
= tcg_temp_new_i32();
10860 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10861 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10862 tcg_temp_free_i32(t0
);
10864 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10866 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10871 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10875 TCGv_i32 t0
= tcg_temp_new_i32();
10876 TCGLabel
*l1
= gen_new_label();
10879 cond
= TCG_COND_EQ
;
10881 cond
= TCG_COND_NE
;
10884 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10885 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10886 gen_load_fpr32(ctx
, t0
, fs
);
10887 gen_store_fpr32(ctx
, t0
, fd
);
10889 tcg_temp_free_i32(t0
);
10892 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10896 TCGv_i32 t0
= tcg_temp_new_i32();
10898 TCGLabel
*l1
= gen_new_label();
10901 cond
= TCG_COND_EQ
;
10903 cond
= TCG_COND_NE
;
10906 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10907 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10908 tcg_temp_free_i32(t0
);
10909 fp0
= tcg_temp_new_i64();
10910 gen_load_fpr64(ctx
, fp0
, fs
);
10911 gen_store_fpr64(ctx
, fp0
, fd
);
10912 tcg_temp_free_i64(fp0
);
10916 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10920 TCGv_i32 t0
= tcg_temp_new_i32();
10921 TCGLabel
*l1
= gen_new_label();
10922 TCGLabel
*l2
= gen_new_label();
10925 cond
= TCG_COND_EQ
;
10927 cond
= TCG_COND_NE
;
10930 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10931 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10932 gen_load_fpr32(ctx
, t0
, fs
);
10933 gen_store_fpr32(ctx
, t0
, fd
);
10936 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+ 1));
10937 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10938 gen_load_fpr32h(ctx
, t0
, fs
);
10939 gen_store_fpr32h(ctx
, t0
, fd
);
10940 tcg_temp_free_i32(t0
);
10944 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10947 TCGv_i32 t1
= tcg_const_i32(0);
10948 TCGv_i32 fp0
= tcg_temp_new_i32();
10949 TCGv_i32 fp1
= tcg_temp_new_i32();
10950 TCGv_i32 fp2
= tcg_temp_new_i32();
10951 gen_load_fpr32(ctx
, fp0
, fd
);
10952 gen_load_fpr32(ctx
, fp1
, ft
);
10953 gen_load_fpr32(ctx
, fp2
, fs
);
10957 tcg_gen_andi_i32(fp0
, fp0
, 1);
10958 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10961 tcg_gen_andi_i32(fp1
, fp1
, 1);
10962 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10965 tcg_gen_andi_i32(fp1
, fp1
, 1);
10966 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10969 MIPS_INVAL("gen_sel_s");
10970 generate_exception_end(ctx
, EXCP_RI
);
10974 gen_store_fpr32(ctx
, fp0
, fd
);
10975 tcg_temp_free_i32(fp2
);
10976 tcg_temp_free_i32(fp1
);
10977 tcg_temp_free_i32(fp0
);
10978 tcg_temp_free_i32(t1
);
10981 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10984 TCGv_i64 t1
= tcg_const_i64(0);
10985 TCGv_i64 fp0
= tcg_temp_new_i64();
10986 TCGv_i64 fp1
= tcg_temp_new_i64();
10987 TCGv_i64 fp2
= tcg_temp_new_i64();
10988 gen_load_fpr64(ctx
, fp0
, fd
);
10989 gen_load_fpr64(ctx
, fp1
, ft
);
10990 gen_load_fpr64(ctx
, fp2
, fs
);
10994 tcg_gen_andi_i64(fp0
, fp0
, 1);
10995 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10998 tcg_gen_andi_i64(fp1
, fp1
, 1);
10999 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
11002 tcg_gen_andi_i64(fp1
, fp1
, 1);
11003 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
11006 MIPS_INVAL("gen_sel_d");
11007 generate_exception_end(ctx
, EXCP_RI
);
11011 gen_store_fpr64(ctx
, fp0
, fd
);
11012 tcg_temp_free_i64(fp2
);
11013 tcg_temp_free_i64(fp1
);
11014 tcg_temp_free_i64(fp0
);
11015 tcg_temp_free_i64(t1
);
11018 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
11019 int ft
, int fs
, int fd
, int cc
)
11021 uint32_t func
= ctx
->opcode
& 0x3f;
11025 TCGv_i32 fp0
= tcg_temp_new_i32();
11026 TCGv_i32 fp1
= tcg_temp_new_i32();
11028 gen_load_fpr32(ctx
, fp0
, fs
);
11029 gen_load_fpr32(ctx
, fp1
, ft
);
11030 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
11031 tcg_temp_free_i32(fp1
);
11032 gen_store_fpr32(ctx
, fp0
, fd
);
11033 tcg_temp_free_i32(fp0
);
11038 TCGv_i32 fp0
= tcg_temp_new_i32();
11039 TCGv_i32 fp1
= tcg_temp_new_i32();
11041 gen_load_fpr32(ctx
, fp0
, fs
);
11042 gen_load_fpr32(ctx
, fp1
, ft
);
11043 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
11044 tcg_temp_free_i32(fp1
);
11045 gen_store_fpr32(ctx
, fp0
, fd
);
11046 tcg_temp_free_i32(fp0
);
11051 TCGv_i32 fp0
= tcg_temp_new_i32();
11052 TCGv_i32 fp1
= tcg_temp_new_i32();
11054 gen_load_fpr32(ctx
, fp0
, fs
);
11055 gen_load_fpr32(ctx
, fp1
, ft
);
11056 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
11057 tcg_temp_free_i32(fp1
);
11058 gen_store_fpr32(ctx
, fp0
, fd
);
11059 tcg_temp_free_i32(fp0
);
11064 TCGv_i32 fp0
= tcg_temp_new_i32();
11065 TCGv_i32 fp1
= tcg_temp_new_i32();
11067 gen_load_fpr32(ctx
, fp0
, fs
);
11068 gen_load_fpr32(ctx
, fp1
, ft
);
11069 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
11070 tcg_temp_free_i32(fp1
);
11071 gen_store_fpr32(ctx
, fp0
, fd
);
11072 tcg_temp_free_i32(fp0
);
11077 TCGv_i32 fp0
= tcg_temp_new_i32();
11079 gen_load_fpr32(ctx
, fp0
, fs
);
11080 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
11081 gen_store_fpr32(ctx
, fp0
, fd
);
11082 tcg_temp_free_i32(fp0
);
11087 TCGv_i32 fp0
= tcg_temp_new_i32();
11089 gen_load_fpr32(ctx
, fp0
, fs
);
11090 if (ctx
->abs2008
) {
11091 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11093 gen_helper_float_abs_s(fp0
, fp0
);
11095 gen_store_fpr32(ctx
, fp0
, fd
);
11096 tcg_temp_free_i32(fp0
);
11101 TCGv_i32 fp0
= tcg_temp_new_i32();
11103 gen_load_fpr32(ctx
, fp0
, fs
);
11104 gen_store_fpr32(ctx
, fp0
, fd
);
11105 tcg_temp_free_i32(fp0
);
11110 TCGv_i32 fp0
= tcg_temp_new_i32();
11112 gen_load_fpr32(ctx
, fp0
, fs
);
11113 if (ctx
->abs2008
) {
11114 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11116 gen_helper_float_chs_s(fp0
, fp0
);
11118 gen_store_fpr32(ctx
, fp0
, fd
);
11119 tcg_temp_free_i32(fp0
);
11122 case OPC_ROUND_L_S
:
11123 check_cp1_64bitmode(ctx
);
11125 TCGv_i32 fp32
= tcg_temp_new_i32();
11126 TCGv_i64 fp64
= tcg_temp_new_i64();
11128 gen_load_fpr32(ctx
, fp32
, fs
);
11129 if (ctx
->nan2008
) {
11130 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11132 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11134 tcg_temp_free_i32(fp32
);
11135 gen_store_fpr64(ctx
, fp64
, fd
);
11136 tcg_temp_free_i64(fp64
);
11139 case OPC_TRUNC_L_S
:
11140 check_cp1_64bitmode(ctx
);
11142 TCGv_i32 fp32
= tcg_temp_new_i32();
11143 TCGv_i64 fp64
= tcg_temp_new_i64();
11145 gen_load_fpr32(ctx
, fp32
, fs
);
11146 if (ctx
->nan2008
) {
11147 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11149 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11151 tcg_temp_free_i32(fp32
);
11152 gen_store_fpr64(ctx
, fp64
, fd
);
11153 tcg_temp_free_i64(fp64
);
11157 check_cp1_64bitmode(ctx
);
11159 TCGv_i32 fp32
= tcg_temp_new_i32();
11160 TCGv_i64 fp64
= tcg_temp_new_i64();
11162 gen_load_fpr32(ctx
, fp32
, fs
);
11163 if (ctx
->nan2008
) {
11164 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11166 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11168 tcg_temp_free_i32(fp32
);
11169 gen_store_fpr64(ctx
, fp64
, fd
);
11170 tcg_temp_free_i64(fp64
);
11173 case OPC_FLOOR_L_S
:
11174 check_cp1_64bitmode(ctx
);
11176 TCGv_i32 fp32
= tcg_temp_new_i32();
11177 TCGv_i64 fp64
= tcg_temp_new_i64();
11179 gen_load_fpr32(ctx
, fp32
, fs
);
11180 if (ctx
->nan2008
) {
11181 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11183 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11185 tcg_temp_free_i32(fp32
);
11186 gen_store_fpr64(ctx
, fp64
, fd
);
11187 tcg_temp_free_i64(fp64
);
11190 case OPC_ROUND_W_S
:
11192 TCGv_i32 fp0
= tcg_temp_new_i32();
11194 gen_load_fpr32(ctx
, fp0
, fs
);
11195 if (ctx
->nan2008
) {
11196 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11198 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11200 gen_store_fpr32(ctx
, fp0
, fd
);
11201 tcg_temp_free_i32(fp0
);
11204 case OPC_TRUNC_W_S
:
11206 TCGv_i32 fp0
= tcg_temp_new_i32();
11208 gen_load_fpr32(ctx
, fp0
, fs
);
11209 if (ctx
->nan2008
) {
11210 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11212 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11214 gen_store_fpr32(ctx
, fp0
, fd
);
11215 tcg_temp_free_i32(fp0
);
11220 TCGv_i32 fp0
= tcg_temp_new_i32();
11222 gen_load_fpr32(ctx
, fp0
, fs
);
11223 if (ctx
->nan2008
) {
11224 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11226 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11228 gen_store_fpr32(ctx
, fp0
, fd
);
11229 tcg_temp_free_i32(fp0
);
11232 case OPC_FLOOR_W_S
:
11234 TCGv_i32 fp0
= tcg_temp_new_i32();
11236 gen_load_fpr32(ctx
, fp0
, fs
);
11237 if (ctx
->nan2008
) {
11238 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11240 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11242 gen_store_fpr32(ctx
, fp0
, fd
);
11243 tcg_temp_free_i32(fp0
);
11247 check_insn(ctx
, ISA_MIPS32R6
);
11248 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11251 check_insn(ctx
, ISA_MIPS32R6
);
11252 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11255 check_insn(ctx
, ISA_MIPS32R6
);
11256 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11259 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11260 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11263 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11265 TCGLabel
*l1
= gen_new_label();
11269 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11271 fp0
= tcg_temp_new_i32();
11272 gen_load_fpr32(ctx
, fp0
, fs
);
11273 gen_store_fpr32(ctx
, fp0
, fd
);
11274 tcg_temp_free_i32(fp0
);
11279 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11281 TCGLabel
*l1
= gen_new_label();
11285 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11286 fp0
= tcg_temp_new_i32();
11287 gen_load_fpr32(ctx
, fp0
, fs
);
11288 gen_store_fpr32(ctx
, fp0
, fd
);
11289 tcg_temp_free_i32(fp0
);
11296 TCGv_i32 fp0
= tcg_temp_new_i32();
11298 gen_load_fpr32(ctx
, fp0
, fs
);
11299 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11300 gen_store_fpr32(ctx
, fp0
, fd
);
11301 tcg_temp_free_i32(fp0
);
11306 TCGv_i32 fp0
= tcg_temp_new_i32();
11308 gen_load_fpr32(ctx
, fp0
, fs
);
11309 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11310 gen_store_fpr32(ctx
, fp0
, fd
);
11311 tcg_temp_free_i32(fp0
);
11315 check_insn(ctx
, ISA_MIPS32R6
);
11317 TCGv_i32 fp0
= tcg_temp_new_i32();
11318 TCGv_i32 fp1
= tcg_temp_new_i32();
11319 TCGv_i32 fp2
= tcg_temp_new_i32();
11320 gen_load_fpr32(ctx
, fp0
, fs
);
11321 gen_load_fpr32(ctx
, fp1
, ft
);
11322 gen_load_fpr32(ctx
, fp2
, fd
);
11323 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11324 gen_store_fpr32(ctx
, fp2
, fd
);
11325 tcg_temp_free_i32(fp2
);
11326 tcg_temp_free_i32(fp1
);
11327 tcg_temp_free_i32(fp0
);
11331 check_insn(ctx
, ISA_MIPS32R6
);
11333 TCGv_i32 fp0
= tcg_temp_new_i32();
11334 TCGv_i32 fp1
= tcg_temp_new_i32();
11335 TCGv_i32 fp2
= tcg_temp_new_i32();
11336 gen_load_fpr32(ctx
, fp0
, fs
);
11337 gen_load_fpr32(ctx
, fp1
, ft
);
11338 gen_load_fpr32(ctx
, fp2
, fd
);
11339 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11340 gen_store_fpr32(ctx
, fp2
, fd
);
11341 tcg_temp_free_i32(fp2
);
11342 tcg_temp_free_i32(fp1
);
11343 tcg_temp_free_i32(fp0
);
11347 check_insn(ctx
, ISA_MIPS32R6
);
11349 TCGv_i32 fp0
= tcg_temp_new_i32();
11350 gen_load_fpr32(ctx
, fp0
, fs
);
11351 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11352 gen_store_fpr32(ctx
, fp0
, fd
);
11353 tcg_temp_free_i32(fp0
);
11357 check_insn(ctx
, ISA_MIPS32R6
);
11359 TCGv_i32 fp0
= tcg_temp_new_i32();
11360 gen_load_fpr32(ctx
, fp0
, fs
);
11361 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11362 gen_store_fpr32(ctx
, fp0
, fd
);
11363 tcg_temp_free_i32(fp0
);
11366 case OPC_MIN_S
: /* OPC_RECIP2_S */
11367 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11369 TCGv_i32 fp0
= tcg_temp_new_i32();
11370 TCGv_i32 fp1
= tcg_temp_new_i32();
11371 TCGv_i32 fp2
= tcg_temp_new_i32();
11372 gen_load_fpr32(ctx
, fp0
, fs
);
11373 gen_load_fpr32(ctx
, fp1
, ft
);
11374 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11375 gen_store_fpr32(ctx
, fp2
, fd
);
11376 tcg_temp_free_i32(fp2
);
11377 tcg_temp_free_i32(fp1
);
11378 tcg_temp_free_i32(fp0
);
11381 check_cp1_64bitmode(ctx
);
11383 TCGv_i32 fp0
= tcg_temp_new_i32();
11384 TCGv_i32 fp1
= tcg_temp_new_i32();
11386 gen_load_fpr32(ctx
, fp0
, fs
);
11387 gen_load_fpr32(ctx
, fp1
, ft
);
11388 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11389 tcg_temp_free_i32(fp1
);
11390 gen_store_fpr32(ctx
, fp0
, fd
);
11391 tcg_temp_free_i32(fp0
);
11395 case OPC_MINA_S
: /* OPC_RECIP1_S */
11396 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11398 TCGv_i32 fp0
= tcg_temp_new_i32();
11399 TCGv_i32 fp1
= tcg_temp_new_i32();
11400 TCGv_i32 fp2
= tcg_temp_new_i32();
11401 gen_load_fpr32(ctx
, fp0
, fs
);
11402 gen_load_fpr32(ctx
, fp1
, ft
);
11403 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11404 gen_store_fpr32(ctx
, fp2
, fd
);
11405 tcg_temp_free_i32(fp2
);
11406 tcg_temp_free_i32(fp1
);
11407 tcg_temp_free_i32(fp0
);
11410 check_cp1_64bitmode(ctx
);
11412 TCGv_i32 fp0
= tcg_temp_new_i32();
11414 gen_load_fpr32(ctx
, fp0
, fs
);
11415 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11416 gen_store_fpr32(ctx
, fp0
, fd
);
11417 tcg_temp_free_i32(fp0
);
11421 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11422 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11424 TCGv_i32 fp0
= tcg_temp_new_i32();
11425 TCGv_i32 fp1
= tcg_temp_new_i32();
11426 gen_load_fpr32(ctx
, fp0
, fs
);
11427 gen_load_fpr32(ctx
, fp1
, ft
);
11428 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11429 gen_store_fpr32(ctx
, fp1
, fd
);
11430 tcg_temp_free_i32(fp1
);
11431 tcg_temp_free_i32(fp0
);
11434 check_cp1_64bitmode(ctx
);
11436 TCGv_i32 fp0
= tcg_temp_new_i32();
11438 gen_load_fpr32(ctx
, fp0
, fs
);
11439 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11440 gen_store_fpr32(ctx
, fp0
, fd
);
11441 tcg_temp_free_i32(fp0
);
11445 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11446 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11448 TCGv_i32 fp0
= tcg_temp_new_i32();
11449 TCGv_i32 fp1
= tcg_temp_new_i32();
11450 gen_load_fpr32(ctx
, fp0
, fs
);
11451 gen_load_fpr32(ctx
, fp1
, ft
);
11452 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11453 gen_store_fpr32(ctx
, fp1
, fd
);
11454 tcg_temp_free_i32(fp1
);
11455 tcg_temp_free_i32(fp0
);
11458 check_cp1_64bitmode(ctx
);
11460 TCGv_i32 fp0
= tcg_temp_new_i32();
11461 TCGv_i32 fp1
= tcg_temp_new_i32();
11463 gen_load_fpr32(ctx
, fp0
, fs
);
11464 gen_load_fpr32(ctx
, fp1
, ft
);
11465 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11466 tcg_temp_free_i32(fp1
);
11467 gen_store_fpr32(ctx
, fp0
, fd
);
11468 tcg_temp_free_i32(fp0
);
11473 check_cp1_registers(ctx
, fd
);
11475 TCGv_i32 fp32
= tcg_temp_new_i32();
11476 TCGv_i64 fp64
= tcg_temp_new_i64();
11478 gen_load_fpr32(ctx
, fp32
, fs
);
11479 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11480 tcg_temp_free_i32(fp32
);
11481 gen_store_fpr64(ctx
, fp64
, fd
);
11482 tcg_temp_free_i64(fp64
);
11487 TCGv_i32 fp0
= tcg_temp_new_i32();
11489 gen_load_fpr32(ctx
, fp0
, fs
);
11490 if (ctx
->nan2008
) {
11491 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11493 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11495 gen_store_fpr32(ctx
, fp0
, fd
);
11496 tcg_temp_free_i32(fp0
);
11500 check_cp1_64bitmode(ctx
);
11502 TCGv_i32 fp32
= tcg_temp_new_i32();
11503 TCGv_i64 fp64
= tcg_temp_new_i64();
11505 gen_load_fpr32(ctx
, fp32
, fs
);
11506 if (ctx
->nan2008
) {
11507 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11509 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11511 tcg_temp_free_i32(fp32
);
11512 gen_store_fpr64(ctx
, fp64
, fd
);
11513 tcg_temp_free_i64(fp64
);
11519 TCGv_i64 fp64
= tcg_temp_new_i64();
11520 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11521 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11523 gen_load_fpr32(ctx
, fp32_0
, fs
);
11524 gen_load_fpr32(ctx
, fp32_1
, ft
);
11525 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11526 tcg_temp_free_i32(fp32_1
);
11527 tcg_temp_free_i32(fp32_0
);
11528 gen_store_fpr64(ctx
, fp64
, fd
);
11529 tcg_temp_free_i64(fp64
);
11535 case OPC_CMP_UEQ_S
:
11536 case OPC_CMP_OLT_S
:
11537 case OPC_CMP_ULT_S
:
11538 case OPC_CMP_OLE_S
:
11539 case OPC_CMP_ULE_S
:
11541 case OPC_CMP_NGLE_S
:
11542 case OPC_CMP_SEQ_S
:
11543 case OPC_CMP_NGL_S
:
11545 case OPC_CMP_NGE_S
:
11547 case OPC_CMP_NGT_S
:
11548 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11549 if (ctx
->opcode
& (1 << 6)) {
11550 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
11552 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
11556 check_cp1_registers(ctx
, fs
| ft
| fd
);
11558 TCGv_i64 fp0
= tcg_temp_new_i64();
11559 TCGv_i64 fp1
= tcg_temp_new_i64();
11561 gen_load_fpr64(ctx
, fp0
, fs
);
11562 gen_load_fpr64(ctx
, fp1
, ft
);
11563 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11564 tcg_temp_free_i64(fp1
);
11565 gen_store_fpr64(ctx
, fp0
, fd
);
11566 tcg_temp_free_i64(fp0
);
11570 check_cp1_registers(ctx
, fs
| ft
| fd
);
11572 TCGv_i64 fp0
= tcg_temp_new_i64();
11573 TCGv_i64 fp1
= tcg_temp_new_i64();
11575 gen_load_fpr64(ctx
, fp0
, fs
);
11576 gen_load_fpr64(ctx
, fp1
, ft
);
11577 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11578 tcg_temp_free_i64(fp1
);
11579 gen_store_fpr64(ctx
, fp0
, fd
);
11580 tcg_temp_free_i64(fp0
);
11584 check_cp1_registers(ctx
, fs
| ft
| fd
);
11586 TCGv_i64 fp0
= tcg_temp_new_i64();
11587 TCGv_i64 fp1
= tcg_temp_new_i64();
11589 gen_load_fpr64(ctx
, fp0
, fs
);
11590 gen_load_fpr64(ctx
, fp1
, ft
);
11591 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11592 tcg_temp_free_i64(fp1
);
11593 gen_store_fpr64(ctx
, fp0
, fd
);
11594 tcg_temp_free_i64(fp0
);
11598 check_cp1_registers(ctx
, fs
| ft
| fd
);
11600 TCGv_i64 fp0
= tcg_temp_new_i64();
11601 TCGv_i64 fp1
= tcg_temp_new_i64();
11603 gen_load_fpr64(ctx
, fp0
, fs
);
11604 gen_load_fpr64(ctx
, fp1
, ft
);
11605 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11606 tcg_temp_free_i64(fp1
);
11607 gen_store_fpr64(ctx
, fp0
, fd
);
11608 tcg_temp_free_i64(fp0
);
11612 check_cp1_registers(ctx
, fs
| fd
);
11614 TCGv_i64 fp0
= tcg_temp_new_i64();
11616 gen_load_fpr64(ctx
, fp0
, fs
);
11617 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11618 gen_store_fpr64(ctx
, fp0
, fd
);
11619 tcg_temp_free_i64(fp0
);
11623 check_cp1_registers(ctx
, fs
| fd
);
11625 TCGv_i64 fp0
= tcg_temp_new_i64();
11627 gen_load_fpr64(ctx
, fp0
, fs
);
11628 if (ctx
->abs2008
) {
11629 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11631 gen_helper_float_abs_d(fp0
, fp0
);
11633 gen_store_fpr64(ctx
, fp0
, fd
);
11634 tcg_temp_free_i64(fp0
);
11638 check_cp1_registers(ctx
, fs
| fd
);
11640 TCGv_i64 fp0
= tcg_temp_new_i64();
11642 gen_load_fpr64(ctx
, fp0
, fs
);
11643 gen_store_fpr64(ctx
, fp0
, fd
);
11644 tcg_temp_free_i64(fp0
);
11648 check_cp1_registers(ctx
, fs
| fd
);
11650 TCGv_i64 fp0
= tcg_temp_new_i64();
11652 gen_load_fpr64(ctx
, fp0
, fs
);
11653 if (ctx
->abs2008
) {
11654 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11656 gen_helper_float_chs_d(fp0
, fp0
);
11658 gen_store_fpr64(ctx
, fp0
, fd
);
11659 tcg_temp_free_i64(fp0
);
11662 case OPC_ROUND_L_D
:
11663 check_cp1_64bitmode(ctx
);
11665 TCGv_i64 fp0
= tcg_temp_new_i64();
11667 gen_load_fpr64(ctx
, fp0
, fs
);
11668 if (ctx
->nan2008
) {
11669 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11671 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11673 gen_store_fpr64(ctx
, fp0
, fd
);
11674 tcg_temp_free_i64(fp0
);
11677 case OPC_TRUNC_L_D
:
11678 check_cp1_64bitmode(ctx
);
11680 TCGv_i64 fp0
= tcg_temp_new_i64();
11682 gen_load_fpr64(ctx
, fp0
, fs
);
11683 if (ctx
->nan2008
) {
11684 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11686 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11688 gen_store_fpr64(ctx
, fp0
, fd
);
11689 tcg_temp_free_i64(fp0
);
11693 check_cp1_64bitmode(ctx
);
11695 TCGv_i64 fp0
= tcg_temp_new_i64();
11697 gen_load_fpr64(ctx
, fp0
, fs
);
11698 if (ctx
->nan2008
) {
11699 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11701 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11703 gen_store_fpr64(ctx
, fp0
, fd
);
11704 tcg_temp_free_i64(fp0
);
11707 case OPC_FLOOR_L_D
:
11708 check_cp1_64bitmode(ctx
);
11710 TCGv_i64 fp0
= tcg_temp_new_i64();
11712 gen_load_fpr64(ctx
, fp0
, fs
);
11713 if (ctx
->nan2008
) {
11714 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11716 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11718 gen_store_fpr64(ctx
, fp0
, fd
);
11719 tcg_temp_free_i64(fp0
);
11722 case OPC_ROUND_W_D
:
11723 check_cp1_registers(ctx
, fs
);
11725 TCGv_i32 fp32
= tcg_temp_new_i32();
11726 TCGv_i64 fp64
= tcg_temp_new_i64();
11728 gen_load_fpr64(ctx
, fp64
, fs
);
11729 if (ctx
->nan2008
) {
11730 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11732 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11734 tcg_temp_free_i64(fp64
);
11735 gen_store_fpr32(ctx
, fp32
, fd
);
11736 tcg_temp_free_i32(fp32
);
11739 case OPC_TRUNC_W_D
:
11740 check_cp1_registers(ctx
, fs
);
11742 TCGv_i32 fp32
= tcg_temp_new_i32();
11743 TCGv_i64 fp64
= tcg_temp_new_i64();
11745 gen_load_fpr64(ctx
, fp64
, fs
);
11746 if (ctx
->nan2008
) {
11747 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11749 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11751 tcg_temp_free_i64(fp64
);
11752 gen_store_fpr32(ctx
, fp32
, fd
);
11753 tcg_temp_free_i32(fp32
);
11757 check_cp1_registers(ctx
, fs
);
11759 TCGv_i32 fp32
= tcg_temp_new_i32();
11760 TCGv_i64 fp64
= tcg_temp_new_i64();
11762 gen_load_fpr64(ctx
, fp64
, fs
);
11763 if (ctx
->nan2008
) {
11764 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11766 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11768 tcg_temp_free_i64(fp64
);
11769 gen_store_fpr32(ctx
, fp32
, fd
);
11770 tcg_temp_free_i32(fp32
);
11773 case OPC_FLOOR_W_D
:
11774 check_cp1_registers(ctx
, fs
);
11776 TCGv_i32 fp32
= tcg_temp_new_i32();
11777 TCGv_i64 fp64
= tcg_temp_new_i64();
11779 gen_load_fpr64(ctx
, fp64
, fs
);
11780 if (ctx
->nan2008
) {
11781 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11783 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11785 tcg_temp_free_i64(fp64
);
11786 gen_store_fpr32(ctx
, fp32
, fd
);
11787 tcg_temp_free_i32(fp32
);
11791 check_insn(ctx
, ISA_MIPS32R6
);
11792 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11795 check_insn(ctx
, ISA_MIPS32R6
);
11796 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11799 check_insn(ctx
, ISA_MIPS32R6
);
11800 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11803 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11804 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11807 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11809 TCGLabel
*l1
= gen_new_label();
11813 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11815 fp0
= tcg_temp_new_i64();
11816 gen_load_fpr64(ctx
, fp0
, fs
);
11817 gen_store_fpr64(ctx
, fp0
, fd
);
11818 tcg_temp_free_i64(fp0
);
11823 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11825 TCGLabel
*l1
= gen_new_label();
11829 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11830 fp0
= tcg_temp_new_i64();
11831 gen_load_fpr64(ctx
, fp0
, fs
);
11832 gen_store_fpr64(ctx
, fp0
, fd
);
11833 tcg_temp_free_i64(fp0
);
11839 check_cp1_registers(ctx
, fs
| fd
);
11841 TCGv_i64 fp0
= tcg_temp_new_i64();
11843 gen_load_fpr64(ctx
, fp0
, fs
);
11844 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11845 gen_store_fpr64(ctx
, fp0
, fd
);
11846 tcg_temp_free_i64(fp0
);
11850 check_cp1_registers(ctx
, fs
| fd
);
11852 TCGv_i64 fp0
= tcg_temp_new_i64();
11854 gen_load_fpr64(ctx
, fp0
, fs
);
11855 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11856 gen_store_fpr64(ctx
, fp0
, fd
);
11857 tcg_temp_free_i64(fp0
);
11861 check_insn(ctx
, ISA_MIPS32R6
);
11863 TCGv_i64 fp0
= tcg_temp_new_i64();
11864 TCGv_i64 fp1
= tcg_temp_new_i64();
11865 TCGv_i64 fp2
= tcg_temp_new_i64();
11866 gen_load_fpr64(ctx
, fp0
, fs
);
11867 gen_load_fpr64(ctx
, fp1
, ft
);
11868 gen_load_fpr64(ctx
, fp2
, fd
);
11869 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11870 gen_store_fpr64(ctx
, fp2
, fd
);
11871 tcg_temp_free_i64(fp2
);
11872 tcg_temp_free_i64(fp1
);
11873 tcg_temp_free_i64(fp0
);
11877 check_insn(ctx
, ISA_MIPS32R6
);
11879 TCGv_i64 fp0
= tcg_temp_new_i64();
11880 TCGv_i64 fp1
= tcg_temp_new_i64();
11881 TCGv_i64 fp2
= tcg_temp_new_i64();
11882 gen_load_fpr64(ctx
, fp0
, fs
);
11883 gen_load_fpr64(ctx
, fp1
, ft
);
11884 gen_load_fpr64(ctx
, fp2
, fd
);
11885 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11886 gen_store_fpr64(ctx
, fp2
, fd
);
11887 tcg_temp_free_i64(fp2
);
11888 tcg_temp_free_i64(fp1
);
11889 tcg_temp_free_i64(fp0
);
11893 check_insn(ctx
, ISA_MIPS32R6
);
11895 TCGv_i64 fp0
= tcg_temp_new_i64();
11896 gen_load_fpr64(ctx
, fp0
, fs
);
11897 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11898 gen_store_fpr64(ctx
, fp0
, fd
);
11899 tcg_temp_free_i64(fp0
);
11903 check_insn(ctx
, ISA_MIPS32R6
);
11905 TCGv_i64 fp0
= tcg_temp_new_i64();
11906 gen_load_fpr64(ctx
, fp0
, fs
);
11907 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11908 gen_store_fpr64(ctx
, fp0
, fd
);
11909 tcg_temp_free_i64(fp0
);
11912 case OPC_MIN_D
: /* OPC_RECIP2_D */
11913 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11915 TCGv_i64 fp0
= tcg_temp_new_i64();
11916 TCGv_i64 fp1
= tcg_temp_new_i64();
11917 gen_load_fpr64(ctx
, fp0
, fs
);
11918 gen_load_fpr64(ctx
, fp1
, ft
);
11919 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11920 gen_store_fpr64(ctx
, fp1
, fd
);
11921 tcg_temp_free_i64(fp1
);
11922 tcg_temp_free_i64(fp0
);
11925 check_cp1_64bitmode(ctx
);
11927 TCGv_i64 fp0
= tcg_temp_new_i64();
11928 TCGv_i64 fp1
= tcg_temp_new_i64();
11930 gen_load_fpr64(ctx
, fp0
, fs
);
11931 gen_load_fpr64(ctx
, fp1
, ft
);
11932 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11933 tcg_temp_free_i64(fp1
);
11934 gen_store_fpr64(ctx
, fp0
, fd
);
11935 tcg_temp_free_i64(fp0
);
11939 case OPC_MINA_D
: /* OPC_RECIP1_D */
11940 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11942 TCGv_i64 fp0
= tcg_temp_new_i64();
11943 TCGv_i64 fp1
= tcg_temp_new_i64();
11944 gen_load_fpr64(ctx
, fp0
, fs
);
11945 gen_load_fpr64(ctx
, fp1
, ft
);
11946 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11947 gen_store_fpr64(ctx
, fp1
, fd
);
11948 tcg_temp_free_i64(fp1
);
11949 tcg_temp_free_i64(fp0
);
11952 check_cp1_64bitmode(ctx
);
11954 TCGv_i64 fp0
= tcg_temp_new_i64();
11956 gen_load_fpr64(ctx
, fp0
, fs
);
11957 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11958 gen_store_fpr64(ctx
, fp0
, fd
);
11959 tcg_temp_free_i64(fp0
);
11963 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11964 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11966 TCGv_i64 fp0
= tcg_temp_new_i64();
11967 TCGv_i64 fp1
= tcg_temp_new_i64();
11968 gen_load_fpr64(ctx
, fp0
, fs
);
11969 gen_load_fpr64(ctx
, fp1
, ft
);
11970 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11971 gen_store_fpr64(ctx
, fp1
, fd
);
11972 tcg_temp_free_i64(fp1
);
11973 tcg_temp_free_i64(fp0
);
11976 check_cp1_64bitmode(ctx
);
11978 TCGv_i64 fp0
= tcg_temp_new_i64();
11980 gen_load_fpr64(ctx
, fp0
, fs
);
11981 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11982 gen_store_fpr64(ctx
, fp0
, fd
);
11983 tcg_temp_free_i64(fp0
);
11987 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11988 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11990 TCGv_i64 fp0
= tcg_temp_new_i64();
11991 TCGv_i64 fp1
= tcg_temp_new_i64();
11992 gen_load_fpr64(ctx
, fp0
, fs
);
11993 gen_load_fpr64(ctx
, fp1
, ft
);
11994 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11995 gen_store_fpr64(ctx
, fp1
, fd
);
11996 tcg_temp_free_i64(fp1
);
11997 tcg_temp_free_i64(fp0
);
12000 check_cp1_64bitmode(ctx
);
12002 TCGv_i64 fp0
= tcg_temp_new_i64();
12003 TCGv_i64 fp1
= tcg_temp_new_i64();
12005 gen_load_fpr64(ctx
, fp0
, fs
);
12006 gen_load_fpr64(ctx
, fp1
, ft
);
12007 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
12008 tcg_temp_free_i64(fp1
);
12009 gen_store_fpr64(ctx
, fp0
, fd
);
12010 tcg_temp_free_i64(fp0
);
12017 case OPC_CMP_UEQ_D
:
12018 case OPC_CMP_OLT_D
:
12019 case OPC_CMP_ULT_D
:
12020 case OPC_CMP_OLE_D
:
12021 case OPC_CMP_ULE_D
:
12023 case OPC_CMP_NGLE_D
:
12024 case OPC_CMP_SEQ_D
:
12025 case OPC_CMP_NGL_D
:
12027 case OPC_CMP_NGE_D
:
12029 case OPC_CMP_NGT_D
:
12030 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12031 if (ctx
->opcode
& (1 << 6)) {
12032 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
12034 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
12038 check_cp1_registers(ctx
, fs
);
12040 TCGv_i32 fp32
= tcg_temp_new_i32();
12041 TCGv_i64 fp64
= tcg_temp_new_i64();
12043 gen_load_fpr64(ctx
, fp64
, fs
);
12044 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
12045 tcg_temp_free_i64(fp64
);
12046 gen_store_fpr32(ctx
, fp32
, fd
);
12047 tcg_temp_free_i32(fp32
);
12051 check_cp1_registers(ctx
, fs
);
12053 TCGv_i32 fp32
= tcg_temp_new_i32();
12054 TCGv_i64 fp64
= tcg_temp_new_i64();
12056 gen_load_fpr64(ctx
, fp64
, fs
);
12057 if (ctx
->nan2008
) {
12058 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
12060 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
12062 tcg_temp_free_i64(fp64
);
12063 gen_store_fpr32(ctx
, fp32
, fd
);
12064 tcg_temp_free_i32(fp32
);
12068 check_cp1_64bitmode(ctx
);
12070 TCGv_i64 fp0
= tcg_temp_new_i64();
12072 gen_load_fpr64(ctx
, fp0
, fs
);
12073 if (ctx
->nan2008
) {
12074 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
12076 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
12078 gen_store_fpr64(ctx
, fp0
, fd
);
12079 tcg_temp_free_i64(fp0
);
12084 TCGv_i32 fp0
= tcg_temp_new_i32();
12086 gen_load_fpr32(ctx
, fp0
, fs
);
12087 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
12088 gen_store_fpr32(ctx
, fp0
, fd
);
12089 tcg_temp_free_i32(fp0
);
12093 check_cp1_registers(ctx
, fd
);
12095 TCGv_i32 fp32
= tcg_temp_new_i32();
12096 TCGv_i64 fp64
= tcg_temp_new_i64();
12098 gen_load_fpr32(ctx
, fp32
, fs
);
12099 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12100 tcg_temp_free_i32(fp32
);
12101 gen_store_fpr64(ctx
, fp64
, fd
);
12102 tcg_temp_free_i64(fp64
);
12106 check_cp1_64bitmode(ctx
);
12108 TCGv_i32 fp32
= tcg_temp_new_i32();
12109 TCGv_i64 fp64
= tcg_temp_new_i64();
12111 gen_load_fpr64(ctx
, fp64
, fs
);
12112 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12113 tcg_temp_free_i64(fp64
);
12114 gen_store_fpr32(ctx
, fp32
, fd
);
12115 tcg_temp_free_i32(fp32
);
12119 check_cp1_64bitmode(ctx
);
12121 TCGv_i64 fp0
= tcg_temp_new_i64();
12123 gen_load_fpr64(ctx
, fp0
, fs
);
12124 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12125 gen_store_fpr64(ctx
, fp0
, fd
);
12126 tcg_temp_free_i64(fp0
);
12129 case OPC_CVT_PS_PW
:
12132 TCGv_i64 fp0
= tcg_temp_new_i64();
12134 gen_load_fpr64(ctx
, fp0
, fs
);
12135 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12136 gen_store_fpr64(ctx
, fp0
, fd
);
12137 tcg_temp_free_i64(fp0
);
12143 TCGv_i64 fp0
= tcg_temp_new_i64();
12144 TCGv_i64 fp1
= tcg_temp_new_i64();
12146 gen_load_fpr64(ctx
, fp0
, fs
);
12147 gen_load_fpr64(ctx
, fp1
, ft
);
12148 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12149 tcg_temp_free_i64(fp1
);
12150 gen_store_fpr64(ctx
, fp0
, fd
);
12151 tcg_temp_free_i64(fp0
);
12157 TCGv_i64 fp0
= tcg_temp_new_i64();
12158 TCGv_i64 fp1
= tcg_temp_new_i64();
12160 gen_load_fpr64(ctx
, fp0
, fs
);
12161 gen_load_fpr64(ctx
, fp1
, ft
);
12162 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12163 tcg_temp_free_i64(fp1
);
12164 gen_store_fpr64(ctx
, fp0
, fd
);
12165 tcg_temp_free_i64(fp0
);
12171 TCGv_i64 fp0
= tcg_temp_new_i64();
12172 TCGv_i64 fp1
= tcg_temp_new_i64();
12174 gen_load_fpr64(ctx
, fp0
, fs
);
12175 gen_load_fpr64(ctx
, fp1
, ft
);
12176 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12177 tcg_temp_free_i64(fp1
);
12178 gen_store_fpr64(ctx
, fp0
, fd
);
12179 tcg_temp_free_i64(fp0
);
12185 TCGv_i64 fp0
= tcg_temp_new_i64();
12187 gen_load_fpr64(ctx
, fp0
, fs
);
12188 gen_helper_float_abs_ps(fp0
, fp0
);
12189 gen_store_fpr64(ctx
, fp0
, fd
);
12190 tcg_temp_free_i64(fp0
);
12196 TCGv_i64 fp0
= tcg_temp_new_i64();
12198 gen_load_fpr64(ctx
, fp0
, fs
);
12199 gen_store_fpr64(ctx
, fp0
, fd
);
12200 tcg_temp_free_i64(fp0
);
12206 TCGv_i64 fp0
= tcg_temp_new_i64();
12208 gen_load_fpr64(ctx
, fp0
, fs
);
12209 gen_helper_float_chs_ps(fp0
, fp0
);
12210 gen_store_fpr64(ctx
, fp0
, fd
);
12211 tcg_temp_free_i64(fp0
);
12216 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12221 TCGLabel
*l1
= gen_new_label();
12225 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12227 fp0
= tcg_temp_new_i64();
12228 gen_load_fpr64(ctx
, fp0
, fs
);
12229 gen_store_fpr64(ctx
, fp0
, fd
);
12230 tcg_temp_free_i64(fp0
);
12237 TCGLabel
*l1
= gen_new_label();
12241 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12242 fp0
= tcg_temp_new_i64();
12243 gen_load_fpr64(ctx
, fp0
, fs
);
12244 gen_store_fpr64(ctx
, fp0
, fd
);
12245 tcg_temp_free_i64(fp0
);
12253 TCGv_i64 fp0
= tcg_temp_new_i64();
12254 TCGv_i64 fp1
= tcg_temp_new_i64();
12256 gen_load_fpr64(ctx
, fp0
, ft
);
12257 gen_load_fpr64(ctx
, fp1
, fs
);
12258 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12259 tcg_temp_free_i64(fp1
);
12260 gen_store_fpr64(ctx
, fp0
, fd
);
12261 tcg_temp_free_i64(fp0
);
12267 TCGv_i64 fp0
= tcg_temp_new_i64();
12268 TCGv_i64 fp1
= tcg_temp_new_i64();
12270 gen_load_fpr64(ctx
, fp0
, ft
);
12271 gen_load_fpr64(ctx
, fp1
, fs
);
12272 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12273 tcg_temp_free_i64(fp1
);
12274 gen_store_fpr64(ctx
, fp0
, fd
);
12275 tcg_temp_free_i64(fp0
);
12278 case OPC_RECIP2_PS
:
12281 TCGv_i64 fp0
= tcg_temp_new_i64();
12282 TCGv_i64 fp1
= tcg_temp_new_i64();
12284 gen_load_fpr64(ctx
, fp0
, fs
);
12285 gen_load_fpr64(ctx
, fp1
, ft
);
12286 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12287 tcg_temp_free_i64(fp1
);
12288 gen_store_fpr64(ctx
, fp0
, fd
);
12289 tcg_temp_free_i64(fp0
);
12292 case OPC_RECIP1_PS
:
12295 TCGv_i64 fp0
= tcg_temp_new_i64();
12297 gen_load_fpr64(ctx
, fp0
, fs
);
12298 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12299 gen_store_fpr64(ctx
, fp0
, fd
);
12300 tcg_temp_free_i64(fp0
);
12303 case OPC_RSQRT1_PS
:
12306 TCGv_i64 fp0
= tcg_temp_new_i64();
12308 gen_load_fpr64(ctx
, fp0
, fs
);
12309 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12310 gen_store_fpr64(ctx
, fp0
, fd
);
12311 tcg_temp_free_i64(fp0
);
12314 case OPC_RSQRT2_PS
:
12317 TCGv_i64 fp0
= tcg_temp_new_i64();
12318 TCGv_i64 fp1
= tcg_temp_new_i64();
12320 gen_load_fpr64(ctx
, fp0
, fs
);
12321 gen_load_fpr64(ctx
, fp1
, ft
);
12322 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12323 tcg_temp_free_i64(fp1
);
12324 gen_store_fpr64(ctx
, fp0
, fd
);
12325 tcg_temp_free_i64(fp0
);
12329 check_cp1_64bitmode(ctx
);
12331 TCGv_i32 fp0
= tcg_temp_new_i32();
12333 gen_load_fpr32h(ctx
, fp0
, fs
);
12334 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12335 gen_store_fpr32(ctx
, fp0
, fd
);
12336 tcg_temp_free_i32(fp0
);
12339 case OPC_CVT_PW_PS
:
12342 TCGv_i64 fp0
= tcg_temp_new_i64();
12344 gen_load_fpr64(ctx
, fp0
, fs
);
12345 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12346 gen_store_fpr64(ctx
, fp0
, fd
);
12347 tcg_temp_free_i64(fp0
);
12351 check_cp1_64bitmode(ctx
);
12353 TCGv_i32 fp0
= tcg_temp_new_i32();
12355 gen_load_fpr32(ctx
, fp0
, fs
);
12356 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12357 gen_store_fpr32(ctx
, fp0
, fd
);
12358 tcg_temp_free_i32(fp0
);
12364 TCGv_i32 fp0
= tcg_temp_new_i32();
12365 TCGv_i32 fp1
= tcg_temp_new_i32();
12367 gen_load_fpr32(ctx
, fp0
, fs
);
12368 gen_load_fpr32(ctx
, fp1
, ft
);
12369 gen_store_fpr32h(ctx
, fp0
, fd
);
12370 gen_store_fpr32(ctx
, fp1
, fd
);
12371 tcg_temp_free_i32(fp0
);
12372 tcg_temp_free_i32(fp1
);
12378 TCGv_i32 fp0
= tcg_temp_new_i32();
12379 TCGv_i32 fp1
= tcg_temp_new_i32();
12381 gen_load_fpr32(ctx
, fp0
, fs
);
12382 gen_load_fpr32h(ctx
, fp1
, ft
);
12383 gen_store_fpr32(ctx
, fp1
, fd
);
12384 gen_store_fpr32h(ctx
, fp0
, fd
);
12385 tcg_temp_free_i32(fp0
);
12386 tcg_temp_free_i32(fp1
);
12392 TCGv_i32 fp0
= tcg_temp_new_i32();
12393 TCGv_i32 fp1
= tcg_temp_new_i32();
12395 gen_load_fpr32h(ctx
, fp0
, fs
);
12396 gen_load_fpr32(ctx
, fp1
, ft
);
12397 gen_store_fpr32(ctx
, fp1
, fd
);
12398 gen_store_fpr32h(ctx
, fp0
, fd
);
12399 tcg_temp_free_i32(fp0
);
12400 tcg_temp_free_i32(fp1
);
12406 TCGv_i32 fp0
= tcg_temp_new_i32();
12407 TCGv_i32 fp1
= tcg_temp_new_i32();
12409 gen_load_fpr32h(ctx
, fp0
, fs
);
12410 gen_load_fpr32h(ctx
, fp1
, ft
);
12411 gen_store_fpr32(ctx
, fp1
, fd
);
12412 gen_store_fpr32h(ctx
, fp0
, fd
);
12413 tcg_temp_free_i32(fp0
);
12414 tcg_temp_free_i32(fp1
);
12418 case OPC_CMP_UN_PS
:
12419 case OPC_CMP_EQ_PS
:
12420 case OPC_CMP_UEQ_PS
:
12421 case OPC_CMP_OLT_PS
:
12422 case OPC_CMP_ULT_PS
:
12423 case OPC_CMP_OLE_PS
:
12424 case OPC_CMP_ULE_PS
:
12425 case OPC_CMP_SF_PS
:
12426 case OPC_CMP_NGLE_PS
:
12427 case OPC_CMP_SEQ_PS
:
12428 case OPC_CMP_NGL_PS
:
12429 case OPC_CMP_LT_PS
:
12430 case OPC_CMP_NGE_PS
:
12431 case OPC_CMP_LE_PS
:
12432 case OPC_CMP_NGT_PS
:
12433 if (ctx
->opcode
& (1 << 6)) {
12434 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
12436 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
12440 MIPS_INVAL("farith");
12441 generate_exception_end(ctx
, EXCP_RI
);
12446 /* Coprocessor 3 (FPU) */
12447 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
12448 int fd
, int fs
, int base
, int index
)
12450 TCGv t0
= tcg_temp_new();
12453 gen_load_gpr(t0
, index
);
12454 } else if (index
== 0) {
12455 gen_load_gpr(t0
, base
);
12457 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12460 * Don't do NOP if destination is zero: we must perform the actual
12467 TCGv_i32 fp0
= tcg_temp_new_i32();
12469 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12470 tcg_gen_trunc_tl_i32(fp0
, t0
);
12471 gen_store_fpr32(ctx
, fp0
, fd
);
12472 tcg_temp_free_i32(fp0
);
12477 check_cp1_registers(ctx
, fd
);
12479 TCGv_i64 fp0
= tcg_temp_new_i64();
12480 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12481 gen_store_fpr64(ctx
, fp0
, fd
);
12482 tcg_temp_free_i64(fp0
);
12486 check_cp1_64bitmode(ctx
);
12487 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12489 TCGv_i64 fp0
= tcg_temp_new_i64();
12491 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12492 gen_store_fpr64(ctx
, fp0
, fd
);
12493 tcg_temp_free_i64(fp0
);
12499 TCGv_i32 fp0
= tcg_temp_new_i32();
12500 gen_load_fpr32(ctx
, fp0
, fs
);
12501 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12502 tcg_temp_free_i32(fp0
);
12507 check_cp1_registers(ctx
, fs
);
12509 TCGv_i64 fp0
= tcg_temp_new_i64();
12510 gen_load_fpr64(ctx
, fp0
, fs
);
12511 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12512 tcg_temp_free_i64(fp0
);
12516 check_cp1_64bitmode(ctx
);
12517 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12519 TCGv_i64 fp0
= tcg_temp_new_i64();
12520 gen_load_fpr64(ctx
, fp0
, fs
);
12521 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12522 tcg_temp_free_i64(fp0
);
12529 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
12530 int fd
, int fr
, int fs
, int ft
)
12536 TCGv t0
= tcg_temp_local_new();
12537 TCGv_i32 fp
= tcg_temp_new_i32();
12538 TCGv_i32 fph
= tcg_temp_new_i32();
12539 TCGLabel
*l1
= gen_new_label();
12540 TCGLabel
*l2
= gen_new_label();
12542 gen_load_gpr(t0
, fr
);
12543 tcg_gen_andi_tl(t0
, t0
, 0x7);
12545 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12546 gen_load_fpr32(ctx
, fp
, fs
);
12547 gen_load_fpr32h(ctx
, fph
, fs
);
12548 gen_store_fpr32(ctx
, fp
, fd
);
12549 gen_store_fpr32h(ctx
, fph
, fd
);
12552 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12554 #ifdef TARGET_WORDS_BIGENDIAN
12555 gen_load_fpr32(ctx
, fp
, fs
);
12556 gen_load_fpr32h(ctx
, fph
, ft
);
12557 gen_store_fpr32h(ctx
, fp
, fd
);
12558 gen_store_fpr32(ctx
, fph
, fd
);
12560 gen_load_fpr32h(ctx
, fph
, fs
);
12561 gen_load_fpr32(ctx
, fp
, ft
);
12562 gen_store_fpr32(ctx
, fph
, fd
);
12563 gen_store_fpr32h(ctx
, fp
, fd
);
12566 tcg_temp_free_i32(fp
);
12567 tcg_temp_free_i32(fph
);
12573 TCGv_i32 fp0
= tcg_temp_new_i32();
12574 TCGv_i32 fp1
= tcg_temp_new_i32();
12575 TCGv_i32 fp2
= tcg_temp_new_i32();
12577 gen_load_fpr32(ctx
, fp0
, fs
);
12578 gen_load_fpr32(ctx
, fp1
, ft
);
12579 gen_load_fpr32(ctx
, fp2
, fr
);
12580 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12581 tcg_temp_free_i32(fp0
);
12582 tcg_temp_free_i32(fp1
);
12583 gen_store_fpr32(ctx
, fp2
, fd
);
12584 tcg_temp_free_i32(fp2
);
12589 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12591 TCGv_i64 fp0
= tcg_temp_new_i64();
12592 TCGv_i64 fp1
= tcg_temp_new_i64();
12593 TCGv_i64 fp2
= tcg_temp_new_i64();
12595 gen_load_fpr64(ctx
, fp0
, fs
);
12596 gen_load_fpr64(ctx
, fp1
, ft
);
12597 gen_load_fpr64(ctx
, fp2
, fr
);
12598 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12599 tcg_temp_free_i64(fp0
);
12600 tcg_temp_free_i64(fp1
);
12601 gen_store_fpr64(ctx
, fp2
, fd
);
12602 tcg_temp_free_i64(fp2
);
12608 TCGv_i64 fp0
= tcg_temp_new_i64();
12609 TCGv_i64 fp1
= tcg_temp_new_i64();
12610 TCGv_i64 fp2
= tcg_temp_new_i64();
12612 gen_load_fpr64(ctx
, fp0
, fs
);
12613 gen_load_fpr64(ctx
, fp1
, ft
);
12614 gen_load_fpr64(ctx
, fp2
, fr
);
12615 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12616 tcg_temp_free_i64(fp0
);
12617 tcg_temp_free_i64(fp1
);
12618 gen_store_fpr64(ctx
, fp2
, fd
);
12619 tcg_temp_free_i64(fp2
);
12625 TCGv_i32 fp0
= tcg_temp_new_i32();
12626 TCGv_i32 fp1
= tcg_temp_new_i32();
12627 TCGv_i32 fp2
= tcg_temp_new_i32();
12629 gen_load_fpr32(ctx
, fp0
, fs
);
12630 gen_load_fpr32(ctx
, fp1
, ft
);
12631 gen_load_fpr32(ctx
, fp2
, fr
);
12632 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12633 tcg_temp_free_i32(fp0
);
12634 tcg_temp_free_i32(fp1
);
12635 gen_store_fpr32(ctx
, fp2
, fd
);
12636 tcg_temp_free_i32(fp2
);
12641 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12643 TCGv_i64 fp0
= tcg_temp_new_i64();
12644 TCGv_i64 fp1
= tcg_temp_new_i64();
12645 TCGv_i64 fp2
= tcg_temp_new_i64();
12647 gen_load_fpr64(ctx
, fp0
, fs
);
12648 gen_load_fpr64(ctx
, fp1
, ft
);
12649 gen_load_fpr64(ctx
, fp2
, fr
);
12650 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12651 tcg_temp_free_i64(fp0
);
12652 tcg_temp_free_i64(fp1
);
12653 gen_store_fpr64(ctx
, fp2
, fd
);
12654 tcg_temp_free_i64(fp2
);
12660 TCGv_i64 fp0
= tcg_temp_new_i64();
12661 TCGv_i64 fp1
= tcg_temp_new_i64();
12662 TCGv_i64 fp2
= tcg_temp_new_i64();
12664 gen_load_fpr64(ctx
, fp0
, fs
);
12665 gen_load_fpr64(ctx
, fp1
, ft
);
12666 gen_load_fpr64(ctx
, fp2
, fr
);
12667 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12668 tcg_temp_free_i64(fp0
);
12669 tcg_temp_free_i64(fp1
);
12670 gen_store_fpr64(ctx
, fp2
, fd
);
12671 tcg_temp_free_i64(fp2
);
12677 TCGv_i32 fp0
= tcg_temp_new_i32();
12678 TCGv_i32 fp1
= tcg_temp_new_i32();
12679 TCGv_i32 fp2
= tcg_temp_new_i32();
12681 gen_load_fpr32(ctx
, fp0
, fs
);
12682 gen_load_fpr32(ctx
, fp1
, ft
);
12683 gen_load_fpr32(ctx
, fp2
, fr
);
12684 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12685 tcg_temp_free_i32(fp0
);
12686 tcg_temp_free_i32(fp1
);
12687 gen_store_fpr32(ctx
, fp2
, fd
);
12688 tcg_temp_free_i32(fp2
);
12693 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12695 TCGv_i64 fp0
= tcg_temp_new_i64();
12696 TCGv_i64 fp1
= tcg_temp_new_i64();
12697 TCGv_i64 fp2
= tcg_temp_new_i64();
12699 gen_load_fpr64(ctx
, fp0
, fs
);
12700 gen_load_fpr64(ctx
, fp1
, ft
);
12701 gen_load_fpr64(ctx
, fp2
, fr
);
12702 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12703 tcg_temp_free_i64(fp0
);
12704 tcg_temp_free_i64(fp1
);
12705 gen_store_fpr64(ctx
, fp2
, fd
);
12706 tcg_temp_free_i64(fp2
);
12712 TCGv_i64 fp0
= tcg_temp_new_i64();
12713 TCGv_i64 fp1
= tcg_temp_new_i64();
12714 TCGv_i64 fp2
= tcg_temp_new_i64();
12716 gen_load_fpr64(ctx
, fp0
, fs
);
12717 gen_load_fpr64(ctx
, fp1
, ft
);
12718 gen_load_fpr64(ctx
, fp2
, fr
);
12719 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12720 tcg_temp_free_i64(fp0
);
12721 tcg_temp_free_i64(fp1
);
12722 gen_store_fpr64(ctx
, fp2
, fd
);
12723 tcg_temp_free_i64(fp2
);
12729 TCGv_i32 fp0
= tcg_temp_new_i32();
12730 TCGv_i32 fp1
= tcg_temp_new_i32();
12731 TCGv_i32 fp2
= tcg_temp_new_i32();
12733 gen_load_fpr32(ctx
, fp0
, fs
);
12734 gen_load_fpr32(ctx
, fp1
, ft
);
12735 gen_load_fpr32(ctx
, fp2
, fr
);
12736 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12737 tcg_temp_free_i32(fp0
);
12738 tcg_temp_free_i32(fp1
);
12739 gen_store_fpr32(ctx
, fp2
, fd
);
12740 tcg_temp_free_i32(fp2
);
12745 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12747 TCGv_i64 fp0
= tcg_temp_new_i64();
12748 TCGv_i64 fp1
= tcg_temp_new_i64();
12749 TCGv_i64 fp2
= tcg_temp_new_i64();
12751 gen_load_fpr64(ctx
, fp0
, fs
);
12752 gen_load_fpr64(ctx
, fp1
, ft
);
12753 gen_load_fpr64(ctx
, fp2
, fr
);
12754 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12755 tcg_temp_free_i64(fp0
);
12756 tcg_temp_free_i64(fp1
);
12757 gen_store_fpr64(ctx
, fp2
, fd
);
12758 tcg_temp_free_i64(fp2
);
12764 TCGv_i64 fp0
= tcg_temp_new_i64();
12765 TCGv_i64 fp1
= tcg_temp_new_i64();
12766 TCGv_i64 fp2
= tcg_temp_new_i64();
12768 gen_load_fpr64(ctx
, fp0
, fs
);
12769 gen_load_fpr64(ctx
, fp1
, ft
);
12770 gen_load_fpr64(ctx
, fp2
, fr
);
12771 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12772 tcg_temp_free_i64(fp0
);
12773 tcg_temp_free_i64(fp1
);
12774 gen_store_fpr64(ctx
, fp2
, fd
);
12775 tcg_temp_free_i64(fp2
);
12779 MIPS_INVAL("flt3_arith");
12780 generate_exception_end(ctx
, EXCP_RI
);
12785 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12789 #if !defined(CONFIG_USER_ONLY)
12791 * The Linux kernel will emulate rdhwr if it's not supported natively.
12792 * Therefore only check the ISA in system mode.
12794 check_insn(ctx
, ISA_MIPS32R2
);
12796 t0
= tcg_temp_new();
12800 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12801 gen_store_gpr(t0
, rt
);
12804 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12805 gen_store_gpr(t0
, rt
);
12808 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12811 gen_helper_rdhwr_cc(t0
, cpu_env
);
12812 gen_store_gpr(t0
, rt
);
12814 * Break the TB to be able to take timer interrupts immediately
12815 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
12816 * we break completely out of translated code.
12818 gen_save_pc(ctx
->base
.pc_next
+ 4);
12819 ctx
->base
.is_jmp
= DISAS_EXIT
;
12822 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12823 gen_store_gpr(t0
, rt
);
12826 check_insn(ctx
, ISA_MIPS32R6
);
12829 * Performance counter registers are not implemented other than
12830 * control register 0.
12832 generate_exception(ctx
, EXCP_RI
);
12834 gen_helper_rdhwr_performance(t0
, cpu_env
);
12835 gen_store_gpr(t0
, rt
);
12838 check_insn(ctx
, ISA_MIPS32R6
);
12839 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12840 gen_store_gpr(t0
, rt
);
12843 #if defined(CONFIG_USER_ONLY)
12844 tcg_gen_ld_tl(t0
, cpu_env
,
12845 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12846 gen_store_gpr(t0
, rt
);
12849 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12850 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12851 tcg_gen_ld_tl(t0
, cpu_env
,
12852 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12853 gen_store_gpr(t0
, rt
);
12855 generate_exception_end(ctx
, EXCP_RI
);
12859 default: /* Invalid */
12860 MIPS_INVAL("rdhwr");
12861 generate_exception_end(ctx
, EXCP_RI
);
12867 static inline void clear_branch_hflags(DisasContext
*ctx
)
12869 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12870 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12871 save_cpu_state(ctx
, 0);
12874 * It is not safe to save ctx->hflags as hflags may be changed
12875 * in execution time by the instruction in delay / forbidden slot.
12877 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12881 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12883 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12884 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12885 /* Branches completion */
12886 clear_branch_hflags(ctx
);
12887 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12888 /* FIXME: Need to clear can_do_io. */
12889 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12890 case MIPS_HFLAG_FBNSLOT
:
12891 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12894 /* unconditional branch */
12895 if (proc_hflags
& MIPS_HFLAG_BX
) {
12896 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12898 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12900 case MIPS_HFLAG_BL
:
12901 /* blikely taken case */
12902 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12904 case MIPS_HFLAG_BC
:
12905 /* Conditional branch */
12907 TCGLabel
*l1
= gen_new_label();
12909 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12910 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12912 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12915 case MIPS_HFLAG_BR
:
12916 /* unconditional branch to register */
12917 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12918 TCGv t0
= tcg_temp_new();
12919 TCGv_i32 t1
= tcg_temp_new_i32();
12921 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12922 tcg_gen_trunc_tl_i32(t1
, t0
);
12924 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12925 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12926 tcg_gen_or_i32(hflags
, hflags
, t1
);
12927 tcg_temp_free_i32(t1
);
12929 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12931 tcg_gen_mov_tl(cpu_PC
, btarget
);
12933 if (ctx
->base
.singlestep_enabled
) {
12934 save_cpu_state(ctx
, 0);
12935 gen_helper_raise_exception_debug(cpu_env
);
12937 tcg_gen_lookup_and_goto_ptr();
12940 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12946 /* Compact Branches */
12947 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12948 int rs
, int rt
, int32_t offset
)
12950 int bcond_compute
= 0;
12951 TCGv t0
= tcg_temp_new();
12952 TCGv t1
= tcg_temp_new();
12953 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12955 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12956 #ifdef MIPS_DEBUG_DISAS
12957 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12958 "\n", ctx
->base
.pc_next
);
12960 generate_exception_end(ctx
, EXCP_RI
);
12964 /* Load needed operands and calculate btarget */
12966 /* compact branch */
12967 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12968 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12969 gen_load_gpr(t0
, rs
);
12970 gen_load_gpr(t1
, rt
);
12972 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12973 if (rs
<= rt
&& rs
== 0) {
12974 /* OPC_BEQZALC, OPC_BNEZALC */
12975 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12978 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12979 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12980 gen_load_gpr(t0
, rs
);
12981 gen_load_gpr(t1
, rt
);
12983 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12985 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12986 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12987 if (rs
== 0 || rs
== rt
) {
12988 /* OPC_BLEZALC, OPC_BGEZALC */
12989 /* OPC_BGTZALC, OPC_BLTZALC */
12990 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12992 gen_load_gpr(t0
, rs
);
12993 gen_load_gpr(t1
, rt
);
12995 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12999 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13004 /* OPC_BEQZC, OPC_BNEZC */
13005 gen_load_gpr(t0
, rs
);
13007 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13009 /* OPC_JIC, OPC_JIALC */
13010 TCGv tbase
= tcg_temp_new();
13011 TCGv toffset
= tcg_temp_new();
13013 gen_load_gpr(tbase
, rt
);
13014 tcg_gen_movi_tl(toffset
, offset
);
13015 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
13016 tcg_temp_free(tbase
);
13017 tcg_temp_free(toffset
);
13021 MIPS_INVAL("Compact branch/jump");
13022 generate_exception_end(ctx
, EXCP_RI
);
13026 if (bcond_compute
== 0) {
13027 /* Uncoditional compact branch */
13030 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13033 ctx
->hflags
|= MIPS_HFLAG_BR
;
13036 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13039 ctx
->hflags
|= MIPS_HFLAG_B
;
13042 MIPS_INVAL("Compact branch/jump");
13043 generate_exception_end(ctx
, EXCP_RI
);
13047 /* Generating branch here as compact branches don't have delay slot */
13048 gen_branch(ctx
, 4);
13050 /* Conditional compact branch */
13051 TCGLabel
*fs
= gen_new_label();
13052 save_cpu_state(ctx
, 0);
13055 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
13056 if (rs
== 0 && rt
!= 0) {
13058 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13059 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13061 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13064 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
13067 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
13068 if (rs
== 0 && rt
!= 0) {
13070 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13071 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13073 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13076 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
13079 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13080 if (rs
== 0 && rt
!= 0) {
13082 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13083 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13085 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13088 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
13091 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13092 if (rs
== 0 && rt
!= 0) {
13094 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13095 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13097 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13100 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13103 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13104 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13106 /* OPC_BOVC, OPC_BNVC */
13107 TCGv t2
= tcg_temp_new();
13108 TCGv t3
= tcg_temp_new();
13109 TCGv t4
= tcg_temp_new();
13110 TCGv input_overflow
= tcg_temp_new();
13112 gen_load_gpr(t0
, rs
);
13113 gen_load_gpr(t1
, rt
);
13114 tcg_gen_ext32s_tl(t2
, t0
);
13115 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13116 tcg_gen_ext32s_tl(t3
, t1
);
13117 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13118 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13120 tcg_gen_add_tl(t4
, t2
, t3
);
13121 tcg_gen_ext32s_tl(t4
, t4
);
13122 tcg_gen_xor_tl(t2
, t2
, t3
);
13123 tcg_gen_xor_tl(t3
, t4
, t3
);
13124 tcg_gen_andc_tl(t2
, t3
, t2
);
13125 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13126 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13127 if (opc
== OPC_BOVC
) {
13129 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13132 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13134 tcg_temp_free(input_overflow
);
13138 } else if (rs
< rt
&& rs
== 0) {
13139 /* OPC_BEQZALC, OPC_BNEZALC */
13140 if (opc
== OPC_BEQZALC
) {
13142 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13145 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13148 /* OPC_BEQC, OPC_BNEC */
13149 if (opc
== OPC_BEQC
) {
13151 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13154 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13159 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13162 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13165 MIPS_INVAL("Compact conditional branch/jump");
13166 generate_exception_end(ctx
, EXCP_RI
);
13170 /* Generating branch here as compact branches don't have delay slot */
13171 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13174 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
13182 /* ISA extensions (ASEs) */
13183 /* MIPS16 extension to MIPS32 */
13185 /* MIPS16 major opcodes */
13187 M16_OPC_ADDIUSP
= 0x00,
13188 M16_OPC_ADDIUPC
= 0x01,
13190 M16_OPC_JAL
= 0x03,
13191 M16_OPC_BEQZ
= 0x04,
13192 M16_OPC_BNEQZ
= 0x05,
13193 M16_OPC_SHIFT
= 0x06,
13195 M16_OPC_RRIA
= 0x08,
13196 M16_OPC_ADDIU8
= 0x09,
13197 M16_OPC_SLTI
= 0x0a,
13198 M16_OPC_SLTIU
= 0x0b,
13201 M16_OPC_CMPI
= 0x0e,
13205 M16_OPC_LWSP
= 0x12,
13207 M16_OPC_LBU
= 0x14,
13208 M16_OPC_LHU
= 0x15,
13209 M16_OPC_LWPC
= 0x16,
13210 M16_OPC_LWU
= 0x17,
13213 M16_OPC_SWSP
= 0x1a,
13215 M16_OPC_RRR
= 0x1c,
13217 M16_OPC_EXTEND
= 0x1e,
13221 /* I8 funct field */
13240 /* RR funct field */
13274 /* I64 funct field */
13282 I64_DADDIUPC
= 0x6,
13286 /* RR ry field for CNVT */
13288 RR_RY_CNVT_ZEB
= 0x0,
13289 RR_RY_CNVT_ZEH
= 0x1,
13290 RR_RY_CNVT_ZEW
= 0x2,
13291 RR_RY_CNVT_SEB
= 0x4,
13292 RR_RY_CNVT_SEH
= 0x5,
13293 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register encoding into the architectural
 * GPR number: encodings 0..7 select $16, $17, $2, $3, $4, $5, $6, $7.
 *
 * @r: 3-bit register field from the instruction (caller masks to 0..7).
 */
static int xlat(int r)
{
    /* const: read-only table, consistent with mmreg()/mmreg2() */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
13303 static void gen_mips16_save(DisasContext
*ctx
,
13304 int xsregs
, int aregs
,
13305 int do_ra
, int do_s0
, int do_s1
,
13308 TCGv t0
= tcg_temp_new();
13309 TCGv t1
= tcg_temp_new();
13310 TCGv t2
= tcg_temp_new();
13340 generate_exception_end(ctx
, EXCP_RI
);
13346 gen_base_offset_addr(ctx
, t0
, 29, 12);
13347 gen_load_gpr(t1
, 7);
13348 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13351 gen_base_offset_addr(ctx
, t0
, 29, 8);
13352 gen_load_gpr(t1
, 6);
13353 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13356 gen_base_offset_addr(ctx
, t0
, 29, 4);
13357 gen_load_gpr(t1
, 5);
13358 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13361 gen_base_offset_addr(ctx
, t0
, 29, 0);
13362 gen_load_gpr(t1
, 4);
13363 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13366 gen_load_gpr(t0
, 29);
13368 #define DECR_AND_STORE(reg) do { \
13369 tcg_gen_movi_tl(t2, -4); \
13370 gen_op_addr_add(ctx, t0, t0, t2); \
13371 gen_load_gpr(t1, reg); \
13372 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13376 DECR_AND_STORE(31);
13381 DECR_AND_STORE(30);
13384 DECR_AND_STORE(23);
13387 DECR_AND_STORE(22);
13390 DECR_AND_STORE(21);
13393 DECR_AND_STORE(20);
13396 DECR_AND_STORE(19);
13399 DECR_AND_STORE(18);
13403 DECR_AND_STORE(17);
13406 DECR_AND_STORE(16);
13436 generate_exception_end(ctx
, EXCP_RI
);
13452 #undef DECR_AND_STORE
13454 tcg_gen_movi_tl(t2
, -framesize
);
13455 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13461 static void gen_mips16_restore(DisasContext
*ctx
,
13462 int xsregs
, int aregs
,
13463 int do_ra
, int do_s0
, int do_s1
,
13467 TCGv t0
= tcg_temp_new();
13468 TCGv t1
= tcg_temp_new();
13469 TCGv t2
= tcg_temp_new();
13471 tcg_gen_movi_tl(t2
, framesize
);
13472 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13474 #define DECR_AND_LOAD(reg) do { \
13475 tcg_gen_movi_tl(t2, -4); \
13476 gen_op_addr_add(ctx, t0, t0, t2); \
13477 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13478 gen_store_gpr(t1, reg); \
13542 generate_exception_end(ctx
, EXCP_RI
);
13558 #undef DECR_AND_LOAD
13560 tcg_gen_movi_tl(t2
, framesize
);
13561 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13567 static void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
13568 int is_64_bit
, int extended
)
13572 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13573 generate_exception_end(ctx
, EXCP_RI
);
13577 t0
= tcg_temp_new();
13579 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13580 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13582 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13588 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13591 TCGv_i32 t0
= tcg_const_i32(op
);
13592 TCGv t1
= tcg_temp_new();
13593 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13594 gen_helper_cache(cpu_env
, t1
, t0
);
13597 #if defined(TARGET_MIPS64)
13598 static void decode_i64_mips16(DisasContext
*ctx
,
13599 int ry
, int funct
, int16_t offset
,
13604 check_insn(ctx
, ISA_MIPS3
);
13605 check_mips_64(ctx
);
13606 offset
= extended
? offset
: offset
<< 3;
13607 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13610 check_insn(ctx
, ISA_MIPS3
);
13611 check_mips_64(ctx
);
13612 offset
= extended
? offset
: offset
<< 3;
13613 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13616 check_insn(ctx
, ISA_MIPS3
);
13617 check_mips_64(ctx
);
13618 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13619 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13622 check_insn(ctx
, ISA_MIPS3
);
13623 check_mips_64(ctx
);
13624 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13625 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13628 check_insn(ctx
, ISA_MIPS3
);
13629 check_mips_64(ctx
);
13630 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13631 generate_exception_end(ctx
, EXCP_RI
);
13633 offset
= extended
? offset
: offset
<< 3;
13634 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13638 check_insn(ctx
, ISA_MIPS3
);
13639 check_mips_64(ctx
);
13640 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13641 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13644 check_insn(ctx
, ISA_MIPS3
);
13645 check_mips_64(ctx
);
13646 offset
= extended
? offset
: offset
<< 2;
13647 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13650 check_insn(ctx
, ISA_MIPS3
);
13651 check_mips_64(ctx
);
13652 offset
= extended
? offset
: offset
<< 2;
13653 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13659 static int decode_extended_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13661 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13662 int op
, rx
, ry
, funct
, sa
;
13663 int16_t imm
, offset
;
13665 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13666 op
= (ctx
->opcode
>> 11) & 0x1f;
13667 sa
= (ctx
->opcode
>> 22) & 0x1f;
13668 funct
= (ctx
->opcode
>> 8) & 0x7;
13669 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13670 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13671 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13672 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13673 | (ctx
->opcode
& 0x1f));
13676 * The extended opcodes cleverly reuse the opcodes from their 16-bit
13680 case M16_OPC_ADDIUSP
:
13681 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13683 case M16_OPC_ADDIUPC
:
13684 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13687 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13688 /* No delay slot, so just process as a normal instruction */
13691 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13692 /* No delay slot, so just process as a normal instruction */
13694 case M16_OPC_BNEQZ
:
13695 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13696 /* No delay slot, so just process as a normal instruction */
13698 case M16_OPC_SHIFT
:
13699 switch (ctx
->opcode
& 0x3) {
13701 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13704 #if defined(TARGET_MIPS64)
13705 check_mips_64(ctx
);
13706 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13708 generate_exception_end(ctx
, EXCP_RI
);
13712 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13715 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13719 #if defined(TARGET_MIPS64)
13721 check_insn(ctx
, ISA_MIPS3
);
13722 check_mips_64(ctx
);
13723 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13727 imm
= ctx
->opcode
& 0xf;
13728 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13729 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13730 imm
= (int16_t) (imm
<< 1) >> 1;
13731 if ((ctx
->opcode
>> 4) & 0x1) {
13732 #if defined(TARGET_MIPS64)
13733 check_mips_64(ctx
);
13734 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13736 generate_exception_end(ctx
, EXCP_RI
);
13739 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13742 case M16_OPC_ADDIU8
:
13743 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13746 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13748 case M16_OPC_SLTIU
:
13749 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13754 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13757 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13760 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13763 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13766 check_insn(ctx
, ISA_MIPS32
);
13768 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13769 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13770 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13771 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13772 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13773 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13774 | (ctx
->opcode
& 0xf)) << 3;
13776 if (ctx
->opcode
& (1 << 7)) {
13777 gen_mips16_save(ctx
, xsregs
, aregs
,
13778 do_ra
, do_s0
, do_s1
,
13781 gen_mips16_restore(ctx
, xsregs
, aregs
,
13782 do_ra
, do_s0
, do_s1
,
13788 generate_exception_end(ctx
, EXCP_RI
);
13793 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13796 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13798 #if defined(TARGET_MIPS64)
13800 check_insn(ctx
, ISA_MIPS3
);
13801 check_mips_64(ctx
);
13802 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13806 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13809 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13812 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13815 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13818 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13821 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13824 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13826 #if defined(TARGET_MIPS64)
13828 check_insn(ctx
, ISA_MIPS3
);
13829 check_mips_64(ctx
);
13830 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13834 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13837 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13840 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13843 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13845 #if defined(TARGET_MIPS64)
13847 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13851 generate_exception_end(ctx
, EXCP_RI
);
13858 static inline bool is_uhi(int sdbbp_code
)
13860 #ifdef CONFIG_USER_ONLY
13863 return semihosting_enabled() && sdbbp_code
== 1;
13867 #ifdef CONFIG_USER_ONLY
/*
 * Stub for user-only builds: is_uhi() is constant-false there, so the
 * compiler should dead-code away every call site; reaching this at
 * runtime is a bug.
 */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
13875 static int decode_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13879 int op
, cnvt_op
, op1
, offset
;
13883 op
= (ctx
->opcode
>> 11) & 0x1f;
13884 sa
= (ctx
->opcode
>> 2) & 0x7;
13885 sa
= sa
== 0 ? 8 : sa
;
13886 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13887 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13888 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13889 op1
= offset
= ctx
->opcode
& 0x1f;
13894 case M16_OPC_ADDIUSP
:
13896 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13898 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13901 case M16_OPC_ADDIUPC
:
13902 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13905 offset
= (ctx
->opcode
& 0x7ff) << 1;
13906 offset
= (int16_t)(offset
<< 4) >> 4;
13907 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13908 /* No delay slot, so just process as a normal instruction */
13911 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13912 offset
= (((ctx
->opcode
& 0x1f) << 21)
13913 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13915 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13916 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13920 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13921 ((int8_t)ctx
->opcode
) << 1, 0);
13922 /* No delay slot, so just process as a normal instruction */
13924 case M16_OPC_BNEQZ
:
13925 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13926 ((int8_t)ctx
->opcode
) << 1, 0);
13927 /* No delay slot, so just process as a normal instruction */
13929 case M16_OPC_SHIFT
:
13930 switch (ctx
->opcode
& 0x3) {
13932 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13935 #if defined(TARGET_MIPS64)
13936 check_insn(ctx
, ISA_MIPS3
);
13937 check_mips_64(ctx
);
13938 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13940 generate_exception_end(ctx
, EXCP_RI
);
13944 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13947 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13951 #if defined(TARGET_MIPS64)
13953 check_insn(ctx
, ISA_MIPS3
);
13954 check_mips_64(ctx
);
13955 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13960 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13962 if ((ctx
->opcode
>> 4) & 1) {
13963 #if defined(TARGET_MIPS64)
13964 check_insn(ctx
, ISA_MIPS3
);
13965 check_mips_64(ctx
);
13966 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13968 generate_exception_end(ctx
, EXCP_RI
);
13971 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13975 case M16_OPC_ADDIU8
:
13977 int16_t imm
= (int8_t) ctx
->opcode
;
13979 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13984 int16_t imm
= (uint8_t) ctx
->opcode
;
13985 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13988 case M16_OPC_SLTIU
:
13990 int16_t imm
= (uint8_t) ctx
->opcode
;
13991 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13998 funct
= (ctx
->opcode
>> 8) & 0x7;
14001 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
14002 ((int8_t)ctx
->opcode
) << 1, 0);
14005 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
14006 ((int8_t)ctx
->opcode
) << 1, 0);
14009 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
14012 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
14013 ((int8_t)ctx
->opcode
) << 3);
14016 check_insn(ctx
, ISA_MIPS32
);
14018 int do_ra
= ctx
->opcode
& (1 << 6);
14019 int do_s0
= ctx
->opcode
& (1 << 5);
14020 int do_s1
= ctx
->opcode
& (1 << 4);
14021 int framesize
= ctx
->opcode
& 0xf;
14023 if (framesize
== 0) {
14026 framesize
= framesize
<< 3;
14029 if (ctx
->opcode
& (1 << 7)) {
14030 gen_mips16_save(ctx
, 0, 0,
14031 do_ra
, do_s0
, do_s1
, framesize
);
14033 gen_mips16_restore(ctx
, 0, 0,
14034 do_ra
, do_s0
, do_s1
, framesize
);
14040 int rz
= xlat(ctx
->opcode
& 0x7);
14042 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
14043 ((ctx
->opcode
>> 5) & 0x7);
14044 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
14048 reg32
= ctx
->opcode
& 0x1f;
14049 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
14052 generate_exception_end(ctx
, EXCP_RI
);
14059 int16_t imm
= (uint8_t) ctx
->opcode
;
14061 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
14066 int16_t imm
= (uint8_t) ctx
->opcode
;
14067 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
14070 #if defined(TARGET_MIPS64)
14072 check_insn(ctx
, ISA_MIPS3
);
14073 check_mips_64(ctx
);
14074 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
14078 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
14081 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
14084 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14087 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
14090 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
14093 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
14096 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
14098 #if defined(TARGET_MIPS64)
14100 check_insn(ctx
, ISA_MIPS3
);
14101 check_mips_64(ctx
);
14102 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
14106 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14109 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14112 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14115 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14119 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14122 switch (ctx
->opcode
& 0x3) {
14124 mips32_op
= OPC_ADDU
;
14127 mips32_op
= OPC_SUBU
;
14129 #if defined(TARGET_MIPS64)
14131 mips32_op
= OPC_DADDU
;
14132 check_insn(ctx
, ISA_MIPS3
);
14133 check_mips_64(ctx
);
14136 mips32_op
= OPC_DSUBU
;
14137 check_insn(ctx
, ISA_MIPS3
);
14138 check_mips_64(ctx
);
14142 generate_exception_end(ctx
, EXCP_RI
);
14146 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14155 int nd
= (ctx
->opcode
>> 7) & 0x1;
14156 int link
= (ctx
->opcode
>> 6) & 0x1;
14157 int ra
= (ctx
->opcode
>> 5) & 0x1;
14160 check_insn(ctx
, ISA_MIPS32
);
14169 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14174 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14175 gen_helper_do_semihosting(cpu_env
);
14178 * XXX: not clear which exception should be raised
14179 * when in debug mode...
14181 check_insn(ctx
, ISA_MIPS32
);
14182 generate_exception_end(ctx
, EXCP_DBp
);
14186 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14189 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14192 generate_exception_end(ctx
, EXCP_BREAK
);
14195 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14198 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14201 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14203 #if defined(TARGET_MIPS64)
14205 check_insn(ctx
, ISA_MIPS3
);
14206 check_mips_64(ctx
);
14207 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14211 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14214 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14217 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14220 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14223 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14226 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14229 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14232 check_insn(ctx
, ISA_MIPS32
);
14234 case RR_RY_CNVT_ZEB
:
14235 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14237 case RR_RY_CNVT_ZEH
:
14238 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14240 case RR_RY_CNVT_SEB
:
14241 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14243 case RR_RY_CNVT_SEH
:
14244 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14246 #if defined(TARGET_MIPS64)
14247 case RR_RY_CNVT_ZEW
:
14248 check_insn(ctx
, ISA_MIPS64
);
14249 check_mips_64(ctx
);
14250 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14252 case RR_RY_CNVT_SEW
:
14253 check_insn(ctx
, ISA_MIPS64
);
14254 check_mips_64(ctx
);
14255 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14259 generate_exception_end(ctx
, EXCP_RI
);
14264 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14266 #if defined(TARGET_MIPS64)
14268 check_insn(ctx
, ISA_MIPS3
);
14269 check_mips_64(ctx
);
14270 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14273 check_insn(ctx
, ISA_MIPS3
);
14274 check_mips_64(ctx
);
14275 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14278 check_insn(ctx
, ISA_MIPS3
);
14279 check_mips_64(ctx
);
14280 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14283 check_insn(ctx
, ISA_MIPS3
);
14284 check_mips_64(ctx
);
14285 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14289 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14292 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14295 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14298 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14300 #if defined(TARGET_MIPS64)
14302 check_insn(ctx
, ISA_MIPS3
);
14303 check_mips_64(ctx
);
14304 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14307 check_insn(ctx
, ISA_MIPS3
);
14308 check_mips_64(ctx
);
14309 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14312 check_insn(ctx
, ISA_MIPS3
);
14313 check_mips_64(ctx
);
14314 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14317 check_insn(ctx
, ISA_MIPS3
);
14318 check_mips_64(ctx
);
14319 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14323 generate_exception_end(ctx
, EXCP_RI
);
14327 case M16_OPC_EXTEND
:
14328 decode_extended_mips16_opc(env
, ctx
);
14331 #if defined(TARGET_MIPS64)
14333 funct
= (ctx
->opcode
>> 8) & 0x7;
14334 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14338 generate_exception_end(ctx
, EXCP_RI
);
14345 /* microMIPS extension to MIPS32/MIPS64 */
14348 * microMIPS32/microMIPS64 major opcodes
14350 * 1. MIPS Architecture for Programmers Volume II-B:
14351 * The microMIPS32 Instruction Set (Revision 3.05)
14353 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14355 * 2. MIPS Architecture For Programmers Volume II-A:
14356 * The MIPS64 Instruction Set (Revision 3.51)
14386 POOL32S
= 0x16, /* MIPS64 */
14387 DADDIU32
= 0x17, /* MIPS64 */
14416 /* 0x29 is reserved */
14429 /* 0x31 is reserved */
14442 SD32
= 0x36, /* MIPS64 */
14443 LD32
= 0x37, /* MIPS64 */
14445 /* 0x39 is reserved */
14461 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14483 /* POOL32A encoding of minor opcode field */
14487 * These opcodes are distinguished only by bits 9..6; those bits are
14488 * what are recorded below.
14526 /* The following can be distinguished by their lower 6 bits. */
14536 /* POOL32AXF encoding of minor opcode field extension */
14539 * 1. MIPS Architecture for Programmers Volume II-B:
14540 * The microMIPS32 Instruction Set (Revision 3.05)
14542 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14544 * 2. MIPS Architecture for Programmers VolumeIV-e:
14545 * The MIPS DSP Application-Specific Extension
14546 * to the microMIPS32 Architecture (Revision 2.34)
14548 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14563 /* begin of microMIPS32 DSP */
14565 /* bits 13..12 for 0x01 */
14571 /* bits 13..12 for 0x2a */
14577 /* bits 13..12 for 0x32 */
14581 /* end of microMIPS32 DSP */
14583 /* bits 15..12 for 0x2c */
14600 /* bits 15..12 for 0x34 */
14608 /* bits 15..12 for 0x3c */
14610 JR
= 0x0, /* alias */
14618 /* bits 15..12 for 0x05 */
14622 /* bits 15..12 for 0x0d */
14634 /* bits 15..12 for 0x15 */
14640 /* bits 15..12 for 0x1d */
14644 /* bits 15..12 for 0x2d */
14649 /* bits 15..12 for 0x35 */
14656 /* POOL32B encoding of minor opcode field (bits 15..12) */
14672 /* POOL32C encoding of minor opcode field (bits 15..12) */
14693 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14706 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14719 /* POOL32F encoding of minor opcode field (bits 5..0) */
14722 /* These are the bit 7..6 values */
14731 /* These are the bit 8..6 values */
14756 MOVZ_FMT_05
= 0x05,
14790 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14797 /* POOL32Fxf encoding of minor opcode extension field */
14835 /* POOL32I encoding of minor opcode field (bits 25..21) */
14865 /* These overlap and are distinguished by bit16 of the instruction */
14874 /* POOL16A encoding of minor opcode field */
14881 /* POOL16B encoding of minor opcode field */
14888 /* POOL16C encoding of minor opcode field */
14908 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14932 /* POOL16D encoding of minor opcode field */
14939 /* POOL16E encoding of minor opcode field */
/*
 * Decode the 3-bit microMIPS register field used by most 16-bit
 * instructions: encodings 0 and 1 name $16/$17, 2..7 name $2..$7.
 */
static int mmreg(int r)
{
    return r < 2 ? 16 + r : r;
}
/*
 * Used for 16-bit store instructions: encoding 0 names $0 (zero),
 * 1 names $17, and 2..7 name $2..$7 directly.
 */
static int mmreg2(int r)
{
    return r == 1 ? 17 : r;
}
14961 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14962 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14963 #define uMIPS_RS2(op) uMIPS_RS(op)
14964 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14965 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14966 #define uMIPS_RS5(op) (op & 0x1f)
14968 /* Signed immediate */
14969 #define SIMM(op, start, width) \
14970 ((int32_t)(((op >> start) & ((~0U) >> (32 - width))) \
14973 /* Zero-extended immediate */
14974 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32 - width)))
14976 static void gen_addiur1sp(DisasContext
*ctx
)
14978 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14980 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14983 static void gen_addiur2(DisasContext
*ctx
)
14985 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14986 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14987 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14989 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14992 static void gen_addiusp(DisasContext
*ctx
)
14994 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14997 if (encoded
<= 1) {
14998 decoded
= 256 + encoded
;
14999 } else if (encoded
<= 255) {
15001 } else if (encoded
<= 509) {
15002 decoded
= encoded
- 512;
15004 decoded
= encoded
- 768;
15007 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
15010 static void gen_addius5(DisasContext
*ctx
)
15012 int imm
= SIMM(ctx
->opcode
, 1, 4);
15013 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15015 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
15018 static void gen_andi16(DisasContext
*ctx
)
15020 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
15021 31, 32, 63, 64, 255, 32768, 65535 };
15022 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15023 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15024 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
15026 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
15029 static void gen_ldst_multiple(DisasContext
*ctx
, uint32_t opc
, int reglist
,
15030 int base
, int16_t offset
)
15035 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
15036 generate_exception_end(ctx
, EXCP_RI
);
15040 t0
= tcg_temp_new();
15042 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15044 t1
= tcg_const_tl(reglist
);
15045 t2
= tcg_const_i32(ctx
->mem_idx
);
15047 save_cpu_state(ctx
, 1);
15050 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
15053 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
15055 #ifdef TARGET_MIPS64
15057 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
15060 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
15066 tcg_temp_free_i32(t2
);
15070 static void gen_pool16c_insn(DisasContext
*ctx
)
15072 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
15073 int rs
= mmreg(ctx
->opcode
& 0x7);
15075 switch (((ctx
->opcode
) >> 4) & 0x3f) {
15080 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
15086 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
15092 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
15098 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
15105 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15106 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15108 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
15117 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15118 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15120 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
15127 int reg
= ctx
->opcode
& 0x1f;
15129 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
15135 int reg
= ctx
->opcode
& 0x1f;
15136 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
15138 * Let normal delay slot handling in our caller take us
15139 * to the branch target.
15145 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
15146 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15150 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15151 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15155 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15159 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15162 generate_exception_end(ctx
, EXCP_BREAK
);
15165 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15166 gen_helper_do_semihosting(cpu_env
);
15169 * XXX: not clear which exception should be raised
15170 * when in debug mode...
15172 check_insn(ctx
, ISA_MIPS32
);
15173 generate_exception_end(ctx
, EXCP_DBp
);
15176 case JRADDIUSP
+ 0:
15177 case JRADDIUSP
+ 1:
15179 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15180 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15181 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15183 * Let normal delay slot handling in our caller take us
15184 * to the branch target.
15189 generate_exception_end(ctx
, EXCP_RI
);
15194 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15197 int rd
, rs
, re
, rt
;
15198 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15199 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15200 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15201 rd
= rd_enc
[enc_dest
];
15202 re
= re_enc
[enc_dest
];
15203 rs
= rs_rt_enc
[enc_rs
];
15204 rt
= rs_rt_enc
[enc_rt
];
15206 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15208 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15211 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15213 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15217 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15219 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15220 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15222 switch (ctx
->opcode
& 0xf) {
15224 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15227 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15231 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15232 int offset
= extract32(ctx
->opcode
, 4, 4);
15233 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15236 case R6_JRC16
: /* JRCADDIUSP */
15237 if ((ctx
->opcode
>> 4) & 1) {
15239 int imm
= extract32(ctx
->opcode
, 5, 5);
15240 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15241 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15244 rs
= extract32(ctx
->opcode
, 5, 5);
15245 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15257 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15258 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15259 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15260 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15264 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15267 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15271 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15272 int offset
= extract32(ctx
->opcode
, 4, 4);
15273 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15276 case JALRC16
: /* BREAK16, SDBBP16 */
15277 switch (ctx
->opcode
& 0x3f) {
15279 case JALRC16
+ 0x20:
15281 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15286 generate_exception(ctx
, EXCP_BREAK
);
15290 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15291 gen_helper_do_semihosting(cpu_env
);
15293 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15294 generate_exception(ctx
, EXCP_RI
);
15296 generate_exception(ctx
, EXCP_DBp
);
15303 generate_exception(ctx
, EXCP_RI
);
15308 static void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
15310 TCGv t0
= tcg_temp_new();
15311 TCGv t1
= tcg_temp_new();
15313 gen_load_gpr(t0
, base
);
15316 gen_load_gpr(t1
, index
);
15317 tcg_gen_shli_tl(t1
, t1
, 2);
15318 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15321 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15322 gen_store_gpr(t1
, rd
);
15328 static void gen_ldst_pair(DisasContext
*ctx
, uint32_t opc
, int rd
,
15329 int base
, int16_t offset
)
15333 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15334 generate_exception_end(ctx
, EXCP_RI
);
15338 t0
= tcg_temp_new();
15339 t1
= tcg_temp_new();
15341 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15346 generate_exception_end(ctx
, EXCP_RI
);
15349 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15350 gen_store_gpr(t1
, rd
);
15351 tcg_gen_movi_tl(t1
, 4);
15352 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15353 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15354 gen_store_gpr(t1
, rd
+ 1);
15357 gen_load_gpr(t1
, rd
);
15358 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15359 tcg_gen_movi_tl(t1
, 4);
15360 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15361 gen_load_gpr(t1
, rd
+ 1);
15362 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15364 #ifdef TARGET_MIPS64
15367 generate_exception_end(ctx
, EXCP_RI
);
15370 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15371 gen_store_gpr(t1
, rd
);
15372 tcg_gen_movi_tl(t1
, 8);
15373 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15374 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15375 gen_store_gpr(t1
, rd
+ 1);
15378 gen_load_gpr(t1
, rd
);
15379 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15380 tcg_gen_movi_tl(t1
, 8);
15381 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15382 gen_load_gpr(t1
, rd
+ 1);
15383 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15391 static void gen_sync(int stype
)
15393 TCGBar tcg_mo
= TCG_BAR_SC
;
15396 case 0x4: /* SYNC_WMB */
15397 tcg_mo
|= TCG_MO_ST_ST
;
15399 case 0x10: /* SYNC_MB */
15400 tcg_mo
|= TCG_MO_ALL
;
15402 case 0x11: /* SYNC_ACQUIRE */
15403 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15405 case 0x12: /* SYNC_RELEASE */
15406 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15408 case 0x13: /* SYNC_RMB */
15409 tcg_mo
|= TCG_MO_LD_LD
;
15412 tcg_mo
|= TCG_MO_ALL
;
15416 tcg_gen_mb(tcg_mo
);
/*
 * gen_pool32axf: decode the microMIPS POOL32Axf minor opcode space and
 * emit the corresponding translation.  Covers trap instructions, CP0
 * moves, multiply/divide and multiply-accumulate ops, bit-shuffle and
 * count-leading ops, JALR variants, shadow-register moves, TLB
 * maintenance, DI/EI, SYNC/SYSCALL/SDBBP and HI/LO moves.
 *
 * NOTE(review): this extraction has lost the case labels, breaks and
 * braces of the switch bodies; only the action statements survive.
 * Comments below annotate the visible groups — confirm against the
 * full file before relying on exact decode boundaries.
 */
15419 static void gen_pool32axf(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
/* opcode bits 11..6: selects the sub-opcode group dispatched below */
15421 int extension
= (ctx
->opcode
>> 6) & 0x3f;
/* opcode bits 15..12: minor opcode within the selected group */
15422 int minor
= (ctx
->opcode
>> 12) & 0xf;
/* MIPS32-ISA opcode value forwarded to the shared gen_* helpers */
15423 uint32_t mips32_op
;
15425 switch (extension
) {
/* Trap instructions: map each encoding to its MIPS32 opcode and emit
 * a conditional trap via gen_trap() (third operand -1 = register form) */
15427 mips32_op
= OPC_TEQ
;
15430 mips32_op
= OPC_TGE
;
15433 mips32_op
= OPC_TGEU
;
15436 mips32_op
= OPC_TLT
;
15439 mips32_op
= OPC_TLTU
;
15442 mips32_op
= OPC_TNE
;
15444 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
/* CP0 register moves are privileged; system-mode builds only */
15446 #ifndef CONFIG_USER_ONLY
15449 check_cp0_enabled(ctx
);
/* MFC0 with rt == 0 discards the result */
15451 /* Treat as NOP. */
15454 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15458 check_cp0_enabled(ctx
);
15460 TCGv t0
= tcg_temp_new();
15462 gen_load_gpr(t0
, rt
);
15463 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
/* MADD/MADDU/MSUB/MSUBU: accumulator selected by opcode bits 15..14 */
15469 switch (minor
& 3) {
15471 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15474 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15477 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15480 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15483 goto pool32axf_invalid
;
/* MULT/MULTU: accumulator selected by opcode bits 15..14 */
15487 switch (minor
& 3) {
15489 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15492 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15495 goto pool32axf_invalid
;
/* Bit-manipulation (BITSWAP/SEB/SEH/WSBH), count-leading (CLO/CLZ),
 * RDHWR, and the classic HI/LO mul/div group */
15501 check_insn(ctx
, ISA_MIPS32R6
);
15502 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15505 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15508 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15511 mips32_op
= OPC_CLO
;
15514 mips32_op
= OPC_CLZ
;
15516 check_insn(ctx
, ISA_MIPS32
);
15517 gen_cl(ctx
, mips32_op
, rt
, rs
);
/* RDHWR was removed from this encoding in R6 */
15520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15521 gen_rdhwr(ctx
, rt
, rs
, 0);
15524 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
/* Pre-R6 MULT/MULTU/DIV/DIVU on accumulator 0 */
15527 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15528 mips32_op
= OPC_MULT
;
15531 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15532 mips32_op
= OPC_MULTU
;
15535 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15536 mips32_op
= OPC_DIV
;
15539 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15540 mips32_op
= OPC_DIVU
;
15543 check_insn(ctx
, ISA_MIPS32
);
15544 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
/* Pre-R6 MADD/MADDU/MSUB/MSUBU on accumulator 0 */
15547 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15548 mips32_op
= OPC_MADD
;
15551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15552 mips32_op
= OPC_MADDU
;
15555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15556 mips32_op
= OPC_MSUB
;
15559 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15560 mips32_op
= OPC_MSUBU
;
15562 check_insn(ctx
, ISA_MIPS32
);
15563 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15566 goto pool32axf_invalid
;
/* COP2 moves: coprocessor 2 is not implemented, raise CpU(2) */
15577 generate_exception_err(ctx
, EXCP_CpU
, 2);
15580 goto pool32axf_invalid
;
/* Jump-and-link-register variants; R6 re-uses these encodings for the
 * compact (no-delay-slot) JALRC forms */
15585 case JALR
: /* JALRC */
15586 case JALR_HB
: /* JALRC_HB */
15587 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15588 /* JALRC, JALRC_HB */
15589 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15591 /* JALR, JALR_HB */
15592 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15593 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
/* JALRS/JALRS_HB: 16-bit delay slot variant, removed in R6 */
15598 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15599 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15600 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15603 goto pool32axf_invalid
;
/* Shadow register set moves (R2, privileged) */
15609 check_cp0_enabled(ctx
);
15610 check_insn(ctx
, ISA_MIPS32R2
);
15611 gen_load_srsgpr(rs
, rt
);
15614 check_cp0_enabled(ctx
);
15615 check_insn(ctx
, ISA_MIPS32R2
);
15616 gen_store_srsgpr(rs
, rt
);
15619 goto pool32axf_invalid
;
/* TLB maintenance, WAIT, DERET/ERET — privileged, system-mode only;
 * all dispatched through gen_cp0() */
15622 #ifndef CONFIG_USER_ONLY
15626 mips32_op
= OPC_TLBP
;
15629 mips32_op
= OPC_TLBR
;
15632 mips32_op
= OPC_TLBWI
;
15635 mips32_op
= OPC_TLBWR
;
15638 mips32_op
= OPC_TLBINV
;
15641 mips32_op
= OPC_TLBINVF
;
15644 mips32_op
= OPC_WAIT
;
15647 mips32_op
= OPC_DERET
;
15650 mips32_op
= OPC_ERET
;
15652 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15655 goto pool32axf_invalid
;
/* DI: disable interrupts; previous Status value is written to rs */
15661 check_cp0_enabled(ctx
);
15663 TCGv t0
= tcg_temp_new();
15665 save_cpu_state(ctx
, 1);
15666 gen_helper_di(t0
, cpu_env
);
15667 gen_store_gpr(t0
, rs
);
15669 * Stop translation as we may have switched the execution
15672 ctx
->base
.is_jmp
= DISAS_STOP
;
/* EI: enable interrupts; must exit the TB to take pending interrupts */
15677 check_cp0_enabled(ctx
);
15679 TCGv t0
= tcg_temp_new();
15681 save_cpu_state(ctx
, 1);
15682 gen_helper_ei(t0
, cpu_env
);
15683 gen_store_gpr(t0
, rs
);
15685 * DISAS_STOP isn't sufficient, we need to ensure we break out
15686 * of translated code to check for pending interrupts.
15688 gen_save_pc(ctx
->base
.pc_next
+ 4);
15689 ctx
->base
.is_jmp
= DISAS_EXIT
;
15694 goto pool32axf_invalid
;
/* SYNC (stype in opcode bits 20..16), SYSCALL and SDBBP; SDBBP is
 * routed to semihosting when the UHI code matches */
15701 gen_sync(extract32(ctx
->opcode
, 16, 5));
15704 generate_exception_end(ctx
, EXCP_SYSCALL
);
15707 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15708 gen_helper_do_semihosting(cpu_env
);
15710 check_insn(ctx
, ISA_MIPS32
);
15711 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15712 generate_exception_end(ctx
, EXCP_RI
);
15714 generate_exception_end(ctx
, EXCP_DBp
);
15719 goto pool32axf_invalid
;
/* MFHI/MFLO/MTHI/MTLO with DSP accumulator select in minor >> 2 */
15723 switch (minor
& 3) {
15725 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15728 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15731 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15734 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15737 goto pool32axf_invalid
;
/* Pre-R6 MFHI/MFLO/MTHI/MTLO on accumulator 0 */
15741 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15744 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15747 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15750 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15753 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15756 goto pool32axf_invalid
;
/* common target for every invalid encoding above */
15761 MIPS_INVAL("pool32axf");
15762 generate_exception_end(ctx
, EXCP_RI
);
15768 * Values for microMIPS fmt field. Variable-width, depending on which
15769 * formats the instruction supports.
15788 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15790 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15791 uint32_t mips32_op
;
15793 #define FLOAT_1BIT_FMT(opc, fmt) ((fmt << 8) | opc)
15794 #define FLOAT_2BIT_FMT(opc, fmt) ((fmt << 7) | opc)
15795 #define COND_FLOAT_MOV(opc, cond) ((cond << 7) | opc)
15797 switch (extension
) {
15798 case FLOAT_1BIT_FMT(CFC1
, 0):
15799 mips32_op
= OPC_CFC1
;
15801 case FLOAT_1BIT_FMT(CTC1
, 0):
15802 mips32_op
= OPC_CTC1
;
15804 case FLOAT_1BIT_FMT(MFC1
, 0):
15805 mips32_op
= OPC_MFC1
;
15807 case FLOAT_1BIT_FMT(MTC1
, 0):
15808 mips32_op
= OPC_MTC1
;
15810 case FLOAT_1BIT_FMT(MFHC1
, 0):
15811 mips32_op
= OPC_MFHC1
;
15813 case FLOAT_1BIT_FMT(MTHC1
, 0):
15814 mips32_op
= OPC_MTHC1
;
15816 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15819 /* Reciprocal square root */
15820 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15821 mips32_op
= OPC_RSQRT_S
;
15823 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15824 mips32_op
= OPC_RSQRT_D
;
15828 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15829 mips32_op
= OPC_SQRT_S
;
15831 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15832 mips32_op
= OPC_SQRT_D
;
15836 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15837 mips32_op
= OPC_RECIP_S
;
15839 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15840 mips32_op
= OPC_RECIP_D
;
15844 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15845 mips32_op
= OPC_FLOOR_L_S
;
15847 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15848 mips32_op
= OPC_FLOOR_L_D
;
15850 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15851 mips32_op
= OPC_FLOOR_W_S
;
15853 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15854 mips32_op
= OPC_FLOOR_W_D
;
15858 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15859 mips32_op
= OPC_CEIL_L_S
;
15861 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15862 mips32_op
= OPC_CEIL_L_D
;
15864 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15865 mips32_op
= OPC_CEIL_W_S
;
15867 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15868 mips32_op
= OPC_CEIL_W_D
;
15872 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15873 mips32_op
= OPC_TRUNC_L_S
;
15875 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15876 mips32_op
= OPC_TRUNC_L_D
;
15878 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15879 mips32_op
= OPC_TRUNC_W_S
;
15881 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15882 mips32_op
= OPC_TRUNC_W_D
;
15886 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15887 mips32_op
= OPC_ROUND_L_S
;
15889 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15890 mips32_op
= OPC_ROUND_L_D
;
15892 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15893 mips32_op
= OPC_ROUND_W_S
;
15895 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15896 mips32_op
= OPC_ROUND_W_D
;
15899 /* Integer to floating-point conversion */
15900 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15901 mips32_op
= OPC_CVT_L_S
;
15903 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15904 mips32_op
= OPC_CVT_L_D
;
15906 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15907 mips32_op
= OPC_CVT_W_S
;
15909 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15910 mips32_op
= OPC_CVT_W_D
;
15913 /* Paired-foo conversions */
15914 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15915 mips32_op
= OPC_CVT_S_PL
;
15917 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15918 mips32_op
= OPC_CVT_S_PU
;
15920 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15921 mips32_op
= OPC_CVT_PW_PS
;
15923 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15924 mips32_op
= OPC_CVT_PS_PW
;
15927 /* Floating-point moves */
15928 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15929 mips32_op
= OPC_MOV_S
;
15931 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15932 mips32_op
= OPC_MOV_D
;
15934 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15935 mips32_op
= OPC_MOV_PS
;
15938 /* Absolute value */
15939 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15940 mips32_op
= OPC_ABS_S
;
15942 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15943 mips32_op
= OPC_ABS_D
;
15945 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15946 mips32_op
= OPC_ABS_PS
;
15950 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15951 mips32_op
= OPC_NEG_S
;
15953 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15954 mips32_op
= OPC_NEG_D
;
15956 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15957 mips32_op
= OPC_NEG_PS
;
15960 /* Reciprocal square root step */
15961 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15962 mips32_op
= OPC_RSQRT1_S
;
15964 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15965 mips32_op
= OPC_RSQRT1_D
;
15967 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15968 mips32_op
= OPC_RSQRT1_PS
;
15971 /* Reciprocal step */
15972 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15973 mips32_op
= OPC_RECIP1_S
;
15975 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15976 mips32_op
= OPC_RECIP1_S
;
15978 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15979 mips32_op
= OPC_RECIP1_PS
;
15982 /* Conversions from double */
15983 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15984 mips32_op
= OPC_CVT_D_S
;
15986 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15987 mips32_op
= OPC_CVT_D_W
;
15989 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15990 mips32_op
= OPC_CVT_D_L
;
15993 /* Conversions from single */
15994 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15995 mips32_op
= OPC_CVT_S_D
;
15997 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15998 mips32_op
= OPC_CVT_S_W
;
16000 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
16001 mips32_op
= OPC_CVT_S_L
;
16003 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
16006 /* Conditional moves on floating-point codes */
16007 case COND_FLOAT_MOV(MOVT
, 0):
16008 case COND_FLOAT_MOV(MOVT
, 1):
16009 case COND_FLOAT_MOV(MOVT
, 2):
16010 case COND_FLOAT_MOV(MOVT
, 3):
16011 case COND_FLOAT_MOV(MOVT
, 4):
16012 case COND_FLOAT_MOV(MOVT
, 5):
16013 case COND_FLOAT_MOV(MOVT
, 6):
16014 case COND_FLOAT_MOV(MOVT
, 7):
16015 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16016 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
16018 case COND_FLOAT_MOV(MOVF
, 0):
16019 case COND_FLOAT_MOV(MOVF
, 1):
16020 case COND_FLOAT_MOV(MOVF
, 2):
16021 case COND_FLOAT_MOV(MOVF
, 3):
16022 case COND_FLOAT_MOV(MOVF
, 4):
16023 case COND_FLOAT_MOV(MOVF
, 5):
16024 case COND_FLOAT_MOV(MOVF
, 6):
16025 case COND_FLOAT_MOV(MOVF
, 7):
16026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16027 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
16030 MIPS_INVAL("pool32fxf");
16031 generate_exception_end(ctx
, EXCP_RI
);
16036 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
16040 int rt
, rs
, rd
, rr
;
16042 uint32_t op
, minor
, minor2
, mips32_op
;
16043 uint32_t cond
, fmt
, cc
;
16045 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
16046 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
16048 rt
= (ctx
->opcode
>> 21) & 0x1f;
16049 rs
= (ctx
->opcode
>> 16) & 0x1f;
16050 rd
= (ctx
->opcode
>> 11) & 0x1f;
16051 rr
= (ctx
->opcode
>> 6) & 0x1f;
16052 imm
= (int16_t) ctx
->opcode
;
16054 op
= (ctx
->opcode
>> 26) & 0x3f;
16057 minor
= ctx
->opcode
& 0x3f;
16060 minor
= (ctx
->opcode
>> 6) & 0xf;
16063 mips32_op
= OPC_SLL
;
16066 mips32_op
= OPC_SRA
;
16069 mips32_op
= OPC_SRL
;
16072 mips32_op
= OPC_ROTR
;
16074 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
16077 check_insn(ctx
, ISA_MIPS32R6
);
16078 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
16081 check_insn(ctx
, ISA_MIPS32R6
);
16082 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
16085 check_insn(ctx
, ISA_MIPS32R6
);
16086 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
16089 goto pool32a_invalid
;
16093 minor
= (ctx
->opcode
>> 6) & 0xf;
16097 mips32_op
= OPC_ADD
;
16100 mips32_op
= OPC_ADDU
;
16103 mips32_op
= OPC_SUB
;
16106 mips32_op
= OPC_SUBU
;
16109 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16110 mips32_op
= OPC_MUL
;
16112 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
16116 mips32_op
= OPC_SLLV
;
16119 mips32_op
= OPC_SRLV
;
16122 mips32_op
= OPC_SRAV
;
16125 mips32_op
= OPC_ROTRV
;
16127 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
16129 /* Logical operations */
16131 mips32_op
= OPC_AND
;
16134 mips32_op
= OPC_OR
;
16137 mips32_op
= OPC_NOR
;
16140 mips32_op
= OPC_XOR
;
16142 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
16144 /* Set less than */
16146 mips32_op
= OPC_SLT
;
16149 mips32_op
= OPC_SLTU
;
16151 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
16154 goto pool32a_invalid
;
16158 minor
= (ctx
->opcode
>> 6) & 0xf;
16160 /* Conditional moves */
16161 case MOVN
: /* MUL */
16162 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16164 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16167 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16170 case MOVZ
: /* MUH */
16171 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16173 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16176 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16180 check_insn(ctx
, ISA_MIPS32R6
);
16181 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16184 check_insn(ctx
, ISA_MIPS32R6
);
16185 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16187 case LWXS
: /* DIV */
16188 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16190 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16193 gen_ldxs(ctx
, rs
, rt
, rd
);
16197 check_insn(ctx
, ISA_MIPS32R6
);
16198 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16201 check_insn(ctx
, ISA_MIPS32R6
);
16202 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16205 check_insn(ctx
, ISA_MIPS32R6
);
16206 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16209 goto pool32a_invalid
;
16213 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16216 check_insn(ctx
, ISA_MIPS32R6
);
16217 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16218 extract32(ctx
->opcode
, 9, 2));
16221 check_insn(ctx
, ISA_MIPS32R6
);
16222 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16225 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16228 gen_pool32axf(env
, ctx
, rt
, rs
);
16231 generate_exception_end(ctx
, EXCP_BREAK
);
16234 check_insn(ctx
, ISA_MIPS32R6
);
16235 generate_exception_end(ctx
, EXCP_RI
);
16239 MIPS_INVAL("pool32a");
16240 generate_exception_end(ctx
, EXCP_RI
);
16245 minor
= (ctx
->opcode
>> 12) & 0xf;
16248 check_cp0_enabled(ctx
);
16249 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16250 gen_cache_operation(ctx
, rt
, rs
, imm
);
16255 /* COP2: Not implemented. */
16256 generate_exception_err(ctx
, EXCP_CpU
, 2);
16258 #ifdef TARGET_MIPS64
16261 check_insn(ctx
, ISA_MIPS3
);
16262 check_mips_64(ctx
);
16267 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16269 #ifdef TARGET_MIPS64
16272 check_insn(ctx
, ISA_MIPS3
);
16273 check_mips_64(ctx
);
16278 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16281 MIPS_INVAL("pool32b");
16282 generate_exception_end(ctx
, EXCP_RI
);
16287 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16288 minor
= ctx
->opcode
& 0x3f;
16289 check_cp1_enabled(ctx
);
16292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16293 mips32_op
= OPC_ALNV_PS
;
16296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16297 mips32_op
= OPC_MADD_S
;
16300 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16301 mips32_op
= OPC_MADD_D
;
16304 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16305 mips32_op
= OPC_MADD_PS
;
16308 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16309 mips32_op
= OPC_MSUB_S
;
16312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16313 mips32_op
= OPC_MSUB_D
;
16316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16317 mips32_op
= OPC_MSUB_PS
;
16320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16321 mips32_op
= OPC_NMADD_S
;
16324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16325 mips32_op
= OPC_NMADD_D
;
16328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16329 mips32_op
= OPC_NMADD_PS
;
16332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16333 mips32_op
= OPC_NMSUB_S
;
16336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16337 mips32_op
= OPC_NMSUB_D
;
16340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16341 mips32_op
= OPC_NMSUB_PS
;
16343 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16345 case CABS_COND_FMT
:
16346 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16347 cond
= (ctx
->opcode
>> 6) & 0xf;
16348 cc
= (ctx
->opcode
>> 13) & 0x7;
16349 fmt
= (ctx
->opcode
>> 10) & 0x3;
16352 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16355 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16358 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16361 goto pool32f_invalid
;
16365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16366 cond
= (ctx
->opcode
>> 6) & 0xf;
16367 cc
= (ctx
->opcode
>> 13) & 0x7;
16368 fmt
= (ctx
->opcode
>> 10) & 0x3;
16371 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16374 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16377 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16380 goto pool32f_invalid
;
16384 check_insn(ctx
, ISA_MIPS32R6
);
16385 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16388 check_insn(ctx
, ISA_MIPS32R6
);
16389 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16392 gen_pool32fxf(ctx
, rt
, rs
);
16396 switch ((ctx
->opcode
>> 6) & 0x7) {
16398 mips32_op
= OPC_PLL_PS
;
16401 mips32_op
= OPC_PLU_PS
;
16404 mips32_op
= OPC_PUL_PS
;
16407 mips32_op
= OPC_PUU_PS
;
16410 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16411 mips32_op
= OPC_CVT_PS_S
;
16413 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16416 goto pool32f_invalid
;
16420 check_insn(ctx
, ISA_MIPS32R6
);
16421 switch ((ctx
->opcode
>> 9) & 0x3) {
16423 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16426 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16429 goto pool32f_invalid
;
16434 switch ((ctx
->opcode
>> 6) & 0x7) {
16436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16437 mips32_op
= OPC_LWXC1
;
16440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16441 mips32_op
= OPC_SWXC1
;
16444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16445 mips32_op
= OPC_LDXC1
;
16448 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16449 mips32_op
= OPC_SDXC1
;
16452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16453 mips32_op
= OPC_LUXC1
;
16456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16457 mips32_op
= OPC_SUXC1
;
16459 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16462 goto pool32f_invalid
;
16466 check_insn(ctx
, ISA_MIPS32R6
);
16467 switch ((ctx
->opcode
>> 9) & 0x3) {
16469 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16472 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16475 goto pool32f_invalid
;
16480 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16481 fmt
= (ctx
->opcode
>> 9) & 0x3;
16482 switch ((ctx
->opcode
>> 6) & 0x7) {
16486 mips32_op
= OPC_RSQRT2_S
;
16489 mips32_op
= OPC_RSQRT2_D
;
16492 mips32_op
= OPC_RSQRT2_PS
;
16495 goto pool32f_invalid
;
16501 mips32_op
= OPC_RECIP2_S
;
16504 mips32_op
= OPC_RECIP2_D
;
16507 mips32_op
= OPC_RECIP2_PS
;
16510 goto pool32f_invalid
;
16514 mips32_op
= OPC_ADDR_PS
;
16517 mips32_op
= OPC_MULR_PS
;
16519 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16522 goto pool32f_invalid
;
16526 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16527 cc
= (ctx
->opcode
>> 13) & 0x7;
16528 fmt
= (ctx
->opcode
>> 9) & 0x3;
16529 switch ((ctx
->opcode
>> 6) & 0x7) {
16530 case MOVF_FMT
: /* RINT_FMT */
16531 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16535 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16538 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16541 goto pool32f_invalid
;
16547 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16550 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16554 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16557 goto pool32f_invalid
;
16561 case MOVT_FMT
: /* CLASS_FMT */
16562 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16566 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16569 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16572 goto pool32f_invalid
;
16578 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16581 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16585 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16588 goto pool32f_invalid
;
16593 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16596 goto pool32f_invalid
;
16599 #define FINSN_3ARG_SDPS(prfx) \
16600 switch ((ctx->opcode >> 8) & 0x3) { \
16602 mips32_op = OPC_##prfx##_S; \
16605 mips32_op = OPC_##prfx##_D; \
16607 case FMT_SDPS_PS: \
16609 mips32_op = OPC_##prfx##_PS; \
16612 goto pool32f_invalid; \
16615 check_insn(ctx
, ISA_MIPS32R6
);
16616 switch ((ctx
->opcode
>> 9) & 0x3) {
16618 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16621 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16624 goto pool32f_invalid
;
16628 check_insn(ctx
, ISA_MIPS32R6
);
16629 switch ((ctx
->opcode
>> 9) & 0x3) {
16631 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16634 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16637 goto pool32f_invalid
;
16641 /* regular FP ops */
16642 switch ((ctx
->opcode
>> 6) & 0x3) {
16644 FINSN_3ARG_SDPS(ADD
);
16647 FINSN_3ARG_SDPS(SUB
);
16650 FINSN_3ARG_SDPS(MUL
);
16653 fmt
= (ctx
->opcode
>> 8) & 0x3;
16655 mips32_op
= OPC_DIV_D
;
16656 } else if (fmt
== 0) {
16657 mips32_op
= OPC_DIV_S
;
16659 goto pool32f_invalid
;
16663 goto pool32f_invalid
;
16668 switch ((ctx
->opcode
>> 6) & 0x7) {
16669 case MOVN_FMT
: /* SELEQZ_FMT */
16670 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16672 switch ((ctx
->opcode
>> 9) & 0x3) {
16674 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16677 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16680 goto pool32f_invalid
;
16684 FINSN_3ARG_SDPS(MOVN
);
16688 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16689 FINSN_3ARG_SDPS(MOVN
);
16691 case MOVZ_FMT
: /* SELNEZ_FMT */
16692 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16694 switch ((ctx
->opcode
>> 9) & 0x3) {
16696 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16699 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16702 goto pool32f_invalid
;
16706 FINSN_3ARG_SDPS(MOVZ
);
16710 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16711 FINSN_3ARG_SDPS(MOVZ
);
16714 check_insn(ctx
, ISA_MIPS32R6
);
16715 switch ((ctx
->opcode
>> 9) & 0x3) {
16717 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16720 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16723 goto pool32f_invalid
;
16727 check_insn(ctx
, ISA_MIPS32R6
);
16728 switch ((ctx
->opcode
>> 9) & 0x3) {
16730 mips32_op
= OPC_MADDF_S
;
16733 mips32_op
= OPC_MADDF_D
;
16736 goto pool32f_invalid
;
16740 check_insn(ctx
, ISA_MIPS32R6
);
16741 switch ((ctx
->opcode
>> 9) & 0x3) {
16743 mips32_op
= OPC_MSUBF_S
;
16746 mips32_op
= OPC_MSUBF_D
;
16749 goto pool32f_invalid
;
16753 goto pool32f_invalid
;
16757 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16761 MIPS_INVAL("pool32f");
16762 generate_exception_end(ctx
, EXCP_RI
);
16766 generate_exception_err(ctx
, EXCP_CpU
, 1);
16770 minor
= (ctx
->opcode
>> 21) & 0x1f;
16773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16774 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16777 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16778 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16779 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16782 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16783 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16784 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16787 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16788 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16791 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16792 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16793 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16796 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16797 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16798 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16802 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16805 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16806 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16810 case TLTI
: /* BC1EQZC */
16811 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16813 check_cp1_enabled(ctx
);
16814 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16817 mips32_op
= OPC_TLTI
;
16821 case TGEI
: /* BC1NEZC */
16822 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16824 check_cp1_enabled(ctx
);
16825 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16828 mips32_op
= OPC_TGEI
;
16833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16834 mips32_op
= OPC_TLTIU
;
16837 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16838 mips32_op
= OPC_TGEIU
;
16840 case TNEI
: /* SYNCI */
16841 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16844 * Break the TB to be able to sync copied instructions
16847 ctx
->base
.is_jmp
= DISAS_STOP
;
16850 mips32_op
= OPC_TNEI
;
16855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16856 mips32_op
= OPC_TEQI
;
16858 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16864 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16865 4, rs
, 0, imm
<< 1, 0);
16867 * Compact branches don't have a delay slot, so just let
16868 * the normal delay slot handling take us to the branch
16873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16874 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16877 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16879 * Break the TB to be able to sync copied instructions
16882 ctx
->base
.is_jmp
= DISAS_STOP
;
16886 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16887 /* COP2: Not implemented. */
16888 generate_exception_err(ctx
, EXCP_CpU
, 2);
16891 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16892 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16896 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16899 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16900 mips32_op
= OPC_BC1FANY4
;
16903 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16904 mips32_op
= OPC_BC1TANY4
;
16907 check_insn(ctx
, ASE_MIPS3D
);
16910 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16911 check_cp1_enabled(ctx
);
16912 gen_compute_branch1(ctx
, mips32_op
,
16913 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16915 generate_exception_err(ctx
, EXCP_CpU
, 1);
16920 /* MIPS DSP: not implemented */
16923 MIPS_INVAL("pool32i");
16924 generate_exception_end(ctx
, EXCP_RI
);
16929 minor
= (ctx
->opcode
>> 12) & 0xf;
16930 offset
= sextract32(ctx
->opcode
, 0,
16931 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16934 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16935 mips32_op
= OPC_LWL
;
16938 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16939 mips32_op
= OPC_SWL
;
16942 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16943 mips32_op
= OPC_LWR
;
16946 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16947 mips32_op
= OPC_SWR
;
16949 #if defined(TARGET_MIPS64)
16951 check_insn(ctx
, ISA_MIPS3
);
16952 check_mips_64(ctx
);
16953 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16954 mips32_op
= OPC_LDL
;
16957 check_insn(ctx
, ISA_MIPS3
);
16958 check_mips_64(ctx
);
16959 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16960 mips32_op
= OPC_SDL
;
16963 check_insn(ctx
, ISA_MIPS3
);
16964 check_mips_64(ctx
);
16965 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16966 mips32_op
= OPC_LDR
;
16969 check_insn(ctx
, ISA_MIPS3
);
16970 check_mips_64(ctx
);
16971 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16972 mips32_op
= OPC_SDR
;
16975 check_insn(ctx
, ISA_MIPS3
);
16976 check_mips_64(ctx
);
16977 mips32_op
= OPC_LWU
;
16980 check_insn(ctx
, ISA_MIPS3
);
16981 check_mips_64(ctx
);
16982 mips32_op
= OPC_LLD
;
16986 mips32_op
= OPC_LL
;
16989 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16992 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16995 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, false);
16997 #if defined(TARGET_MIPS64)
16999 check_insn(ctx
, ISA_MIPS3
);
17000 check_mips_64(ctx
);
17001 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TEQ
, false);
17006 MIPS_INVAL("pool32c ld-eva");
17007 generate_exception_end(ctx
, EXCP_RI
);
17010 check_cp0_enabled(ctx
);
17012 minor2
= (ctx
->opcode
>> 9) & 0x7;
17013 offset
= sextract32(ctx
->opcode
, 0, 9);
17016 mips32_op
= OPC_LBUE
;
17019 mips32_op
= OPC_LHUE
;
17022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17023 mips32_op
= OPC_LWLE
;
17026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17027 mips32_op
= OPC_LWRE
;
17030 mips32_op
= OPC_LBE
;
17033 mips32_op
= OPC_LHE
;
17036 mips32_op
= OPC_LLE
;
17039 mips32_op
= OPC_LWE
;
17045 MIPS_INVAL("pool32c st-eva");
17046 generate_exception_end(ctx
, EXCP_RI
);
17049 check_cp0_enabled(ctx
);
17051 minor2
= (ctx
->opcode
>> 9) & 0x7;
17052 offset
= sextract32(ctx
->opcode
, 0, 9);
17055 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17056 mips32_op
= OPC_SWLE
;
17059 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17060 mips32_op
= OPC_SWRE
;
17063 /* Treat as no-op */
17064 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
17065 /* hint codes 24-31 are reserved and signal RI */
17066 generate_exception(ctx
, EXCP_RI
);
17070 /* Treat as no-op */
17071 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17072 gen_cache_operation(ctx
, rt
, rs
, offset
);
17076 mips32_op
= OPC_SBE
;
17079 mips32_op
= OPC_SHE
;
17082 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, true);
17085 mips32_op
= OPC_SWE
;
17090 /* Treat as no-op */
17091 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
17092 /* hint codes 24-31 are reserved and signal RI */
17093 generate_exception(ctx
, EXCP_RI
);
17097 MIPS_INVAL("pool32c");
17098 generate_exception_end(ctx
, EXCP_RI
);
17102 case ADDI32
: /* AUI, LUI */
17103 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17105 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
17108 mips32_op
= OPC_ADDI
;
17113 mips32_op
= OPC_ADDIU
;
17115 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17118 /* Logical operations */
17120 mips32_op
= OPC_ORI
;
17123 mips32_op
= OPC_XORI
;
17126 mips32_op
= OPC_ANDI
;
17128 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17131 /* Set less than immediate */
17133 mips32_op
= OPC_SLTI
;
17136 mips32_op
= OPC_SLTIU
;
17138 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17141 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17142 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
17143 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
17144 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17146 case JALS32
: /* BOVC, BEQC, BEQZALC */
17147 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17150 mips32_op
= OPC_BOVC
;
17151 } else if (rs
< rt
&& rs
== 0) {
17153 mips32_op
= OPC_BEQZALC
;
17156 mips32_op
= OPC_BEQC
;
17158 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17161 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
17162 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
17163 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17166 case BEQ32
: /* BC */
17167 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17169 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17170 sextract32(ctx
->opcode
<< 1, 0, 27));
17173 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17176 case BNE32
: /* BALC */
17177 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17179 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17180 sextract32(ctx
->opcode
<< 1, 0, 27));
17183 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17186 case J32
: /* BGTZC, BLTZC, BLTC */
17187 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17188 if (rs
== 0 && rt
!= 0) {
17190 mips32_op
= OPC_BGTZC
;
17191 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17193 mips32_op
= OPC_BLTZC
;
17196 mips32_op
= OPC_BLTC
;
17198 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17201 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17202 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17205 case JAL32
: /* BLEZC, BGEZC, BGEC */
17206 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17207 if (rs
== 0 && rt
!= 0) {
17209 mips32_op
= OPC_BLEZC
;
17210 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17212 mips32_op
= OPC_BGEZC
;
17215 mips32_op
= OPC_BGEC
;
17217 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17220 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17221 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17222 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17225 /* Floating point (COP1) */
17227 mips32_op
= OPC_LWC1
;
17230 mips32_op
= OPC_LDC1
;
17233 mips32_op
= OPC_SWC1
;
17236 mips32_op
= OPC_SDC1
;
17238 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17240 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17241 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17242 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17243 switch ((ctx
->opcode
>> 16) & 0x1f) {
17252 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17255 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17258 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17268 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17271 generate_exception(ctx
, EXCP_RI
);
17276 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17277 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17279 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17282 case BNVC
: /* BNEC, BNEZALC */
17283 check_insn(ctx
, ISA_MIPS32R6
);
17286 mips32_op
= OPC_BNVC
;
17287 } else if (rs
< rt
&& rs
== 0) {
17289 mips32_op
= OPC_BNEZALC
;
17292 mips32_op
= OPC_BNEC
;
17294 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17296 case R6_BNEZC
: /* JIALC */
17297 check_insn(ctx
, ISA_MIPS32R6
);
17300 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17301 sextract32(ctx
->opcode
<< 1, 0, 22));
17304 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17307 case R6_BEQZC
: /* JIC */
17308 check_insn(ctx
, ISA_MIPS32R6
);
17311 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17312 sextract32(ctx
->opcode
<< 1, 0, 22));
17315 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17318 case BLEZALC
: /* BGEZALC, BGEUC */
17319 check_insn(ctx
, ISA_MIPS32R6
);
17320 if (rs
== 0 && rt
!= 0) {
17322 mips32_op
= OPC_BLEZALC
;
17323 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17325 mips32_op
= OPC_BGEZALC
;
17328 mips32_op
= OPC_BGEUC
;
17330 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17332 case BGTZALC
: /* BLTZALC, BLTUC */
17333 check_insn(ctx
, ISA_MIPS32R6
);
17334 if (rs
== 0 && rt
!= 0) {
17336 mips32_op
= OPC_BGTZALC
;
17337 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17339 mips32_op
= OPC_BLTZALC
;
17342 mips32_op
= OPC_BLTUC
;
17344 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17346 /* Loads and stores */
17348 mips32_op
= OPC_LB
;
17351 mips32_op
= OPC_LBU
;
17354 mips32_op
= OPC_LH
;
17357 mips32_op
= OPC_LHU
;
17360 mips32_op
= OPC_LW
;
17362 #ifdef TARGET_MIPS64
17364 check_insn(ctx
, ISA_MIPS3
);
17365 check_mips_64(ctx
);
17366 mips32_op
= OPC_LD
;
17369 check_insn(ctx
, ISA_MIPS3
);
17370 check_mips_64(ctx
);
17371 mips32_op
= OPC_SD
;
17375 mips32_op
= OPC_SB
;
17378 mips32_op
= OPC_SH
;
17381 mips32_op
= OPC_SW
;
17384 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17387 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17390 generate_exception_end(ctx
, EXCP_RI
);
17395 static int decode_micromips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
17399 /* make sure instructions are on a halfword boundary */
17400 if (ctx
->base
.pc_next
& 0x1) {
17401 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17402 generate_exception_end(ctx
, EXCP_AdEL
);
17406 op
= (ctx
->opcode
>> 10) & 0x3f;
17407 /* Enforce properly-sized instructions in a delay slot */
17408 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17409 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17411 /* POOL32A, POOL32B, POOL32I, POOL32C */
17413 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17415 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17417 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17419 /* LB32, LH32, LWC132, LDC132, LW32 */
17420 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17421 generate_exception_end(ctx
, EXCP_RI
);
17426 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17428 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17430 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17431 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17432 generate_exception_end(ctx
, EXCP_RI
);
17442 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17443 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17444 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17447 switch (ctx
->opcode
& 0x1) {
17455 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17457 * In the Release 6, the register number location in
17458 * the instruction encoding has changed.
17460 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17462 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17468 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17469 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17470 int amount
= (ctx
->opcode
>> 1) & 0x7;
17472 amount
= amount
== 0 ? 8 : amount
;
17474 switch (ctx
->opcode
& 0x1) {
17483 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17487 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17488 gen_pool16c_r6_insn(ctx
);
17490 gen_pool16c_insn(ctx
);
17495 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17496 int rb
= 28; /* GP */
17497 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17499 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17503 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17504 if (ctx
->opcode
& 1) {
17505 generate_exception_end(ctx
, EXCP_RI
);
17508 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17509 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17510 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17511 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17516 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17517 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17518 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17519 offset
= (offset
== 0xf ? -1 : offset
);
17521 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17526 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17527 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17528 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17530 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17535 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17536 int rb
= 29; /* SP */
17537 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17539 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17544 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17545 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17546 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17548 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17553 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17554 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17555 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17557 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17562 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17563 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17564 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17566 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17571 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17572 int rb
= 29; /* SP */
17573 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17575 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17580 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17581 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17582 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17584 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17589 int rd
= uMIPS_RD5(ctx
->opcode
);
17590 int rs
= uMIPS_RS5(ctx
->opcode
);
17592 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17599 switch (ctx
->opcode
& 0x1) {
17609 switch (ctx
->opcode
& 0x1) {
17614 gen_addiur1sp(ctx
);
17618 case B16
: /* BC16 */
17619 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17620 sextract32(ctx
->opcode
, 0, 10) << 1,
17621 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17623 case BNEZ16
: /* BNEZC16 */
17624 case BEQZ16
: /* BEQZC16 */
17625 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17626 mmreg(uMIPS_RD(ctx
->opcode
)),
17627 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17628 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17633 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17634 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17636 imm
= (imm
== 0x7f ? -1 : imm
);
17637 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17643 generate_exception_end(ctx
, EXCP_RI
);
17646 decode_micromips32_opc(env
, ctx
);
17659 /* MAJOR, P16, and P32 pools opcodes */
17663 NM_MOVE_BALC
= 0x02,
17671 NM_P16_SHIFT
= 0x0c,
17689 NM_P_LS_U12
= 0x21,
17699 NM_P16_ADDU
= 0x2c,
17713 NM_MOVEPREV
= 0x3f,
17716 /* POOL32A instruction pool */
17718 NM_POOL32A0
= 0x00,
17719 NM_SPECIAL2
= 0x01,
17722 NM_POOL32A5
= 0x05,
17723 NM_POOL32A7
= 0x07,
17726 /* P.GP.W instruction pool */
17728 NM_ADDIUGP_W
= 0x00,
17733 /* P48I instruction pool */
17737 NM_ADDIUGP48
= 0x02,
17738 NM_ADDIUPC48
= 0x03,
17743 /* P.U12 instruction pool */
17752 NM_ADDIUNEG
= 0x08,
17759 /* POOL32F instruction pool */
17761 NM_POOL32F_0
= 0x00,
17762 NM_POOL32F_3
= 0x03,
17763 NM_POOL32F_5
= 0x05,
17766 /* POOL32S instruction pool */
17768 NM_POOL32S_0
= 0x00,
17769 NM_POOL32S_4
= 0x04,
17772 /* P.LUI instruction pool */
17778 /* P.GP.BH instruction pool */
17783 NM_ADDIUGP_B
= 0x03,
17786 NM_P_GP_CP1
= 0x06,
17789 /* P.LS.U12 instruction pool */
17794 NM_P_PREFU12
= 0x03,
17807 /* P.LS.S9 instruction pool */
17813 NM_P_LS_UAWM
= 0x05,
17816 /* P.BAL instruction pool */
17822 /* P.J instruction pool */
17825 NM_JALRC_HB
= 0x01,
17826 NM_P_BALRSC
= 0x08,
17829 /* P.BR1 instruction pool */
17837 /* P.BR2 instruction pool */
17844 /* P.BRI instruction pool */
17856 /* P16.SHIFT instruction pool */
17862 /* POOL16C instruction pool */
17864 NM_POOL16C_0
= 0x00,
17868 /* P16.A1 instruction pool */
17870 NM_ADDIUR1SP
= 0x01,
17873 /* P16.A2 instruction pool */
17876 NM_P_ADDIURS5
= 0x01,
17879 /* P16.ADDU instruction pool */
17885 /* P16.SR instruction pool */
17888 NM_RESTORE_JRC16
= 0x01,
17891 /* P16.4X4 instruction pool */
17897 /* P16.LB instruction pool */
17904 /* P16.LH instruction pool */
17911 /* P.RI instruction pool */
17914 NM_P_SYSCALL
= 0x01,
17919 /* POOL32A0 instruction pool */
17954 NM_D_E_MT_VPE
= 0x56,
17962 /* CRC32 instruction pool */
17972 /* POOL32A5 instruction pool */
17974 NM_CMP_EQ_PH
= 0x00,
17975 NM_CMP_LT_PH
= 0x08,
17976 NM_CMP_LE_PH
= 0x10,
17977 NM_CMPGU_EQ_QB
= 0x18,
17978 NM_CMPGU_LT_QB
= 0x20,
17979 NM_CMPGU_LE_QB
= 0x28,
17980 NM_CMPGDU_EQ_QB
= 0x30,
17981 NM_CMPGDU_LT_QB
= 0x38,
17982 NM_CMPGDU_LE_QB
= 0x40,
17983 NM_CMPU_EQ_QB
= 0x48,
17984 NM_CMPU_LT_QB
= 0x50,
17985 NM_CMPU_LE_QB
= 0x58,
17986 NM_ADDQ_S_W
= 0x60,
17987 NM_SUBQ_S_W
= 0x68,
17991 NM_ADDQ_S_PH
= 0x01,
17992 NM_ADDQH_R_PH
= 0x09,
17993 NM_ADDQH_R_W
= 0x11,
17994 NM_ADDU_S_QB
= 0x19,
17995 NM_ADDU_S_PH
= 0x21,
17996 NM_ADDUH_R_QB
= 0x29,
17997 NM_SHRAV_R_PH
= 0x31,
17998 NM_SHRAV_R_QB
= 0x39,
17999 NM_SUBQ_S_PH
= 0x41,
18000 NM_SUBQH_R_PH
= 0x49,
18001 NM_SUBQH_R_W
= 0x51,
18002 NM_SUBU_S_QB
= 0x59,
18003 NM_SUBU_S_PH
= 0x61,
18004 NM_SUBUH_R_QB
= 0x69,
18005 NM_SHLLV_S_PH
= 0x71,
18006 NM_PRECR_SRA_R_PH_W
= 0x79,
18008 NM_MULEU_S_PH_QBL
= 0x12,
18009 NM_MULEU_S_PH_QBR
= 0x1a,
18010 NM_MULQ_RS_PH
= 0x22,
18011 NM_MULQ_S_PH
= 0x2a,
18012 NM_MULQ_RS_W
= 0x32,
18013 NM_MULQ_S_W
= 0x3a,
18016 NM_SHRAV_R_W
= 0x5a,
18017 NM_SHRLV_PH
= 0x62,
18018 NM_SHRLV_QB
= 0x6a,
18019 NM_SHLLV_QB
= 0x72,
18020 NM_SHLLV_S_W
= 0x7a,
18024 NM_MULEQ_S_W_PHL
= 0x04,
18025 NM_MULEQ_S_W_PHR
= 0x0c,
18027 NM_MUL_S_PH
= 0x05,
18028 NM_PRECR_QB_PH
= 0x0d,
18029 NM_PRECRQ_QB_PH
= 0x15,
18030 NM_PRECRQ_PH_W
= 0x1d,
18031 NM_PRECRQ_RS_PH_W
= 0x25,
18032 NM_PRECRQU_S_QB_PH
= 0x2d,
18033 NM_PACKRL_PH
= 0x35,
18037 NM_SHRA_R_W
= 0x5e,
18038 NM_SHRA_R_PH
= 0x66,
18039 NM_SHLL_S_PH
= 0x76,
18040 NM_SHLL_S_W
= 0x7e,
18045 /* POOL32A7 instruction pool */
18050 NM_POOL32AXF
= 0x07,
18053 /* P.SR instruction pool */
18059 /* P.SHIFT instruction pool */
18067 /* P.ROTX instruction pool */
18072 /* P.INS instruction pool */
18077 /* P.EXT instruction pool */
18082 /* POOL32F_0 (fmt) instruction pool */
18087 NM_SELEQZ_S
= 0x07,
18088 NM_SELEQZ_D
= 0x47,
18092 NM_SELNEZ_S
= 0x0f,
18093 NM_SELNEZ_D
= 0x4f,
18108 /* POOL32F_3 instruction pool */
18112 NM_MINA_FMT
= 0x04,
18113 NM_MAXA_FMT
= 0x05,
18114 NM_POOL32FXF
= 0x07,
18117 /* POOL32F_5 instruction pool */
18119 NM_CMP_CONDN_S
= 0x00,
18120 NM_CMP_CONDN_D
= 0x02,
18123 /* P.GP.LH instruction pool */
18129 /* P.GP.SH instruction pool */
18134 /* P.GP.CP1 instruction pool */
18142 /* P.LS.S0 instruction pool */
18159 NM_P_PREFS9
= 0x03,
18165 /* P.LS.S1 instruction pool */
18167 NM_ASET_ACLR
= 0x02,
18175 /* P.LS.E0 instruction pool */
18191 /* P.PREFE instruction pool */
18197 /* P.LLE instruction pool */
18203 /* P.SCE instruction pool */
18209 /* P.LS.WM instruction pool */
18215 /* P.LS.UAWM instruction pool */
18221 /* P.BR3A instruction pool */
18227 NM_BPOSGE32C
= 0x04,
18230 /* P16.RI instruction pool */
18232 NM_P16_SYSCALL
= 0x01,
18237 /* POOL16C_0 instruction pool */
18239 NM_POOL16C_00
= 0x00,
18242 /* P16.JRC instruction pool */
18248 /* P.SYSCALL instruction pool */
18254 /* P.TRAP instruction pool */
18260 /* P.CMOVE instruction pool */
18266 /* POOL32Axf instruction pool */
18268 NM_POOL32AXF_1
= 0x01,
18269 NM_POOL32AXF_2
= 0x02,
18270 NM_POOL32AXF_4
= 0x04,
18271 NM_POOL32AXF_5
= 0x05,
18272 NM_POOL32AXF_7
= 0x07,
18275 /* POOL32Axf_1 instruction pool */
18277 NM_POOL32AXF_1_0
= 0x00,
18278 NM_POOL32AXF_1_1
= 0x01,
18279 NM_POOL32AXF_1_3
= 0x03,
18280 NM_POOL32AXF_1_4
= 0x04,
18281 NM_POOL32AXF_1_5
= 0x05,
18282 NM_POOL32AXF_1_7
= 0x07,
18285 /* POOL32Axf_2 instruction pool */
18287 NM_POOL32AXF_2_0_7
= 0x00,
18288 NM_POOL32AXF_2_8_15
= 0x01,
18289 NM_POOL32AXF_2_16_23
= 0x02,
18290 NM_POOL32AXF_2_24_31
= 0x03,
18293 /* POOL32Axf_7 instruction pool */
18295 NM_SHRA_R_QB
= 0x0,
18300 /* POOL32Axf_1_0 instruction pool */
18308 /* POOL32Axf_1_1 instruction pool */
18314 /* POOL32Axf_1_3 instruction pool */
18322 /* POOL32Axf_1_4 instruction pool */
18328 /* POOL32Axf_1_5 instruction pool */
18330 NM_MAQ_S_W_PHR
= 0x0,
18331 NM_MAQ_S_W_PHL
= 0x1,
18332 NM_MAQ_SA_W_PHR
= 0x2,
18333 NM_MAQ_SA_W_PHL
= 0x3,
18336 /* POOL32Axf_1_7 instruction pool */
18340 NM_EXTR_RS_W
= 0x2,
18344 /* POOL32Axf_2_0_7 instruction pool */
18347 NM_DPAQ_S_W_PH
= 0x1,
18349 NM_DPSQ_S_W_PH
= 0x3,
18356 /* POOL32Axf_2_8_15 instruction pool */
18358 NM_DPAX_W_PH
= 0x0,
18359 NM_DPAQ_SA_L_W
= 0x1,
18360 NM_DPSX_W_PH
= 0x2,
18361 NM_DPSQ_SA_L_W
= 0x3,
18364 NM_EXTRV_R_W
= 0x7,
18367 /* POOL32Axf_2_16_23 instruction pool */
18369 NM_DPAU_H_QBL
= 0x0,
18370 NM_DPAQX_S_W_PH
= 0x1,
18371 NM_DPSU_H_QBL
= 0x2,
18372 NM_DPSQX_S_W_PH
= 0x3,
18375 NM_MULSA_W_PH
= 0x6,
18376 NM_EXTRV_RS_W
= 0x7,
18379 /* POOL32Axf_2_24_31 instruction pool */
18381 NM_DPAU_H_QBR
= 0x0,
18382 NM_DPAQX_SA_W_PH
= 0x1,
18383 NM_DPSU_H_QBR
= 0x2,
18384 NM_DPSQX_SA_W_PH
= 0x3,
18387 NM_MULSAQ_S_W_PH
= 0x6,
18388 NM_EXTRV_S_H
= 0x7,
18391 /* POOL32Axf_{4, 5} instruction pool */
18410 /* nanoMIPS DSP instructions */
18411 NM_ABSQ_S_QB
= 0x00,
18412 NM_ABSQ_S_PH
= 0x08,
18413 NM_ABSQ_S_W
= 0x10,
18414 NM_PRECEQ_W_PHL
= 0x28,
18415 NM_PRECEQ_W_PHR
= 0x30,
18416 NM_PRECEQU_PH_QBL
= 0x38,
18417 NM_PRECEQU_PH_QBR
= 0x48,
18418 NM_PRECEU_PH_QBL
= 0x58,
18419 NM_PRECEU_PH_QBR
= 0x68,
18420 NM_PRECEQU_PH_QBLA
= 0x39,
18421 NM_PRECEQU_PH_QBRA
= 0x49,
18422 NM_PRECEU_PH_QBLA
= 0x59,
18423 NM_PRECEU_PH_QBRA
= 0x69,
18424 NM_REPLV_PH
= 0x01,
18425 NM_REPLV_QB
= 0x09,
18428 NM_RADDU_W_QB
= 0x78,
18434 /* PP.SR instruction pool */
18438 NM_RESTORE_JRC
= 0x03,
18441 /* P.SR.F instruction pool */
18444 NM_RESTOREF
= 0x01,
18447 /* P16.SYSCALL instruction pool */
18449 NM_SYSCALL16
= 0x00,
18450 NM_HYPCALL16
= 0x01,
18453 /* POOL16C_00 instruction pool */
18461 /* PP.LSX and PP.LSXS instruction pool */
18499 /* ERETx instruction pool */
18505 /* POOL32FxF_{0, 1} insturction pool */
18514 NM_CVT_S_PL
= 0x84,
18515 NM_CVT_S_PU
= 0xa4,
18517 NM_CVT_L_S
= 0x004,
18518 NM_CVT_L_D
= 0x104,
18519 NM_CVT_W_S
= 0x024,
18520 NM_CVT_W_D
= 0x124,
18522 NM_RSQRT_S
= 0x008,
18523 NM_RSQRT_D
= 0x108,
18528 NM_RECIP_S
= 0x048,
18529 NM_RECIP_D
= 0x148,
18531 NM_FLOOR_L_S
= 0x00c,
18532 NM_FLOOR_L_D
= 0x10c,
18534 NM_FLOOR_W_S
= 0x02c,
18535 NM_FLOOR_W_D
= 0x12c,
18537 NM_CEIL_L_S
= 0x04c,
18538 NM_CEIL_L_D
= 0x14c,
18539 NM_CEIL_W_S
= 0x06c,
18540 NM_CEIL_W_D
= 0x16c,
18541 NM_TRUNC_L_S
= 0x08c,
18542 NM_TRUNC_L_D
= 0x18c,
18543 NM_TRUNC_W_S
= 0x0ac,
18544 NM_TRUNC_W_D
= 0x1ac,
18545 NM_ROUND_L_S
= 0x0cc,
18546 NM_ROUND_L_D
= 0x1cc,
18547 NM_ROUND_W_S
= 0x0ec,
18548 NM_ROUND_W_D
= 0x1ec,
18556 NM_CVT_D_S
= 0x04d,
18557 NM_CVT_D_W
= 0x0cd,
18558 NM_CVT_D_L
= 0x14d,
18559 NM_CVT_S_D
= 0x06d,
18560 NM_CVT_S_W
= 0x0ed,
18561 NM_CVT_S_L
= 0x16d,
18564 /* P.LL instruction pool */
18570 /* P.SC instruction pool */
18576 /* P.DVP instruction pool */
18585 * nanoMIPS decoding engine
18590 /* extraction utilities */
18592 #define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
18593 #define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
18594 #define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
18595 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18596 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    int idx = r & 0x7;

    /* Encodings 0..3 select $16..$19; encodings 4..7 select $4..$7. */
    return (idx < 4) ? idx + 16 : idx;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    int idx = r & 0x7;

    if (idx == 0) {
        /* For store sources, encoding 0 selects $zero instead of $16. */
        return 0;
    }
    /* Encodings 1..3 select $17..$19; encodings 4..7 select $4..$7. */
    return (idx < 4) ? idx + 16 : idx;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    int idx = r & 0xf;

    /*
     * Encodings 0..3 select $8..$11, 4..7 select $4..$7,
     * and 8..15 select $16..$23.
     */
    return (idx >= 4 && idx < 8) ? idx : idx + 8;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    int idx = r & 0xf;

    if (idx == 3) {
        /* Unlike plain gpr4, encoding 3 selects $zero rather than $11. */
        return 0;
    }
    /* 0..2 -> $8..$10, 4..7 -> $4..$7, 8..15 -> $16..$23. */
    return (idx >= 4 && idx < 8) ? idx : idx + 8;
}
18633 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18635 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18638 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18639 uint8_t gp
, uint16_t u
)
18642 TCGv va
= tcg_temp_new();
18643 TCGv t0
= tcg_temp_new();
18645 while (counter
!= count
) {
18646 bool use_gp
= gp
&& (counter
== count
- 1);
18647 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18648 int this_offset
= -((counter
+ 1) << 2);
18649 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18650 gen_load_gpr(t0
, this_rt
);
18651 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18652 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18656 /* adjust stack pointer */
18657 gen_adjust_sp(ctx
, -u
);
18663 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18664 uint8_t gp
, uint16_t u
)
18667 TCGv va
= tcg_temp_new();
18668 TCGv t0
= tcg_temp_new();
18670 while (counter
!= count
) {
18671 bool use_gp
= gp
&& (counter
== count
- 1);
18672 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18673 int this_offset
= u
- ((counter
+ 1) << 2);
18674 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18675 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18676 ctx
->default_tcg_memop_mask
);
18677 tcg_gen_ext32s_tl(t0
, t0
);
18678 gen_store_gpr(t0
, this_rt
);
18682 /* adjust stack pointer */
18683 gen_adjust_sp(ctx
, u
);
18689 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18691 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
18692 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
18694 switch (extract32(ctx
->opcode
, 2, 2)) {
18696 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18699 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18702 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18705 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18710 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18712 int rt
= extract32(ctx
->opcode
, 21, 5);
18713 int rs
= extract32(ctx
->opcode
, 16, 5);
18714 int rd
= extract32(ctx
->opcode
, 11, 5);
18716 switch (extract32(ctx
->opcode
, 3, 7)) {
18718 switch (extract32(ctx
->opcode
, 10, 1)) {
18721 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18725 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18731 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18735 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18738 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18741 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18744 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18747 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18750 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18753 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18756 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18760 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18763 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18766 switch (extract32(ctx
->opcode
, 10, 1)) {
18768 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18771 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18776 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18779 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18782 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18785 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18788 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18793 #ifndef CONFIG_USER_ONLY
18794 TCGv t0
= tcg_temp_new();
18795 switch (extract32(ctx
->opcode
, 10, 1)) {
18798 check_cp0_enabled(ctx
);
18799 gen_helper_dvp(t0
, cpu_env
);
18800 gen_store_gpr(t0
, rt
);
18805 check_cp0_enabled(ctx
);
18806 gen_helper_evp(t0
, cpu_env
);
18807 gen_store_gpr(t0
, rt
);
18814 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18819 TCGv t0
= tcg_temp_new();
18820 TCGv t1
= tcg_temp_new();
18821 TCGv t2
= tcg_temp_new();
18823 gen_load_gpr(t1
, rs
);
18824 gen_load_gpr(t2
, rt
);
18825 tcg_gen_add_tl(t0
, t1
, t2
);
18826 tcg_gen_ext32s_tl(t0
, t0
);
18827 tcg_gen_xor_tl(t1
, t1
, t2
);
18828 tcg_gen_xor_tl(t2
, t0
, t2
);
18829 tcg_gen_andc_tl(t1
, t2
, t1
);
18831 /* operands of same sign, result different sign */
18832 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18833 gen_store_gpr(t0
, rd
);
18841 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18844 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18847 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18850 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18853 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18856 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18859 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18862 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18864 #ifndef CONFIG_USER_ONLY
18866 check_cp0_enabled(ctx
);
18868 /* Treat as NOP. */
18871 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18874 check_cp0_enabled(ctx
);
18876 TCGv t0
= tcg_temp_new();
18878 gen_load_gpr(t0
, rt
);
18879 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18883 case NM_D_E_MT_VPE
:
18885 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18886 TCGv t0
= tcg_temp_new();
18893 gen_helper_dmt(t0
);
18894 gen_store_gpr(t0
, rt
);
18895 } else if (rs
== 0) {
18898 gen_helper_dvpe(t0
, cpu_env
);
18899 gen_store_gpr(t0
, rt
);
18901 generate_exception_end(ctx
, EXCP_RI
);
18908 gen_helper_emt(t0
);
18909 gen_store_gpr(t0
, rt
);
18910 } else if (rs
== 0) {
18913 gen_helper_evpe(t0
, cpu_env
);
18914 gen_store_gpr(t0
, rt
);
18916 generate_exception_end(ctx
, EXCP_RI
);
18927 TCGv t0
= tcg_temp_new();
18928 TCGv t1
= tcg_temp_new();
18930 gen_load_gpr(t0
, rt
);
18931 gen_load_gpr(t1
, rs
);
18932 gen_helper_fork(t0
, t1
);
18939 check_cp0_enabled(ctx
);
18941 /* Treat as NOP. */
18944 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18945 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18949 check_cp0_enabled(ctx
);
18950 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18951 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18956 TCGv t0
= tcg_temp_new();
18958 gen_load_gpr(t0
, rs
);
18959 gen_helper_yield(t0
, cpu_env
, t0
);
18960 gen_store_gpr(t0
, rt
);
18966 generate_exception_end(ctx
, EXCP_RI
);
18972 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18973 int ret
, int v1
, int v2
)
18979 t0
= tcg_temp_new_i32();
18981 v0_t
= tcg_temp_new();
18982 v1_t
= tcg_temp_new();
18984 tcg_gen_movi_i32(t0
, v2
>> 3);
18986 gen_load_gpr(v0_t
, ret
);
18987 gen_load_gpr(v1_t
, v1
);
18990 case NM_MAQ_S_W_PHR
:
18992 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18994 case NM_MAQ_S_W_PHL
:
18996 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18998 case NM_MAQ_SA_W_PHR
:
19000 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
19002 case NM_MAQ_SA_W_PHL
:
19004 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
19007 generate_exception_end(ctx
, EXCP_RI
);
19011 tcg_temp_free_i32(t0
);
19013 tcg_temp_free(v0_t
);
19014 tcg_temp_free(v1_t
);
19018 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19019 int ret
, int v1
, int v2
)
19022 TCGv t0
= tcg_temp_new();
19023 TCGv t1
= tcg_temp_new();
19024 TCGv v0_t
= tcg_temp_new();
19026 gen_load_gpr(v0_t
, v1
);
19029 case NM_POOL32AXF_1_0
:
19031 switch (extract32(ctx
->opcode
, 12, 2)) {
19033 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
19036 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
19039 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
19042 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
19046 case NM_POOL32AXF_1_1
:
19048 switch (extract32(ctx
->opcode
, 12, 2)) {
19050 tcg_gen_movi_tl(t0
, v2
);
19051 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
19054 tcg_gen_movi_tl(t0
, v2
>> 3);
19055 gen_helper_shilo(t0
, v0_t
, cpu_env
);
19058 generate_exception_end(ctx
, EXCP_RI
);
19062 case NM_POOL32AXF_1_3
:
19064 imm
= extract32(ctx
->opcode
, 14, 7);
19065 switch (extract32(ctx
->opcode
, 12, 2)) {
19067 tcg_gen_movi_tl(t0
, imm
);
19068 gen_helper_rddsp(t0
, t0
, cpu_env
);
19069 gen_store_gpr(t0
, ret
);
19072 gen_load_gpr(t0
, ret
);
19073 tcg_gen_movi_tl(t1
, imm
);
19074 gen_helper_wrdsp(t0
, t1
, cpu_env
);
19077 tcg_gen_movi_tl(t0
, v2
>> 3);
19078 tcg_gen_movi_tl(t1
, v1
);
19079 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
19080 gen_store_gpr(t0
, ret
);
19083 tcg_gen_movi_tl(t0
, v2
>> 3);
19084 tcg_gen_movi_tl(t1
, v1
);
19085 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
19086 gen_store_gpr(t0
, ret
);
19090 case NM_POOL32AXF_1_4
:
19092 tcg_gen_movi_tl(t0
, v2
>> 2);
19093 switch (extract32(ctx
->opcode
, 12, 1)) {
19095 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
19096 gen_store_gpr(t0
, ret
);
19099 gen_helper_shrl_qb(t0
, t0
, v0_t
);
19100 gen_store_gpr(t0
, ret
);
19104 case NM_POOL32AXF_1_5
:
19105 opc
= extract32(ctx
->opcode
, 12, 2);
19106 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
19108 case NM_POOL32AXF_1_7
:
19110 tcg_gen_movi_tl(t0
, v2
>> 3);
19111 tcg_gen_movi_tl(t1
, v1
);
19112 switch (extract32(ctx
->opcode
, 12, 2)) {
19114 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
19115 gen_store_gpr(t0
, ret
);
19118 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
19119 gen_store_gpr(t0
, ret
);
19122 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
19123 gen_store_gpr(t0
, ret
);
19126 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
19127 gen_store_gpr(t0
, ret
);
19132 generate_exception_end(ctx
, EXCP_RI
);
19138 tcg_temp_free(v0_t
);
19141 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19142 TCGv v0
, TCGv v1
, int rd
)
19146 t0
= tcg_temp_new_i32();
19148 tcg_gen_movi_i32(t0
, rd
>> 3);
19151 case NM_POOL32AXF_2_0_7
:
19152 switch (extract32(ctx
->opcode
, 9, 3)) {
19155 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19157 case NM_DPAQ_S_W_PH
:
19159 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19163 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19165 case NM_DPSQ_S_W_PH
:
19167 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19170 generate_exception_end(ctx
, EXCP_RI
);
19174 case NM_POOL32AXF_2_8_15
:
19175 switch (extract32(ctx
->opcode
, 9, 3)) {
19178 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19180 case NM_DPAQ_SA_L_W
:
19182 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19186 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19188 case NM_DPSQ_SA_L_W
:
19190 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19193 generate_exception_end(ctx
, EXCP_RI
);
19197 case NM_POOL32AXF_2_16_23
:
19198 switch (extract32(ctx
->opcode
, 9, 3)) {
19199 case NM_DPAU_H_QBL
:
19201 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19203 case NM_DPAQX_S_W_PH
:
19205 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19207 case NM_DPSU_H_QBL
:
19209 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19211 case NM_DPSQX_S_W_PH
:
19213 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19215 case NM_MULSA_W_PH
:
19217 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19220 generate_exception_end(ctx
, EXCP_RI
);
19224 case NM_POOL32AXF_2_24_31
:
19225 switch (extract32(ctx
->opcode
, 9, 3)) {
19226 case NM_DPAU_H_QBR
:
19228 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19230 case NM_DPAQX_SA_W_PH
:
19232 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19234 case NM_DPSU_H_QBR
:
19236 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19238 case NM_DPSQX_SA_W_PH
:
19240 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19242 case NM_MULSAQ_S_W_PH
:
19244 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19247 generate_exception_end(ctx
, EXCP_RI
);
19252 generate_exception_end(ctx
, EXCP_RI
);
19256 tcg_temp_free_i32(t0
);
19259 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19260 int rt
, int rs
, int rd
)
19263 TCGv t0
= tcg_temp_new();
19264 TCGv t1
= tcg_temp_new();
19265 TCGv v0_t
= tcg_temp_new();
19266 TCGv v1_t
= tcg_temp_new();
19268 gen_load_gpr(v0_t
, rt
);
19269 gen_load_gpr(v1_t
, rs
);
19272 case NM_POOL32AXF_2_0_7
:
19273 switch (extract32(ctx
->opcode
, 9, 3)) {
19275 case NM_DPAQ_S_W_PH
:
19277 case NM_DPSQ_S_W_PH
:
19278 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19283 gen_load_gpr(t0
, rs
);
19285 if (rd
!= 0 && rd
!= 2) {
19286 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19287 tcg_gen_ext32u_tl(t0
, t0
);
19288 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19289 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19291 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19297 int acc
= extract32(ctx
->opcode
, 14, 2);
19298 TCGv_i64 t2
= tcg_temp_new_i64();
19299 TCGv_i64 t3
= tcg_temp_new_i64();
19301 gen_load_gpr(t0
, rt
);
19302 gen_load_gpr(t1
, rs
);
19303 tcg_gen_ext_tl_i64(t2
, t0
);
19304 tcg_gen_ext_tl_i64(t3
, t1
);
19305 tcg_gen_mul_i64(t2
, t2
, t3
);
19306 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19307 tcg_gen_add_i64(t2
, t2
, t3
);
19308 tcg_temp_free_i64(t3
);
19309 gen_move_low32(cpu_LO
[acc
], t2
);
19310 gen_move_high32(cpu_HI
[acc
], t2
);
19311 tcg_temp_free_i64(t2
);
19317 int acc
= extract32(ctx
->opcode
, 14, 2);
19318 TCGv_i32 t2
= tcg_temp_new_i32();
19319 TCGv_i32 t3
= tcg_temp_new_i32();
19321 gen_load_gpr(t0
, rs
);
19322 gen_load_gpr(t1
, rt
);
19323 tcg_gen_trunc_tl_i32(t2
, t0
);
19324 tcg_gen_trunc_tl_i32(t3
, t1
);
19325 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19326 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19327 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19328 tcg_temp_free_i32(t2
);
19329 tcg_temp_free_i32(t3
);
19334 gen_load_gpr(v1_t
, rs
);
19335 tcg_gen_movi_tl(t0
, rd
>> 3);
19336 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19337 gen_store_gpr(t0
, ret
);
19341 case NM_POOL32AXF_2_8_15
:
19342 switch (extract32(ctx
->opcode
, 9, 3)) {
19344 case NM_DPAQ_SA_L_W
:
19346 case NM_DPSQ_SA_L_W
:
19347 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19352 int acc
= extract32(ctx
->opcode
, 14, 2);
19353 TCGv_i64 t2
= tcg_temp_new_i64();
19354 TCGv_i64 t3
= tcg_temp_new_i64();
19356 gen_load_gpr(t0
, rs
);
19357 gen_load_gpr(t1
, rt
);
19358 tcg_gen_ext32u_tl(t0
, t0
);
19359 tcg_gen_ext32u_tl(t1
, t1
);
19360 tcg_gen_extu_tl_i64(t2
, t0
);
19361 tcg_gen_extu_tl_i64(t3
, t1
);
19362 tcg_gen_mul_i64(t2
, t2
, t3
);
19363 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19364 tcg_gen_add_i64(t2
, t2
, t3
);
19365 tcg_temp_free_i64(t3
);
19366 gen_move_low32(cpu_LO
[acc
], t2
);
19367 gen_move_high32(cpu_HI
[acc
], t2
);
19368 tcg_temp_free_i64(t2
);
19374 int acc
= extract32(ctx
->opcode
, 14, 2);
19375 TCGv_i32 t2
= tcg_temp_new_i32();
19376 TCGv_i32 t3
= tcg_temp_new_i32();
19378 gen_load_gpr(t0
, rs
);
19379 gen_load_gpr(t1
, rt
);
19380 tcg_gen_trunc_tl_i32(t2
, t0
);
19381 tcg_gen_trunc_tl_i32(t3
, t1
);
19382 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19383 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19384 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19385 tcg_temp_free_i32(t2
);
19386 tcg_temp_free_i32(t3
);
19391 tcg_gen_movi_tl(t0
, rd
>> 3);
19392 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19393 gen_store_gpr(t0
, ret
);
19396 generate_exception_end(ctx
, EXCP_RI
);
19400 case NM_POOL32AXF_2_16_23
:
19401 switch (extract32(ctx
->opcode
, 9, 3)) {
19402 case NM_DPAU_H_QBL
:
19403 case NM_DPAQX_S_W_PH
:
19404 case NM_DPSU_H_QBL
:
19405 case NM_DPSQX_S_W_PH
:
19406 case NM_MULSA_W_PH
:
19407 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19411 tcg_gen_movi_tl(t0
, rd
>> 3);
19412 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19413 gen_store_gpr(t0
, ret
);
19418 int acc
= extract32(ctx
->opcode
, 14, 2);
19419 TCGv_i64 t2
= tcg_temp_new_i64();
19420 TCGv_i64 t3
= tcg_temp_new_i64();
19422 gen_load_gpr(t0
, rs
);
19423 gen_load_gpr(t1
, rt
);
19424 tcg_gen_ext_tl_i64(t2
, t0
);
19425 tcg_gen_ext_tl_i64(t3
, t1
);
19426 tcg_gen_mul_i64(t2
, t2
, t3
);
19427 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19428 tcg_gen_sub_i64(t2
, t3
, t2
);
19429 tcg_temp_free_i64(t3
);
19430 gen_move_low32(cpu_LO
[acc
], t2
);
19431 gen_move_high32(cpu_HI
[acc
], t2
);
19432 tcg_temp_free_i64(t2
);
19435 case NM_EXTRV_RS_W
:
19437 tcg_gen_movi_tl(t0
, rd
>> 3);
19438 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19439 gen_store_gpr(t0
, ret
);
19443 case NM_POOL32AXF_2_24_31
:
19444 switch (extract32(ctx
->opcode
, 9, 3)) {
19445 case NM_DPAU_H_QBR
:
19446 case NM_DPAQX_SA_W_PH
:
19447 case NM_DPSU_H_QBR
:
19448 case NM_DPSQX_SA_W_PH
:
19449 case NM_MULSAQ_S_W_PH
:
19450 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19454 tcg_gen_movi_tl(t0
, rd
>> 3);
19455 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19456 gen_store_gpr(t0
, ret
);
19461 int acc
= extract32(ctx
->opcode
, 14, 2);
19462 TCGv_i64 t2
= tcg_temp_new_i64();
19463 TCGv_i64 t3
= tcg_temp_new_i64();
19465 gen_load_gpr(t0
, rs
);
19466 gen_load_gpr(t1
, rt
);
19467 tcg_gen_ext32u_tl(t0
, t0
);
19468 tcg_gen_ext32u_tl(t1
, t1
);
19469 tcg_gen_extu_tl_i64(t2
, t0
);
19470 tcg_gen_extu_tl_i64(t3
, t1
);
19471 tcg_gen_mul_i64(t2
, t2
, t3
);
19472 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19473 tcg_gen_sub_i64(t2
, t3
, t2
);
19474 tcg_temp_free_i64(t3
);
19475 gen_move_low32(cpu_LO
[acc
], t2
);
19476 gen_move_high32(cpu_HI
[acc
], t2
);
19477 tcg_temp_free_i64(t2
);
19482 tcg_gen_movi_tl(t0
, rd
>> 3);
19483 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19484 gen_store_gpr(t0
, ret
);
19489 generate_exception_end(ctx
, EXCP_RI
);
19496 tcg_temp_free(v0_t
);
19497 tcg_temp_free(v1_t
);
/*
 * gen_pool32axf_4_nanomips_insn: emit TCG ops for one nanoMIPS
 * POOL32Axf_4 sub-opcode. The visible helper calls are the DSP ASE
 * absq_s/preceq/precequ/preceu/bitrev/insv/raddu operations plus
 * bitswap/clo/clz/wsbh — presumably dispatched on `opc`; the case
 * labels for most arms were dropped by the extraction (TODO confirm
 * against upstream).
 *
 * NOTE(review): this block is a damaged extraction — each statement is
 * split across several lines, the upstream file's line numbers (19500,
 * 19504, ...) are fused into the text, and gaps in those numbers show
 * that lines (case labels, break statements, braces) are missing.
 * Code below is preserved byte-for-byte; restore from upstream before
 * compiling.
 */
19500 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
/* Two TCG temporaries; v0_t is loaded from GPR rs and reused as both
 * source and destination by most arms below. */
19504 TCGv t0
= tcg_temp_new();
19505 TCGv v0_t
= tcg_temp_new();
19507 gen_load_gpr(v0_t
, rs
);
/* DSP saturating absolute-value helpers (qb/ph/w variants). */
19512 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19513 gen_store_gpr(v0_t
, ret
);
19517 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19518 gen_store_gpr(v0_t
, ret
);
19522 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19523 gen_store_gpr(v0_t
, ret
);
/* PRECEQ.W.PHL: keep the upper halfword, sign-extend to 32 bits. */
19525 case NM_PRECEQ_W_PHL
:
19527 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19528 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19529 gen_store_gpr(v0_t
, ret
);
/* PRECEQ.W.PHR: move the lower halfword to the top, sign-extend. */
19531 case NM_PRECEQ_W_PHR
:
19533 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19534 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19535 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19536 gen_store_gpr(v0_t
, ret
);
19538 case NM_PRECEQU_PH_QBL
:
19540 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19541 gen_store_gpr(v0_t
, ret
);
19543 case NM_PRECEQU_PH_QBR
:
19545 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19546 gen_store_gpr(v0_t
, ret
);
19548 case NM_PRECEQU_PH_QBLA
:
19550 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19551 gen_store_gpr(v0_t
, ret
);
19553 case NM_PRECEQU_PH_QBRA
:
19555 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19556 gen_store_gpr(v0_t
, ret
);
19558 case NM_PRECEU_PH_QBL
:
19560 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19561 gen_store_gpr(v0_t
, ret
);
19563 case NM_PRECEU_PH_QBR
:
19565 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19566 gen_store_gpr(v0_t
, ret
);
19568 case NM_PRECEU_PH_QBLA
:
19570 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19571 gen_store_gpr(v0_t
, ret
);
19573 case NM_PRECEU_PH_QBRA
:
19575 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19576 gen_store_gpr(v0_t
, ret
);
/* Replicate the low halfword into both halves of a 32-bit result
 * (case label missing — presumably REPLV.PH; TODO confirm). */
19580 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19581 tcg_gen_shli_tl(t0
, v0_t
, 16);
19582 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19583 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19584 gen_store_gpr(v0_t
, ret
);
/* Replicate the low byte into all four byte lanes (presumably
 * REPLV.QB; TODO confirm). */
19588 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19589 tcg_gen_shli_tl(t0
, v0_t
, 8);
19590 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19591 tcg_gen_shli_tl(t0
, v0_t
, 16);
19592 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19593 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19594 gen_store_gpr(v0_t
, ret
);
19598 gen_helper_bitrev(v0_t
, v0_t
);
19599 gen_store_gpr(v0_t
, ret
);
/* INSV: extra temporary for the rt operand, freed immediately after. */
19604 TCGv tv0
= tcg_temp_new();
19606 gen_load_gpr(tv0
, rt
);
19607 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19608 gen_store_gpr(v0_t
, ret
);
19609 tcg_temp_free(tv0
);
19612 case NM_RADDU_W_QB
:
19614 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19615 gen_store_gpr(v0_t
, ret
);
/* Non-DSP arms delegate to the shared MIPS translators. */
19618 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19622 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19626 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19629 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19632 generate_exception_end(ctx
, EXCP_RI
);
19636 tcg_temp_free(v0_t
);
/*
 * gen_pool32axf_7_nanomips_insn: emit TCG ops for one POOL32Axf_7
 * sub-opcode — immediate DSP shifts (shra[_r].qb, shrl.ph) and an
 * immediate-replication arm that builds a byte-replicated constant
 * (presumably REPL.QB; TODO confirm against upstream).
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, and several lines (case labels,
 * breaks, one OR-term of the `result` expression at upstream line
 * 19680) are missing. Preserved byte-for-byte.
 */
19640 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19641 int rt
, int rs
, int rd
)
19643 TCGv t0
= tcg_temp_new();
19644 TCGv rs_t
= tcg_temp_new();
19646 gen_load_gpr(rs_t
, rs
);
/* Shift amount is encoded in rd (here rd >> 2); opcode bit 12 picks
 * the plain vs. rounding variant. */
19651 tcg_gen_movi_tl(t0
, rd
>> 2);
19652 switch (extract32(ctx
->opcode
, 12, 1)) {
19655 gen_helper_shra_qb(t0
, t0
, rs_t
);
19656 gen_store_gpr(t0
, rt
);
19660 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19661 gen_store_gpr(t0
, rt
);
19667 tcg_gen_movi_tl(t0
, rd
>> 1);
19668 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19669 gen_store_gpr(t0
, rt
);
/* Build an 8-bit immediate replicated across the word; the fourth
 * OR-term (upstream line 19680) was lost in extraction. */
19675 target_long result
;
19676 imm
= extract32(ctx
->opcode
, 13, 8);
19677 result
= (uint32_t)imm
<< 24 |
19678 (uint32_t)imm
<< 16 |
19679 (uint32_t)imm
<< 8 |
19681 result
= (int32_t)result
;
19682 tcg_gen_movi_tl(t0
, result
);
19683 gen_store_gpr(t0
, rt
);
19687 generate_exception_end(ctx
, EXCP_RI
);
19691 tcg_temp_free(rs_t
);
/*
 * gen_pool32axf_nanomips_insn: top-level POOL32Axf dispatcher.
 * Decodes rt/rs/rd from the 32-bit opcode, then switches on bits
 * [8:6] to the sub-pool translators (_1, _2, _4, _5, _7). The _5 pool
 * handles privileged CP0 operations (TLB ops, DI/EI, WAIT, DERET,
 * ERET, shadow-register moves).
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, case labels/breaks/braces (and the
 * matching #endif for CONFIG_USER_ONLY) missing. Preserved
 * byte-for-byte.
 */
19695 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19697 int rt
= extract32(ctx
->opcode
, 21, 5);
19698 int rs
= extract32(ctx
->opcode
, 16, 5);
19699 int rd
= extract32(ctx
->opcode
, 11, 5);
19701 switch (extract32(ctx
->opcode
, 6, 3)) {
19702 case NM_POOL32AXF_1
:
19704 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19705 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19708 case NM_POOL32AXF_2
:
19710 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19711 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19714 case NM_POOL32AXF_4
:
19716 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19717 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19720 case NM_POOL32AXF_5
:
19721 switch (extract32(ctx
->opcode
, 9, 7)) {
/* System-mode only: TLB maintenance via the shared CP0 translator.
 * The closing #endif was dropped by the extraction. */
19722 #ifndef CONFIG_USER_ONLY
19724 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19727 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19730 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19733 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19736 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19739 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
/* DI: disable interrupts via helper; old Status goes to GPR rt. */
19742 check_cp0_enabled(ctx
);
19744 TCGv t0
= tcg_temp_new();
19746 save_cpu_state(ctx
, 1);
19747 gen_helper_di(t0
, cpu_env
);
19748 gen_store_gpr(t0
, rt
);
19749 /* Stop translation as we may have switched the execution mode */
19750 ctx
->base
.is_jmp
= DISAS_STOP
;
/* EI: enable interrupts; mirrors the DI arm above. */
19755 check_cp0_enabled(ctx
);
19757 TCGv t0
= tcg_temp_new();
19759 save_cpu_state(ctx
, 1);
19760 gen_helper_ei(t0
, cpu_env
);
19761 gen_store_gpr(t0
, rt
);
19762 /* Stop translation as we may have switched the execution mode */
19763 ctx
->base
.is_jmp
= DISAS_STOP
;
19768 gen_load_srsgpr(rs
, rt
);
19771 gen_store_srsgpr(rs
, rt
);
19774 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19777 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19780 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19784 generate_exception_end(ctx
, EXCP_RI
);
19788 case NM_POOL32AXF_7
:
19790 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19791 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19795 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_compute_imm_branch: nanoMIPS compact branches that compare GPR
 * rt against an immediate (BEQIC/BNEIC/BGEIC/BLTIC/BGEIUC/BLTIUC and
 * the bit-test forms BBEQZC/BBNEZC — names inferred from the visible
 * NM_BBEQZC/NM_BBNEZC labels; TODO confirm). Computes ctx->btarget,
 * then either emits an unconditional goto_tb or a brcond over a
 * fall-through label.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, most case labels/breaks and the
 * `cond`/`bcond_compute` assignments' surrounding structure missing.
 * Preserved byte-for-byte.
 */
19800 /* Immediate Value Compact Branches */
19801 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19802 int rt
, int32_t imm
, int32_t offset
)
19805 int bcond_compute
= 0;
19806 TCGv t0
= tcg_temp_new();
19807 TCGv t1
= tcg_temp_new();
19809 gen_load_gpr(t0
, rt
);
19810 tcg_gen_movi_tl(t1
, imm
);
/* Branch target = next sequential PC + signed offset. */
19811 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19813 /* Load needed operands and calculate btarget */
19816 if (rt
== 0 && imm
== 0) {
19817 /* Unconditional branch */
19818 } else if (rt
== 0 && imm
!= 0) {
19823 cond
= TCG_COND_EQ
;
/* Bit-test branches: a bit index >= 32 is reserved on 32-bit cores. */
19829 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19830 generate_exception_end(ctx
, EXCP_RI
);
19832 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19833 /* Unconditional branch */
19834 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
/* Isolate bit `imm` of rt and compare it against zero. */
19838 tcg_gen_shri_tl(t0
, t0
, imm
);
19839 tcg_gen_andi_tl(t0
, t0
, 1);
19840 tcg_gen_movi_tl(t1
, 0);
19842 if (opc
== NM_BBEQZC
) {
19843 cond
= TCG_COND_EQ
;
19845 cond
= TCG_COND_NE
;
19850 if (rt
== 0 && imm
== 0) {
19853 } else if (rt
== 0 && imm
!= 0) {
19854 /* Unconditional branch */
19857 cond
= TCG_COND_NE
;
19861 if (rt
== 0 && imm
== 0) {
19862 /* Unconditional branch */
19865 cond
= TCG_COND_GE
;
19870 cond
= TCG_COND_LT
;
19873 if (rt
== 0 && imm
== 0) {
19874 /* Unconditional branch */
19877 cond
= TCG_COND_GEU
;
19882 cond
= TCG_COND_LTU
;
19885 MIPS_INVAL("Immediate Value Compact branch");
19886 generate_exception_end(ctx
, EXCP_RI
);
19890 /* branch completion */
19891 clear_branch_hflags(ctx
);
19892 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19894 if (bcond_compute
== 0) {
19895 /* Uncoditional compact branch */
19896 gen_goto_tb(ctx
, 0, ctx
->btarget
);
/* Conditional: branch over the taken path with the inverted
 * condition, then goto_tb to btarget / fall-through PC. */
19898 /* Conditional compact branch */
19899 TCGLabel
*fs
= gen_new_label();
19901 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19903 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19906 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
/*
 * gen_compute_nanomips_pbalrsc_branch: BALRSC/BRSC — unconditional
 * branch to (rs << 1) + next PC; when rt != 0 (guard dropped by the
 * extraction — TODO confirm) the return address goes to GPR rt.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, the second parameter line, braces
 * and tcg_temp_free calls missing. Preserved byte-for-byte.
 */
19914 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19915 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19918 TCGv t0
= tcg_temp_new();
19919 TCGv t1
= tcg_temp_new();
19922 gen_load_gpr(t0
, rs
);
/* Link: write the return address (next sequential PC) into rt. */
19926 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19929 /* calculate btarget */
19930 tcg_gen_shli_tl(t0
, t0
, 1);
19931 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19932 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19934 /* branch completion */
19935 clear_branch_hflags(ctx
);
19936 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19938 /* unconditional branch to register */
19939 tcg_gen_mov_tl(cpu_PC
, btarget
);
19940 tcg_gen_lookup_and_goto_ptr();
/*
 * gen_compute_compact_branch_nm: register-register nanoMIPS compact
 * branches/jumps (the visible comments name the BLEZALC/BGEZALC/
 * BGTZALC/BLTZALC link group, BEQZC/BNEZC, and JIC/JIALC). Loads the
 * operands, computes ctx->btarget, then emits either an unconditional
 * goto_tb or an inverted brcond over the taken path.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, the outer switch/case labels and
 * breaks missing, so the arm boundaries below are not visible.
 * Preserved byte-for-byte.
 */
19946 /* nanoMIPS Branches */
19947 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19948 int rs
, int rt
, int32_t offset
)
19950 int bcond_compute
= 0;
19951 TCGv t0
= tcg_temp_new();
19952 TCGv t1
= tcg_temp_new();
19954 /* Load needed operands and calculate btarget */
19956 /* compact branch */
19959 gen_load_gpr(t0
, rs
);
19960 gen_load_gpr(t1
, rt
);
19962 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19966 if (rs
== 0 || rs
== rt
) {
19967 /* OPC_BLEZALC, OPC_BGEZALC */
19968 /* OPC_BGTZALC, OPC_BLTZALC */
/* Link variants: write the return address into GPR 31 ($ra). */
19969 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19971 gen_load_gpr(t0
, rs
);
19972 gen_load_gpr(t1
, rt
);
19974 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19977 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19981 /* OPC_BEQZC, OPC_BNEZC */
19982 gen_load_gpr(t0
, rs
);
19984 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* JIC/JIALC: register-indirect target = GPR rt + offset. */
19986 /* OPC_JIC, OPC_JIALC */
19987 TCGv tbase
= tcg_temp_new();
19988 TCGv toffset
= tcg_temp_new();
19990 gen_load_gpr(tbase
, rt
);
19991 tcg_gen_movi_tl(toffset
, offset
);
19992 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19993 tcg_temp_free(tbase
);
19994 tcg_temp_free(toffset
);
19998 MIPS_INVAL("Compact branch/jump");
19999 generate_exception_end(ctx
, EXCP_RI
);
20003 if (bcond_compute
== 0) {
20004 /* Uncoditional compact branch */
20007 gen_goto_tb(ctx
, 0, ctx
->btarget
);
20010 MIPS_INVAL("Compact branch/jump");
20011 generate_exception_end(ctx
, EXCP_RI
);
20015 /* Conditional compact branch */
20016 TCGLabel
*fs
= gen_new_label();
/* Each conditional arm distinguishes the rs==0, rs==rt and general
 * encodings, branching to `fs` on the inverted condition. */
20020 if (rs
== 0 && rt
!= 0) {
20022 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20023 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20025 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20028 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
20032 if (rs
== 0 && rt
!= 0) {
20034 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20035 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20037 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20040 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
20044 if (rs
== 0 && rt
!= 0) {
20046 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20047 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20049 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20052 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
20056 if (rs
== 0 && rt
!= 0) {
20058 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20059 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20061 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20064 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
20068 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
20071 MIPS_INVAL("Compact conditional branch/jump");
20072 generate_exception_end(ctx
, EXCP_RI
);
20076 /* branch completion */
20077 clear_branch_hflags(ctx
);
20078 ctx
->base
.is_jmp
= DISAS_NORETURN
;
20080 /* Generating branch here as compact branches don't have delay slot */
20081 gen_goto_tb(ctx
, 1, ctx
->btarget
);
20084 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
/*
 * gen_compute_branch_cp1_nm: CP1 (FPU) condition branches — tests bit
 * 0 of FPR ft; one arm inverts it (presumably BC1EQZC vs BC1NEZC —
 * case labels dropped; TODO confirm), stores the condition in `bcond`
 * and the target in ctx->btarget for the delay-handling machinery
 * (MIPS_HFLAG_BC).
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, switch/case/brace lines missing.
 * Preserved byte-for-byte.
 */
20093 /* nanoMIPS CP1 Branches */
20094 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
20095 int32_t ft
, int32_t offset
)
20097 target_ulong btarget
;
20098 TCGv_i64 t0
= tcg_temp_new_i64();
20100 gen_load_fpr64(ctx
, t0
, ft
);
20101 tcg_gen_andi_i64(t0
, t0
, 1);
20103 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* Inverted test: branch when bit 0 of ft is zero. */
20107 tcg_gen_xori_i64(t0
, t0
, 1);
20108 ctx
->hflags
|= MIPS_HFLAG_BC
;
20111 /* t0 already set */
20112 ctx
->hflags
|= MIPS_HFLAG_BC
;
20115 MIPS_INVAL("cp1 cond branch");
20116 generate_exception_end(ctx
, EXCP_RI
);
20120 tcg_gen_trunc_i64_tl(bcond
, t0
);
20122 ctx
->btarget
= btarget
;
20125 tcg_temp_free_i64(t0
);
/*
 * gen_p_lsx: nanoMIPS P.LSX/PP.LSXS indexed loads and stores.
 * Effective address = GPR rs (optionally pre-shifted by 1/2/3 for the
 * scaled PP.LSXS forms, selected by opcode bit 6) + GPR rt; opcode
 * bits [10:7] pick the access width/signedness and integer vs. FP
 * register file.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, case labels, MemOp arguments of the
 * qemu_ld/st calls and breaks missing. Preserved byte-for-byte.
 */
20129 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
20132 t0
= tcg_temp_new();
20133 t1
= tcg_temp_new();
20135 gen_load_gpr(t0
, rs
);
20136 gen_load_gpr(t1
, rt
);
20138 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
20139 /* PP.LSXS instructions require shifting */
20140 switch (extract32(ctx
->opcode
, 7, 4)) {
20146 tcg_gen_shli_tl(t0
, t0
, 1);
20154 tcg_gen_shli_tl(t0
, t0
, 2);
20158 tcg_gen_shli_tl(t0
, t0
, 3);
20162 gen_op_addr_add(ctx
, t0
, t0
, t1
);
/* Loads: result lands back in t0, then into GPR rd. The MemOp
 * argument of each tcg_gen_qemu_ld_tl call was dropped. */
20164 switch (extract32(ctx
->opcode
, 7, 4)) {
20166 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20168 gen_store_gpr(t0
, rd
);
20172 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20174 gen_store_gpr(t0
, rd
);
20178 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20180 gen_store_gpr(t0
, rd
);
20183 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20185 gen_store_gpr(t0
, rd
);
20189 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20191 gen_store_gpr(t0
, rd
);
/* Stores: data comes from GPR rd via t1; address in t0. */
20195 gen_load_gpr(t1
, rd
);
20196 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20202 gen_load_gpr(t1
, rd
);
20203 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20209 gen_load_gpr(t1
, rd
);
20210 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
/* FP forms require CP1 present (Config1.FP) and enabled; otherwise a
 * coprocessor-unusable exception is raised. */
20214 /*case NM_LWC1XS:*/
20216 /*case NM_LDC1XS:*/
20218 /*case NM_SWC1XS:*/
20220 /*case NM_SDC1XS:*/
20221 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20222 check_cp1_enabled(ctx
);
20223 switch (extract32(ctx
->opcode
, 7, 4)) {
20225 /*case NM_LWC1XS:*/
20226 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20229 /*case NM_LDC1XS:*/
20230 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20233 /*case NM_SWC1XS:*/
20234 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20237 /*case NM_SDC1XS:*/
20238 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20242 generate_exception_err(ctx
, EXCP_CpU
, 1);
20246 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_pool32f_nanomips_insn: POOL32F (floating-point) decoder.
 * Requires an FPU (Config1.FP) and CP1 enabled; raises EXCP_RI
 * otherwise. Dispatches on opcode bits [2:0] and nested sub-fields to
 * the shared gen_farith/gen_sel_s/gen_sel_d/gen_cp1/gen_r6_cmp_*
 * translators — arithmetic, select, CP1 moves, conversions, and R6
 * CMP.condn.fmt.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, nearly all case labels and breaks
 * missing, so arm boundaries are invisible. Preserved byte-for-byte.
 */
20254 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20258 rt
= extract32(ctx
->opcode
, 21, 5);
20259 rs
= extract32(ctx
->opcode
, 16, 5);
20260 rd
= extract32(ctx
->opcode
, 11, 5);
20262 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20263 generate_exception_end(ctx
, EXCP_RI
);
20266 check_cp1_enabled(ctx
);
20267 switch (extract32(ctx
->opcode
, 0, 3)) {
/* Three-operand arithmetic / select pool, sub-opcode in bits [9:3]. */
20269 switch (extract32(ctx
->opcode
, 3, 7)) {
20271 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20274 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20277 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20280 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20283 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20286 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20289 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20292 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20295 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20298 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20301 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20304 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20307 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20310 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20313 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20316 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20319 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20322 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20325 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20328 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20331 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20334 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20337 generate_exception_end(ctx
, EXCP_RI
);
/* MIN/MAX pool: bit 9 selects single vs. double precision. */
20342 switch (extract32(ctx
->opcode
, 3, 3)) {
20344 switch (extract32(ctx
->opcode
, 9, 1)) {
20346 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20349 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20354 switch (extract32(ctx
->opcode
, 9, 1)) {
20356 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20359 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20364 switch (extract32(ctx
->opcode
, 9, 1)) {
20366 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20369 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20374 switch (extract32(ctx
->opcode
, 9, 1)) {
20376 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20379 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
/* CP1 register moves (CFC1/CTC1/MFC1/MTC1/MFHC1/MTHC1). */
20384 switch (extract32(ctx
->opcode
, 6, 8)) {
20386 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20389 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20392 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20395 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20398 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20401 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20404 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20407 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
/* One-operand pool: conversions, roundings, sqrt/recip, mov/abs/neg. */
20410 switch (extract32(ctx
->opcode
, 6, 9)) {
20412 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20415 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20418 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20421 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20424 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20427 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20430 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20433 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20436 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20439 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20442 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20445 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20448 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20451 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20454 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20457 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20460 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20463 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20466 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20469 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20472 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20475 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20478 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20481 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20484 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20487 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20490 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20493 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20496 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20499 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20502 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20505 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20508 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20511 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20514 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20517 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20520 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20523 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20526 generate_exception_end(ctx
, EXCP_RI
);
/* R6 compare pool: condition code in bits [10:6]. */
20535 switch (extract32(ctx
->opcode
, 3, 3)) {
20536 case NM_CMP_CONDN_S
:
20537 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20539 case NM_CMP_CONDN_D
:
20540 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20543 generate_exception_end(ctx
, EXCP_RI
);
20548 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_pool32a5_nanomips_insn: POOL32A5 DSP ASE three-register
 * instructions. Loads GPR rs/rt into v1_t/v2_t, dispatches on `opc`
 * to the DSP helpers (compare, pick, saturating add/sub, shifts,
 * precision-reduce, multiply families), and stores the result back
 * via gen_store_gpr. Many arms use opcode bit 10 to select the plain
 * vs. saturating/rounding variant of the same helper.
 *
 * NOTE(review): damaged extraction — statements split across lines,
 * upstream line numbers embedded, most case labels/breaks/braces and
 * some trailing call arguments missing (e.g. the final argument of
 * the precr_sra helpers). Preserved byte-for-byte.
 */
20553 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20554 int rd
, int rs
, int rt
)
20557 TCGv t0
= tcg_temp_new();
20558 TCGv v1_t
= tcg_temp_new();
20559 TCGv v2_t
= tcg_temp_new();
20561 gen_load_gpr(v1_t
, rs
);
20562 gen_load_gpr(v2_t
, rt
);
/* cmp.*.ph / cmpu.*.qb set DSPControl condition bits via cpu_env. */
20567 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20571 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20575 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20577 case NM_CMPU_EQ_QB
:
20579 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20581 case NM_CMPU_LT_QB
:
20583 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20585 case NM_CMPU_LE_QB
:
20587 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20589 case NM_CMPGU_EQ_QB
:
20591 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20592 gen_store_gpr(v1_t
, ret
);
20594 case NM_CMPGU_LT_QB
:
20596 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20597 gen_store_gpr(v1_t
, ret
);
20599 case NM_CMPGU_LE_QB
:
20601 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20602 gen_store_gpr(v1_t
, ret
);
/* CMPGDU.*: like CMPGU but also deposits the 4-bit result into
 * DSPControl[27:24]. */
20604 case NM_CMPGDU_EQ_QB
:
20606 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20607 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20608 gen_store_gpr(v1_t
, ret
);
20610 case NM_CMPGDU_LT_QB
:
20612 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20613 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20614 gen_store_gpr(v1_t
, ret
);
20616 case NM_CMPGDU_LE_QB
:
20618 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20619 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20620 gen_store_gpr(v1_t
, ret
);
20624 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20625 gen_store_gpr(v1_t
, ret
);
20629 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20630 gen_store_gpr(v1_t
, ret
);
20634 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20635 gen_store_gpr(v1_t
, ret
);
20639 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20640 gen_store_gpr(v1_t
, ret
);
20644 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20645 gen_store_gpr(v1_t
, ret
);
20649 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20650 gen_store_gpr(v1_t
, ret
);
20654 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20655 gen_store_gpr(v1_t
, ret
);
/* Paired arms below: opcode bit 10 picks plain vs. saturating (_s) or
 * rounding (_r) helper variant. */
20659 switch (extract32(ctx
->opcode
, 10, 1)) {
20662 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20663 gen_store_gpr(v1_t
, ret
);
20667 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20668 gen_store_gpr(v1_t
, ret
);
20672 case NM_ADDQH_R_PH
:
20674 switch (extract32(ctx
->opcode
, 10, 1)) {
20677 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20678 gen_store_gpr(v1_t
, ret
);
20682 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20683 gen_store_gpr(v1_t
, ret
);
20689 switch (extract32(ctx
->opcode
, 10, 1)) {
20692 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20693 gen_store_gpr(v1_t
, ret
);
20697 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20698 gen_store_gpr(v1_t
, ret
);
20704 switch (extract32(ctx
->opcode
, 10, 1)) {
20707 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20708 gen_store_gpr(v1_t
, ret
);
20712 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20713 gen_store_gpr(v1_t
, ret
);
20719 switch (extract32(ctx
->opcode
, 10, 1)) {
20722 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20723 gen_store_gpr(v1_t
, ret
);
20727 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20728 gen_store_gpr(v1_t
, ret
);
20732 case NM_ADDUH_R_QB
:
20734 switch (extract32(ctx
->opcode
, 10, 1)) {
20737 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20738 gen_store_gpr(v1_t
, ret
);
20742 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20743 gen_store_gpr(v1_t
, ret
);
20747 case NM_SHRAV_R_PH
:
20749 switch (extract32(ctx
->opcode
, 10, 1)) {
20752 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20753 gen_store_gpr(v1_t
, ret
);
20757 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20758 gen_store_gpr(v1_t
, ret
);
20762 case NM_SHRAV_R_QB
:
20764 switch (extract32(ctx
->opcode
, 10, 1)) {
20767 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20768 gen_store_gpr(v1_t
, ret
);
20772 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20773 gen_store_gpr(v1_t
, ret
);
20779 switch (extract32(ctx
->opcode
, 10, 1)) {
20782 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20783 gen_store_gpr(v1_t
, ret
);
20787 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20788 gen_store_gpr(v1_t
, ret
);
20792 case NM_SUBQH_R_PH
:
20794 switch (extract32(ctx
->opcode
, 10, 1)) {
20797 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20798 gen_store_gpr(v1_t
, ret
);
20802 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20803 gen_store_gpr(v1_t
, ret
);
20809 switch (extract32(ctx
->opcode
, 10, 1)) {
20812 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20813 gen_store_gpr(v1_t
, ret
);
20817 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20818 gen_store_gpr(v1_t
, ret
);
20824 switch (extract32(ctx
->opcode
, 10, 1)) {
20827 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20828 gen_store_gpr(v1_t
, ret
);
20832 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20833 gen_store_gpr(v1_t
, ret
);
20839 switch (extract32(ctx
->opcode
, 10, 1)) {
20842 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20843 gen_store_gpr(v1_t
, ret
);
20847 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20848 gen_store_gpr(v1_t
, ret
);
20852 case NM_SUBUH_R_QB
:
20854 switch (extract32(ctx
->opcode
, 10, 1)) {
20857 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20858 gen_store_gpr(v1_t
, ret
);
20862 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20863 gen_store_gpr(v1_t
, ret
);
20867 case NM_SHLLV_S_PH
:
20869 switch (extract32(ctx
->opcode
, 10, 1)) {
20872 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20873 gen_store_gpr(v1_t
, ret
);
20877 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20878 gen_store_gpr(v1_t
, ret
);
/* PRECR_SRA[_R].PH.W: shift amount rd passed as an i32 constant; the
 * final argument line of each helper call was dropped. */
20882 case NM_PRECR_SRA_R_PH_W
:
20884 switch (extract32(ctx
->opcode
, 10, 1)) {
20886 /* PRECR_SRA_PH_W */
20888 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20889 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20891 gen_store_gpr(v1_t
, rt
);
20892 tcg_temp_free_i32(sa_t
);
20896 /* PRECR_SRA_R_PH_W */
20898 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20899 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20901 gen_store_gpr(v1_t
, rt
);
20902 tcg_temp_free_i32(sa_t
);
20907 case NM_MULEU_S_PH_QBL
:
20909 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20910 gen_store_gpr(v1_t
, ret
);
20912 case NM_MULEU_S_PH_QBR
:
20914 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20915 gen_store_gpr(v1_t
, ret
);
20917 case NM_MULQ_RS_PH
:
20919 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20920 gen_store_gpr(v1_t
, ret
);
20924 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20925 gen_store_gpr(v1_t
, ret
);
20929 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20930 gen_store_gpr(v1_t
, ret
);
20934 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20935 gen_store_gpr(v1_t
, ret
);
/* Append/deposit rs into the low `rd` bits of rt (presumably APPEND;
 * TODO confirm), keeping the result sign-extended to 32 bits. */
20939 gen_load_gpr(t0
, rs
);
20941 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20943 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20947 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20948 gen_store_gpr(v1_t
, ret
);
20952 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20953 gen_store_gpr(v1_t
, ret
);
20957 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20958 gen_store_gpr(v1_t
, ret
);
20962 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20963 gen_store_gpr(v1_t
, ret
);
20967 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20968 gen_store_gpr(v1_t
, ret
);
20972 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20973 gen_store_gpr(v1_t
, ret
);
/* SHILO: accumulator index rd>>3 and 7-bit immediate via temps. */
20978 TCGv tv0
= tcg_temp_new();
20979 TCGv tv1
= tcg_temp_new();
20980 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20982 tcg_gen_movi_tl(tv0
, rd
>> 3);
20983 tcg_gen_movi_tl(tv1
, imm
);
20984 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20987 case NM_MULEQ_S_W_PHL
:
20989 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20990 gen_store_gpr(v1_t
, ret
);
20992 case NM_MULEQ_S_W_PHR
:
20994 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20995 gen_store_gpr(v1_t
, ret
);
20999 switch (extract32(ctx
->opcode
, 10, 1)) {
21002 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21003 gen_store_gpr(v1_t
, ret
);
21007 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21008 gen_store_gpr(v1_t
, ret
);
21012 case NM_PRECR_QB_PH
:
21014 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
21015 gen_store_gpr(v1_t
, ret
);
21017 case NM_PRECRQ_QB_PH
:
21019 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
21020 gen_store_gpr(v1_t
, ret
);
21022 case NM_PRECRQ_PH_W
:
21024 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
21025 gen_store_gpr(v1_t
, ret
);
21027 case NM_PRECRQ_RS_PH_W
:
21029 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
21030 gen_store_gpr(v1_t
, ret
);
21032 case NM_PRECRQU_S_QB_PH
:
21034 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21035 gen_store_gpr(v1_t
, ret
);
/* Immediate-shift arms: shift amount taken from the rd field. */
21039 tcg_gen_movi_tl(t0
, rd
);
21040 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
21041 gen_store_gpr(v1_t
, rt
);
21045 tcg_gen_movi_tl(t0
, rd
>> 1);
21046 switch (extract32(ctx
->opcode
, 10, 1)) {
21049 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
21050 gen_store_gpr(v1_t
, rt
);
21054 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
21055 gen_store_gpr(v1_t
, rt
);
21061 tcg_gen_movi_tl(t0
, rd
>> 1);
21062 switch (extract32(ctx
->opcode
, 10, 2)) {
21065 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
21066 gen_store_gpr(v1_t
, rt
);
21070 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
21071 gen_store_gpr(v1_t
, rt
);
21074 generate_exception_end(ctx
, EXCP_RI
);
21080 tcg_gen_movi_tl(t0
, rd
);
21081 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
21082 gen_store_gpr(v1_t
, rt
);
/* REPL.PH-style arm: sign-extend a 10-bit immediate and replicate it
 * into both halfwords of rt via dup_const(MO_16, ...). */
21088 imm
= sextract32(ctx
->opcode
, 11, 11);
21089 imm
= (int16_t)(imm
<< 6) >> 6;
21091 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
21096 generate_exception_end(ctx
, EXCP_RI
);
21101 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21109 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
21110 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
21112 rt
= extract32(ctx
->opcode
, 21, 5);
21113 rs
= extract32(ctx
->opcode
, 16, 5);
21114 rd
= extract32(ctx
->opcode
, 11, 5);
21116 op
= extract32(ctx
->opcode
, 26, 6);
21121 switch (extract32(ctx
->opcode
, 19, 2)) {
21124 generate_exception_end(ctx
, EXCP_RI
);
21127 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
21128 generate_exception_end(ctx
, EXCP_SYSCALL
);
21130 generate_exception_end(ctx
, EXCP_RI
);
21134 generate_exception_end(ctx
, EXCP_BREAK
);
21137 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
21138 gen_helper_do_semihosting(cpu_env
);
21140 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21141 generate_exception_end(ctx
, EXCP_RI
);
21143 generate_exception_end(ctx
, EXCP_DBp
);
21150 imm
= extract32(ctx
->opcode
, 0, 16);
21152 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
21154 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21156 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21161 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21162 extract32(ctx
->opcode
, 1, 20) << 1;
21163 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21164 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21168 switch (ctx
->opcode
& 0x07) {
21170 gen_pool32a0_nanomips_insn(env
, ctx
);
21174 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21175 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21179 switch (extract32(ctx
->opcode
, 3, 3)) {
21181 gen_p_lsx(ctx
, rd
, rs
, rt
);
21185 * In nanoMIPS, the shift field directly encodes the shift
21186 * amount, meaning that the supported shift values are in
21187 * the range 0 to 3 (instead of 1 to 4 in MIPSR6).
21189 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
21190 extract32(ctx
->opcode
, 9, 2) - 1);
21193 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21196 gen_pool32axf_nanomips_insn(env
, ctx
);
21199 generate_exception_end(ctx
, EXCP_RI
);
21204 generate_exception_end(ctx
, EXCP_RI
);
21209 switch (ctx
->opcode
& 0x03) {
21212 offset
= extract32(ctx
->opcode
, 0, 21);
21213 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21217 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21220 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21223 generate_exception_end(ctx
, EXCP_RI
);
21229 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21230 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21231 switch (extract32(ctx
->opcode
, 16, 5)) {
21235 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21241 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21242 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21248 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21254 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21257 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21264 t0
= tcg_temp_new();
21266 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21269 tcg_gen_movi_tl(t0
, addr
);
21270 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21278 t0
= tcg_temp_new();
21279 t1
= tcg_temp_new();
21281 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21284 tcg_gen_movi_tl(t0
, addr
);
21285 gen_load_gpr(t1
, rt
);
21287 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21294 generate_exception_end(ctx
, EXCP_RI
);
21300 switch (extract32(ctx
->opcode
, 12, 4)) {
21302 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21305 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21308 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21311 switch (extract32(ctx
->opcode
, 20, 1)) {
21313 switch (ctx
->opcode
& 3) {
21315 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21316 extract32(ctx
->opcode
, 2, 1),
21317 extract32(ctx
->opcode
, 3, 9) << 3);
21320 case NM_RESTORE_JRC
:
21321 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21322 extract32(ctx
->opcode
, 2, 1),
21323 extract32(ctx
->opcode
, 3, 9) << 3);
21324 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21325 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21329 generate_exception_end(ctx
, EXCP_RI
);
21334 generate_exception_end(ctx
, EXCP_RI
);
21339 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21342 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21346 TCGv t0
= tcg_temp_new();
21348 imm
= extract32(ctx
->opcode
, 0, 12);
21349 gen_load_gpr(t0
, rs
);
21350 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21351 gen_store_gpr(t0
, rt
);
21357 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21358 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21362 int shift
= extract32(ctx
->opcode
, 0, 5);
21363 switch (extract32(ctx
->opcode
, 5, 4)) {
21365 if (rt
== 0 && shift
== 0) {
21367 } else if (rt
== 0 && shift
== 3) {
21368 /* EHB - treat as NOP */
21369 } else if (rt
== 0 && shift
== 5) {
21370 /* PAUSE - treat as NOP */
21371 } else if (rt
== 0 && shift
== 6) {
21373 gen_sync(extract32(ctx
->opcode
, 16, 5));
21376 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21377 extract32(ctx
->opcode
, 0, 5));
21381 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21382 extract32(ctx
->opcode
, 0, 5));
21385 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21386 extract32(ctx
->opcode
, 0, 5));
21389 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21390 extract32(ctx
->opcode
, 0, 5));
21398 TCGv t0
= tcg_temp_new();
21399 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21400 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21402 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21404 gen_load_gpr(t0
, rs
);
21405 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21408 tcg_temp_free_i32(shift
);
21409 tcg_temp_free_i32(shiftx
);
21410 tcg_temp_free_i32(stripe
);
21414 switch (((ctx
->opcode
>> 10) & 2) |
21415 (extract32(ctx
->opcode
, 5, 1))) {
21418 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21419 extract32(ctx
->opcode
, 6, 5));
21422 generate_exception_end(ctx
, EXCP_RI
);
21427 switch (((ctx
->opcode
>> 10) & 2) |
21428 (extract32(ctx
->opcode
, 5, 1))) {
21431 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21432 extract32(ctx
->opcode
, 6, 5));
21435 generate_exception_end(ctx
, EXCP_RI
);
21440 generate_exception_end(ctx
, EXCP_RI
);
21445 gen_pool32f_nanomips_insn(ctx
);
21450 switch (extract32(ctx
->opcode
, 1, 1)) {
21453 tcg_gen_movi_tl(cpu_gpr
[rt
],
21454 sextract32(ctx
->opcode
, 0, 1) << 31 |
21455 extract32(ctx
->opcode
, 2, 10) << 21 |
21456 extract32(ctx
->opcode
, 12, 9) << 12);
21461 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21462 extract32(ctx
->opcode
, 2, 10) << 21 |
21463 extract32(ctx
->opcode
, 12, 9) << 12;
21465 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21466 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21473 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21475 switch (extract32(ctx
->opcode
, 18, 3)) {
21477 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21480 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21483 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21487 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21492 switch (ctx
->opcode
& 1) {
21494 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21497 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21503 switch (ctx
->opcode
& 1) {
21505 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21508 generate_exception_end(ctx
, EXCP_RI
);
21514 switch (ctx
->opcode
& 0x3) {
21516 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21519 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21522 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21525 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21530 generate_exception_end(ctx
, EXCP_RI
);
21537 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21539 switch (extract32(ctx
->opcode
, 12, 4)) {
21544 * Break the TB to be able to sync copied instructions
21547 ctx
->base
.is_jmp
= DISAS_STOP
;
21550 /* Treat as NOP. */
21554 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21557 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21560 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21563 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21566 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21569 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21572 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21575 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21578 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21581 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21584 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21587 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21590 generate_exception_end(ctx
, EXCP_RI
);
21597 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21598 extract32(ctx
->opcode
, 0, 8);
21600 switch (extract32(ctx
->opcode
, 8, 3)) {
21602 switch (extract32(ctx
->opcode
, 11, 4)) {
21604 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21607 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21610 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21613 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21616 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21619 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21622 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21625 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21628 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21631 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21634 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21637 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21643 * Break the TB to be able to sync copied instructions
21646 ctx
->base
.is_jmp
= DISAS_STOP
;
21649 /* Treat as NOP. */
21653 generate_exception_end(ctx
, EXCP_RI
);
21658 switch (extract32(ctx
->opcode
, 11, 4)) {
21663 TCGv t0
= tcg_temp_new();
21664 TCGv t1
= tcg_temp_new();
21666 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21668 switch (extract32(ctx
->opcode
, 11, 4)) {
21670 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21672 gen_store_gpr(t0
, rt
);
21675 gen_load_gpr(t1
, rt
);
21676 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21685 switch (ctx
->opcode
& 0x03) {
21687 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21691 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21696 switch (ctx
->opcode
& 0x03) {
21698 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21702 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21708 check_cp0_enabled(ctx
);
21709 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21710 gen_cache_operation(ctx
, rt
, rs
, s
);
21716 switch (extract32(ctx
->opcode
, 11, 4)) {
21719 check_cp0_enabled(ctx
);
21720 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21724 check_cp0_enabled(ctx
);
21725 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21729 check_cp0_enabled(ctx
);
21730 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21734 /* case NM_SYNCIE */
21736 check_cp0_enabled(ctx
);
21738 * Break the TB to be able to sync copied instructions
21741 ctx
->base
.is_jmp
= DISAS_STOP
;
21743 /* case NM_PREFE */
21745 check_cp0_enabled(ctx
);
21746 /* Treat as NOP. */
21751 check_cp0_enabled(ctx
);
21752 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21756 check_cp0_enabled(ctx
);
21757 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21761 check_cp0_enabled(ctx
);
21762 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21765 check_nms_dl_il_sl_tl_l2c(ctx
);
21766 gen_cache_operation(ctx
, rt
, rs
, s
);
21770 check_cp0_enabled(ctx
);
21771 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21775 check_cp0_enabled(ctx
);
21776 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21779 switch (extract32(ctx
->opcode
, 2, 2)) {
21783 check_cp0_enabled(ctx
);
21784 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21789 check_cp0_enabled(ctx
);
21790 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21793 generate_exception_end(ctx
, EXCP_RI
);
21798 switch (extract32(ctx
->opcode
, 2, 2)) {
21802 check_cp0_enabled(ctx
);
21803 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21808 check_cp0_enabled(ctx
);
21809 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21813 generate_exception_end(ctx
, EXCP_RI
);
21823 int count
= extract32(ctx
->opcode
, 12, 3);
21826 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21827 extract32(ctx
->opcode
, 0, 8);
21828 TCGv va
= tcg_temp_new();
21829 TCGv t1
= tcg_temp_new();
21830 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21831 NM_P_LS_UAWM
? MO_UNALN
: 0;
21833 count
= (count
== 0) ? 8 : count
;
21834 while (counter
!= count
) {
21835 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21836 int this_offset
= offset
+ (counter
<< 2);
21838 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21840 switch (extract32(ctx
->opcode
, 11, 1)) {
21842 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21844 gen_store_gpr(t1
, this_rt
);
21845 if ((this_rt
== rs
) &&
21846 (counter
!= (count
- 1))) {
21847 /* UNPREDICTABLE */
21851 this_rt
= (rt
== 0) ? 0 : this_rt
;
21852 gen_load_gpr(t1
, this_rt
);
21853 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21864 generate_exception_end(ctx
, EXCP_RI
);
21872 TCGv t0
= tcg_temp_new();
21873 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21874 extract32(ctx
->opcode
, 1, 20) << 1;
21875 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21876 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21877 extract32(ctx
->opcode
, 21, 3));
21878 gen_load_gpr(t0
, rt
);
21879 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21880 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21886 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21887 extract32(ctx
->opcode
, 1, 24) << 1;
21889 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21891 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21894 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21899 switch (extract32(ctx
->opcode
, 12, 4)) {
21902 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21905 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21908 generate_exception_end(ctx
, EXCP_RI
);
21914 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21915 extract32(ctx
->opcode
, 1, 13) << 1;
21916 switch (extract32(ctx
->opcode
, 14, 2)) {
21919 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21922 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21923 extract32(ctx
->opcode
, 1, 13) << 1;
21924 check_cp1_enabled(ctx
);
21925 switch (extract32(ctx
->opcode
, 16, 5)) {
21927 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21930 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21935 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21936 extract32(ctx
->opcode
, 0, 1) << 13;
21938 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21943 generate_exception_end(ctx
, EXCP_RI
);
21949 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21951 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21955 if (rs
== rt
|| rt
== 0) {
21956 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21957 } else if (rs
== 0) {
21958 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21960 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21968 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21969 extract32(ctx
->opcode
, 1, 13) << 1;
21970 switch (extract32(ctx
->opcode
, 14, 2)) {
21973 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21976 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21978 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21980 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21984 if (rs
== 0 || rs
== rt
) {
21986 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21988 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21992 generate_exception_end(ctx
, EXCP_RI
);
21999 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
22000 extract32(ctx
->opcode
, 1, 10) << 1;
22001 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
22003 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
22008 generate_exception_end(ctx
, EXCP_RI
);
22014 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
22017 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22018 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22019 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx
->opcode
));
22023 /* make sure instructions are on a halfword boundary */
22024 if (ctx
->base
.pc_next
& 0x1) {
22025 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
22026 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
22027 tcg_temp_free(tmp
);
22028 generate_exception_end(ctx
, EXCP_AdEL
);
22032 op
= extract32(ctx
->opcode
, 10, 6);
22035 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22038 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
22039 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
22042 switch (extract32(ctx
->opcode
, 3, 2)) {
22043 case NM_P16_SYSCALL
:
22044 if (extract32(ctx
->opcode
, 2, 1) == 0) {
22045 generate_exception_end(ctx
, EXCP_SYSCALL
);
22047 generate_exception_end(ctx
, EXCP_RI
);
22051 generate_exception_end(ctx
, EXCP_BREAK
);
22054 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
22055 gen_helper_do_semihosting(cpu_env
);
22057 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
22058 generate_exception_end(ctx
, EXCP_RI
);
22060 generate_exception_end(ctx
, EXCP_DBp
);
22065 generate_exception_end(ctx
, EXCP_RI
);
22072 int shift
= extract32(ctx
->opcode
, 0, 3);
22074 shift
= (shift
== 0) ? 8 : shift
;
22076 switch (extract32(ctx
->opcode
, 3, 1)) {
22084 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
22088 switch (ctx
->opcode
& 1) {
22090 gen_pool16c_nanomips_insn(ctx
);
22093 gen_ldxs(ctx
, rt
, rs
, rd
);
22098 switch (extract32(ctx
->opcode
, 6, 1)) {
22100 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
22101 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
22104 generate_exception_end(ctx
, EXCP_RI
);
22109 switch (extract32(ctx
->opcode
, 3, 1)) {
22111 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
22112 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
22114 case NM_P_ADDIURS5
:
22115 rt
= extract32(ctx
->opcode
, 5, 5);
22117 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
22118 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
22119 (extract32(ctx
->opcode
, 0, 3));
22120 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
22126 switch (ctx
->opcode
& 0x1) {
22128 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
22131 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
22136 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22137 extract32(ctx
->opcode
, 5, 3);
22138 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22139 extract32(ctx
->opcode
, 0, 3);
22140 rt
= decode_gpr_gpr4(rt
);
22141 rs
= decode_gpr_gpr4(rs
);
22142 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22143 (extract32(ctx
->opcode
, 3, 1))) {
22146 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22150 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22153 generate_exception_end(ctx
, EXCP_RI
);
22159 int imm
= extract32(ctx
->opcode
, 0, 7);
22160 imm
= (imm
== 0x7f ? -1 : imm
);
22162 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22168 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22169 u
= (u
== 12) ? 0xff :
22170 (u
== 13) ? 0xffff : u
;
22171 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22175 offset
= extract32(ctx
->opcode
, 0, 2);
22176 switch (extract32(ctx
->opcode
, 2, 2)) {
22178 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22181 rt
= decode_gpr_gpr3_src_store(
22182 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22183 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22186 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22189 generate_exception_end(ctx
, EXCP_RI
);
22194 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22195 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22197 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22200 rt
= decode_gpr_gpr3_src_store(
22201 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22202 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22205 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22208 generate_exception_end(ctx
, EXCP_RI
);
22213 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22214 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22217 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22218 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22219 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22223 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22224 extract32(ctx
->opcode
, 5, 3);
22225 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22226 extract32(ctx
->opcode
, 0, 3);
22227 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22228 (extract32(ctx
->opcode
, 8, 1) << 2);
22229 rt
= decode_gpr_gpr4(rt
);
22230 rs
= decode_gpr_gpr4(rs
);
22231 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22235 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22236 extract32(ctx
->opcode
, 5, 3);
22237 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22238 extract32(ctx
->opcode
, 0, 3);
22239 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22240 (extract32(ctx
->opcode
, 8, 1) << 2);
22241 rt
= decode_gpr_gpr4_zero(rt
);
22242 rs
= decode_gpr_gpr4(rs
);
22243 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22246 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22247 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22250 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22251 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22252 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22255 rt
= decode_gpr_gpr3_src_store(
22256 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22257 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22258 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22259 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22262 rt
= decode_gpr_gpr3_src_store(
22263 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22264 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22265 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22268 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22269 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22270 (extract32(ctx
->opcode
, 1, 9) << 1));
22273 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22274 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22275 (extract32(ctx
->opcode
, 1, 9) << 1));
22278 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22279 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22280 (extract32(ctx
->opcode
, 1, 6) << 1));
22283 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22284 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22285 (extract32(ctx
->opcode
, 1, 6) << 1));
22288 switch (ctx
->opcode
& 0xf) {
22291 switch (extract32(ctx
->opcode
, 4, 1)) {
22293 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22294 extract32(ctx
->opcode
, 5, 5), 0, 0);
22297 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22298 extract32(ctx
->opcode
, 5, 5), 31, 0);
22305 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22306 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22307 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22308 extract32(ctx
->opcode
, 0, 4) << 1);
22315 int count
= extract32(ctx
->opcode
, 0, 4);
22316 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22318 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22319 switch (extract32(ctx
->opcode
, 8, 1)) {
22321 gen_save(ctx
, rt
, count
, 0, u
);
22323 case NM_RESTORE_JRC16
:
22324 gen_restore(ctx
, rt
, count
, 0, u
);
22325 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22334 static const int gpr2reg1
[] = {4, 5, 6, 7};
22335 static const int gpr2reg2
[] = {5, 6, 7, 8};
22337 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22338 extract32(ctx
->opcode
, 8, 1);
22339 int r1
= gpr2reg1
[rd2
];
22340 int r2
= gpr2reg2
[rd2
];
22341 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22342 extract32(ctx
->opcode
, 0, 3);
22343 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22344 extract32(ctx
->opcode
, 5, 3);
22345 TCGv t0
= tcg_temp_new();
22346 TCGv t1
= tcg_temp_new();
22347 if (op
== NM_MOVEP
) {
22350 rs
= decode_gpr_gpr4_zero(r3
);
22351 rt
= decode_gpr_gpr4_zero(r4
);
22353 rd
= decode_gpr_gpr4(r3
);
22354 re
= decode_gpr_gpr4(r4
);
22358 gen_load_gpr(t0
, rs
);
22359 gen_load_gpr(t1
, rt
);
22360 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22361 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22367 return decode_nanomips_32_48_opc(env
, ctx
);
22374 /* SmartMIPS extension to MIPS32 */
22376 #if defined(TARGET_MIPS64)
22378 /* MDMX extension to MIPS64 */
22382 /* MIPSDSP functions. */
22383 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22384 int rd
, int base
, int offset
)
22389 t0
= tcg_temp_new();
22392 gen_load_gpr(t0
, offset
);
22393 } else if (offset
== 0) {
22394 gen_load_gpr(t0
, base
);
22396 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22401 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22402 gen_store_gpr(t0
, rd
);
22405 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22406 gen_store_gpr(t0
, rd
);
22409 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22410 gen_store_gpr(t0
, rd
);
22412 #if defined(TARGET_MIPS64)
22414 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22415 gen_store_gpr(t0
, rd
);
22422 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22423 int ret
, int v1
, int v2
)
22429 /* Treat as NOP. */
22433 v1_t
= tcg_temp_new();
22434 v2_t
= tcg_temp_new();
22436 gen_load_gpr(v1_t
, v1
);
22437 gen_load_gpr(v2_t
, v2
);
22440 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22441 case OPC_MULT_G_2E
:
22445 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22447 case OPC_ADDUH_R_QB
:
22448 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22451 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22453 case OPC_ADDQH_R_PH
:
22454 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22457 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22459 case OPC_ADDQH_R_W
:
22460 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22463 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22465 case OPC_SUBUH_R_QB
:
22466 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22469 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22471 case OPC_SUBQH_R_PH
:
22472 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22475 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22477 case OPC_SUBQH_R_W
:
22478 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22482 case OPC_ABSQ_S_PH_DSP
:
22484 case OPC_ABSQ_S_QB
:
22486 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22488 case OPC_ABSQ_S_PH
:
22490 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22494 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22496 case OPC_PRECEQ_W_PHL
:
22498 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22499 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22501 case OPC_PRECEQ_W_PHR
:
22503 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22504 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22505 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22507 case OPC_PRECEQU_PH_QBL
:
22509 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22511 case OPC_PRECEQU_PH_QBR
:
22513 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22515 case OPC_PRECEQU_PH_QBLA
:
22517 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22519 case OPC_PRECEQU_PH_QBRA
:
22521 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22523 case OPC_PRECEU_PH_QBL
:
22525 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22527 case OPC_PRECEU_PH_QBR
:
22529 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22531 case OPC_PRECEU_PH_QBLA
:
22533 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22535 case OPC_PRECEU_PH_QBRA
:
22537 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22541 case OPC_ADDU_QB_DSP
:
22545 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22547 case OPC_ADDQ_S_PH
:
22549 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22553 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22557 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22559 case OPC_ADDU_S_QB
:
22561 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22565 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22567 case OPC_ADDU_S_PH
:
22569 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22573 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22575 case OPC_SUBQ_S_PH
:
22577 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22581 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22585 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22587 case OPC_SUBU_S_QB
:
22589 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22593 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22595 case OPC_SUBU_S_PH
:
22597 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22601 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22605 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22609 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22611 case OPC_RADDU_W_QB
:
22613 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22617 case OPC_CMPU_EQ_QB_DSP
:
22619 case OPC_PRECR_QB_PH
:
22621 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22623 case OPC_PRECRQ_QB_PH
:
22625 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22627 case OPC_PRECR_SRA_PH_W
:
22630 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22631 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22633 tcg_temp_free_i32(sa_t
);
22636 case OPC_PRECR_SRA_R_PH_W
:
22639 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22640 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22642 tcg_temp_free_i32(sa_t
);
22645 case OPC_PRECRQ_PH_W
:
22647 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22649 case OPC_PRECRQ_RS_PH_W
:
22651 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22653 case OPC_PRECRQU_S_QB_PH
:
22655 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22659 #ifdef TARGET_MIPS64
22660 case OPC_ABSQ_S_QH_DSP
:
22662 case OPC_PRECEQ_L_PWL
:
22664 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22666 case OPC_PRECEQ_L_PWR
:
22668 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22670 case OPC_PRECEQ_PW_QHL
:
22672 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22674 case OPC_PRECEQ_PW_QHR
:
22676 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22678 case OPC_PRECEQ_PW_QHLA
:
22680 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22682 case OPC_PRECEQ_PW_QHRA
:
22684 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22686 case OPC_PRECEQU_QH_OBL
:
22688 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22690 case OPC_PRECEQU_QH_OBR
:
22692 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22694 case OPC_PRECEQU_QH_OBLA
:
22696 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22698 case OPC_PRECEQU_QH_OBRA
:
22700 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22702 case OPC_PRECEU_QH_OBL
:
22704 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22706 case OPC_PRECEU_QH_OBR
:
22708 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22710 case OPC_PRECEU_QH_OBLA
:
22712 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22714 case OPC_PRECEU_QH_OBRA
:
22716 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22718 case OPC_ABSQ_S_OB
:
22720 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22722 case OPC_ABSQ_S_PW
:
22724 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22726 case OPC_ABSQ_S_QH
:
22728 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22732 case OPC_ADDU_OB_DSP
:
22734 case OPC_RADDU_L_OB
:
22736 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22740 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22742 case OPC_SUBQ_S_PW
:
22744 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22748 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22750 case OPC_SUBQ_S_QH
:
22752 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22756 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22758 case OPC_SUBU_S_OB
:
22760 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22764 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22766 case OPC_SUBU_S_QH
:
22768 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22772 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22774 case OPC_SUBUH_R_OB
:
22776 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22780 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22782 case OPC_ADDQ_S_PW
:
22784 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22788 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22790 case OPC_ADDQ_S_QH
:
22792 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22796 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22798 case OPC_ADDU_S_OB
:
22800 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22804 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22806 case OPC_ADDU_S_QH
:
22808 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22812 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22814 case OPC_ADDUH_R_OB
:
22816 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22820 case OPC_CMPU_EQ_OB_DSP
:
22822 case OPC_PRECR_OB_QH
:
22824 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22826 case OPC_PRECR_SRA_QH_PW
:
22829 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22830 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22831 tcg_temp_free_i32(ret_t
);
22834 case OPC_PRECR_SRA_R_QH_PW
:
22837 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22838 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22839 tcg_temp_free_i32(sa_v
);
22842 case OPC_PRECRQ_OB_QH
:
22844 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22846 case OPC_PRECRQ_PW_L
:
22848 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22850 case OPC_PRECRQ_QH_PW
:
22852 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22854 case OPC_PRECRQ_RS_QH_PW
:
22856 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22858 case OPC_PRECRQU_S_OB_QH
:
22860 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22867 tcg_temp_free(v1_t
);
22868 tcg_temp_free(v2_t
);
22871 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22872 int ret
, int v1
, int v2
)
22880 /* Treat as NOP. */
22884 t0
= tcg_temp_new();
22885 v1_t
= tcg_temp_new();
22886 v2_t
= tcg_temp_new();
22888 tcg_gen_movi_tl(t0
, v1
);
22889 gen_load_gpr(v1_t
, v1
);
22890 gen_load_gpr(v2_t
, v2
);
22893 case OPC_SHLL_QB_DSP
:
22895 op2
= MASK_SHLL_QB(ctx
->opcode
);
22899 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22903 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22907 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22911 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22913 case OPC_SHLL_S_PH
:
22915 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22917 case OPC_SHLLV_S_PH
:
22919 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22923 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22925 case OPC_SHLLV_S_W
:
22927 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22931 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22935 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22939 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22943 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22947 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22949 case OPC_SHRA_R_QB
:
22951 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22955 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22957 case OPC_SHRAV_R_QB
:
22959 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22963 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22965 case OPC_SHRA_R_PH
:
22967 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22971 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22973 case OPC_SHRAV_R_PH
:
22975 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22979 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22981 case OPC_SHRAV_R_W
:
22983 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22985 default: /* Invalid */
22986 MIPS_INVAL("MASK SHLL.QB");
22987 generate_exception_end(ctx
, EXCP_RI
);
22992 #ifdef TARGET_MIPS64
22993 case OPC_SHLL_OB_DSP
:
22994 op2
= MASK_SHLL_OB(ctx
->opcode
);
22998 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23002 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23004 case OPC_SHLL_S_PW
:
23006 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23008 case OPC_SHLLV_S_PW
:
23010 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23014 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23018 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23022 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23026 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23028 case OPC_SHLL_S_QH
:
23030 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23032 case OPC_SHLLV_S_QH
:
23034 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23038 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
23042 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23044 case OPC_SHRA_R_OB
:
23046 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
23048 case OPC_SHRAV_R_OB
:
23050 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23054 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
23058 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23060 case OPC_SHRA_R_PW
:
23062 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
23064 case OPC_SHRAV_R_PW
:
23066 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23070 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
23074 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23076 case OPC_SHRA_R_QH
:
23078 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
23080 case OPC_SHRAV_R_QH
:
23082 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23086 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
23090 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23094 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
23098 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23100 default: /* Invalid */
23101 MIPS_INVAL("MASK SHLL.OB");
23102 generate_exception_end(ctx
, EXCP_RI
);
23110 tcg_temp_free(v1_t
);
23111 tcg_temp_free(v2_t
);
23114 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23115 int ret
, int v1
, int v2
, int check_ret
)
23121 if ((ret
== 0) && (check_ret
== 1)) {
23122 /* Treat as NOP. */
23126 t0
= tcg_temp_new_i32();
23127 v1_t
= tcg_temp_new();
23128 v2_t
= tcg_temp_new();
23130 tcg_gen_movi_i32(t0
, ret
);
23131 gen_load_gpr(v1_t
, v1
);
23132 gen_load_gpr(v2_t
, v2
);
23136 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
23137 * the same mask and op1.
23139 case OPC_MULT_G_2E
:
23143 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23146 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23149 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23151 case OPC_MULQ_RS_W
:
23152 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23156 case OPC_DPA_W_PH_DSP
:
23158 case OPC_DPAU_H_QBL
:
23160 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23162 case OPC_DPAU_H_QBR
:
23164 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23166 case OPC_DPSU_H_QBL
:
23168 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23170 case OPC_DPSU_H_QBR
:
23172 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23176 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23178 case OPC_DPAX_W_PH
:
23180 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23182 case OPC_DPAQ_S_W_PH
:
23184 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23186 case OPC_DPAQX_S_W_PH
:
23188 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23190 case OPC_DPAQX_SA_W_PH
:
23192 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23196 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23198 case OPC_DPSX_W_PH
:
23200 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23202 case OPC_DPSQ_S_W_PH
:
23204 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23206 case OPC_DPSQX_S_W_PH
:
23208 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23210 case OPC_DPSQX_SA_W_PH
:
23212 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23214 case OPC_MULSAQ_S_W_PH
:
23216 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23218 case OPC_DPAQ_SA_L_W
:
23220 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23222 case OPC_DPSQ_SA_L_W
:
23224 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23226 case OPC_MAQ_S_W_PHL
:
23228 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23230 case OPC_MAQ_S_W_PHR
:
23232 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23234 case OPC_MAQ_SA_W_PHL
:
23236 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23238 case OPC_MAQ_SA_W_PHR
:
23240 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23242 case OPC_MULSA_W_PH
:
23244 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23248 #ifdef TARGET_MIPS64
23249 case OPC_DPAQ_W_QH_DSP
:
23251 int ac
= ret
& 0x03;
23252 tcg_gen_movi_i32(t0
, ac
);
23257 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23261 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23265 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23269 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23273 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23275 case OPC_DPAQ_S_W_QH
:
23277 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23279 case OPC_DPAQ_SA_L_PW
:
23281 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23283 case OPC_DPAU_H_OBL
:
23285 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23287 case OPC_DPAU_H_OBR
:
23289 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23293 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23295 case OPC_DPSQ_S_W_QH
:
23297 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23299 case OPC_DPSQ_SA_L_PW
:
23301 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23303 case OPC_DPSU_H_OBL
:
23305 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23307 case OPC_DPSU_H_OBR
:
23309 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23311 case OPC_MAQ_S_L_PWL
:
23313 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23315 case OPC_MAQ_S_L_PWR
:
23317 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23319 case OPC_MAQ_S_W_QHLL
:
23321 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23323 case OPC_MAQ_SA_W_QHLL
:
23325 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23327 case OPC_MAQ_S_W_QHLR
:
23329 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23331 case OPC_MAQ_SA_W_QHLR
:
23333 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23335 case OPC_MAQ_S_W_QHRL
:
23337 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23339 case OPC_MAQ_SA_W_QHRL
:
23341 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23343 case OPC_MAQ_S_W_QHRR
:
23345 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23347 case OPC_MAQ_SA_W_QHRR
:
23349 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23351 case OPC_MULSAQ_S_L_PW
:
23353 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23355 case OPC_MULSAQ_S_W_QH
:
23357 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23363 case OPC_ADDU_QB_DSP
:
23365 case OPC_MULEU_S_PH_QBL
:
23367 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23369 case OPC_MULEU_S_PH_QBR
:
23371 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23373 case OPC_MULQ_RS_PH
:
23375 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23377 case OPC_MULEQ_S_W_PHL
:
23379 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23381 case OPC_MULEQ_S_W_PHR
:
23383 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23385 case OPC_MULQ_S_PH
:
23387 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23391 #ifdef TARGET_MIPS64
23392 case OPC_ADDU_OB_DSP
:
23394 case OPC_MULEQ_S_PW_QHL
:
23396 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23398 case OPC_MULEQ_S_PW_QHR
:
23400 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23402 case OPC_MULEU_S_QH_OBL
:
23404 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23406 case OPC_MULEU_S_QH_OBR
:
23408 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23410 case OPC_MULQ_RS_QH
:
23412 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23419 tcg_temp_free_i32(t0
);
23420 tcg_temp_free(v1_t
);
23421 tcg_temp_free(v2_t
);
23424 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23432 /* Treat as NOP. */
23436 t0
= tcg_temp_new();
23437 val_t
= tcg_temp_new();
23438 gen_load_gpr(val_t
, val
);
23441 case OPC_ABSQ_S_PH_DSP
:
23445 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23450 target_long result
;
23451 imm
= (ctx
->opcode
>> 16) & 0xFF;
23452 result
= (uint32_t)imm
<< 24 |
23453 (uint32_t)imm
<< 16 |
23454 (uint32_t)imm
<< 8 |
23456 result
= (int32_t)result
;
23457 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23462 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23463 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23464 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23465 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23466 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23467 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23472 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23473 imm
= (int16_t)(imm
<< 6) >> 6;
23474 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23475 (target_long
)((int32_t)imm
<< 16 | \
23481 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23482 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23483 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23484 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23488 #ifdef TARGET_MIPS64
23489 case OPC_ABSQ_S_QH_DSP
:
23496 imm
= (ctx
->opcode
>> 16) & 0xFF;
23497 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23498 temp
= (temp
<< 16) | temp
;
23499 temp
= (temp
<< 32) | temp
;
23500 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23508 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23509 imm
= (int16_t)(imm
<< 6) >> 6;
23510 temp
= ((target_long
)imm
<< 32) \
23511 | ((target_long
)imm
& 0xFFFFFFFF);
23512 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23520 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23521 imm
= (int16_t)(imm
<< 6) >> 6;
23523 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23524 ((uint64_t)(uint16_t)imm
<< 32) |
23525 ((uint64_t)(uint16_t)imm
<< 16) |
23526 (uint64_t)(uint16_t)imm
;
23527 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23532 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23533 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23534 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23535 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23536 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23537 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23538 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23542 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23543 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23544 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23548 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23549 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23550 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23551 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23552 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23559 tcg_temp_free(val_t
);
23562 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23563 uint32_t op1
, uint32_t op2
,
23564 int ret
, int v1
, int v2
, int check_ret
)
23570 if ((ret
== 0) && (check_ret
== 1)) {
23571 /* Treat as NOP. */
23575 t1
= tcg_temp_new();
23576 v1_t
= tcg_temp_new();
23577 v2_t
= tcg_temp_new();
23579 gen_load_gpr(v1_t
, v1
);
23580 gen_load_gpr(v2_t
, v2
);
23583 case OPC_CMPU_EQ_QB_DSP
:
23585 case OPC_CMPU_EQ_QB
:
23587 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23589 case OPC_CMPU_LT_QB
:
23591 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23593 case OPC_CMPU_LE_QB
:
23595 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23597 case OPC_CMPGU_EQ_QB
:
23599 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23601 case OPC_CMPGU_LT_QB
:
23603 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23605 case OPC_CMPGU_LE_QB
:
23607 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23609 case OPC_CMPGDU_EQ_QB
:
23611 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23612 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23613 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23614 tcg_gen_shli_tl(t1
, t1
, 24);
23615 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23617 case OPC_CMPGDU_LT_QB
:
23619 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23620 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23621 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23622 tcg_gen_shli_tl(t1
, t1
, 24);
23623 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23625 case OPC_CMPGDU_LE_QB
:
23627 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23628 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23629 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23630 tcg_gen_shli_tl(t1
, t1
, 24);
23631 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23633 case OPC_CMP_EQ_PH
:
23635 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23637 case OPC_CMP_LT_PH
:
23639 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23641 case OPC_CMP_LE_PH
:
23643 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23647 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23651 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23653 case OPC_PACKRL_PH
:
23655 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23659 #ifdef TARGET_MIPS64
23660 case OPC_CMPU_EQ_OB_DSP
:
23662 case OPC_CMP_EQ_PW
:
23664 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23666 case OPC_CMP_LT_PW
:
23668 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23670 case OPC_CMP_LE_PW
:
23672 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23674 case OPC_CMP_EQ_QH
:
23676 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23678 case OPC_CMP_LT_QH
:
23680 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23682 case OPC_CMP_LE_QH
:
23684 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23686 case OPC_CMPGDU_EQ_OB
:
23688 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23690 case OPC_CMPGDU_LT_OB
:
23692 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23694 case OPC_CMPGDU_LE_OB
:
23696 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23698 case OPC_CMPGU_EQ_OB
:
23700 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23702 case OPC_CMPGU_LT_OB
:
23704 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23706 case OPC_CMPGU_LE_OB
:
23708 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23710 case OPC_CMPU_EQ_OB
:
23712 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23714 case OPC_CMPU_LT_OB
:
23716 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23718 case OPC_CMPU_LE_OB
:
23720 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23722 case OPC_PACKRL_PW
:
23724 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23728 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23732 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23736 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23744 tcg_temp_free(v1_t
);
23745 tcg_temp_free(v2_t
);
23748 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23749 uint32_t op1
, int rt
, int rs
, int sa
)
23756 /* Treat as NOP. */
23760 t0
= tcg_temp_new();
23761 gen_load_gpr(t0
, rs
);
23764 case OPC_APPEND_DSP
:
23765 switch (MASK_APPEND(ctx
->opcode
)) {
23768 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23770 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23774 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23775 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23776 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23777 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23779 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23783 if (sa
!= 0 && sa
!= 2) {
23784 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23785 tcg_gen_ext32u_tl(t0
, t0
);
23786 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23787 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23789 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23791 default: /* Invalid */
23792 MIPS_INVAL("MASK APPEND");
23793 generate_exception_end(ctx
, EXCP_RI
);
23797 #ifdef TARGET_MIPS64
23798 case OPC_DAPPEND_DSP
:
23799 switch (MASK_DAPPEND(ctx
->opcode
)) {
23802 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23806 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23807 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23808 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23812 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23813 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23814 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23819 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23820 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23821 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23822 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23825 default: /* Invalid */
23826 MIPS_INVAL("MASK DAPPEND");
23827 generate_exception_end(ctx
, EXCP_RI
);
23836 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23837 int ret
, int v1
, int v2
, int check_ret
)
23846 if ((ret
== 0) && (check_ret
== 1)) {
23847 /* Treat as NOP. */
23851 t0
= tcg_temp_new();
23852 t1
= tcg_temp_new();
23853 v1_t
= tcg_temp_new();
23854 v2_t
= tcg_temp_new();
23856 gen_load_gpr(v1_t
, v1
);
23857 gen_load_gpr(v2_t
, v2
);
23860 case OPC_EXTR_W_DSP
:
23864 tcg_gen_movi_tl(t0
, v2
);
23865 tcg_gen_movi_tl(t1
, v1
);
23866 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23869 tcg_gen_movi_tl(t0
, v2
);
23870 tcg_gen_movi_tl(t1
, v1
);
23871 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23873 case OPC_EXTR_RS_W
:
23874 tcg_gen_movi_tl(t0
, v2
);
23875 tcg_gen_movi_tl(t1
, v1
);
23876 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23879 tcg_gen_movi_tl(t0
, v2
);
23880 tcg_gen_movi_tl(t1
, v1
);
23881 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23883 case OPC_EXTRV_S_H
:
23884 tcg_gen_movi_tl(t0
, v2
);
23885 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23888 tcg_gen_movi_tl(t0
, v2
);
23889 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23891 case OPC_EXTRV_R_W
:
23892 tcg_gen_movi_tl(t0
, v2
);
23893 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23895 case OPC_EXTRV_RS_W
:
23896 tcg_gen_movi_tl(t0
, v2
);
23897 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23900 tcg_gen_movi_tl(t0
, v2
);
23901 tcg_gen_movi_tl(t1
, v1
);
23902 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23905 tcg_gen_movi_tl(t0
, v2
);
23906 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23909 tcg_gen_movi_tl(t0
, v2
);
23910 tcg_gen_movi_tl(t1
, v1
);
23911 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23914 tcg_gen_movi_tl(t0
, v2
);
23915 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23918 imm
= (ctx
->opcode
>> 20) & 0x3F;
23919 tcg_gen_movi_tl(t0
, ret
);
23920 tcg_gen_movi_tl(t1
, imm
);
23921 gen_helper_shilo(t0
, t1
, cpu_env
);
23924 tcg_gen_movi_tl(t0
, ret
);
23925 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23928 tcg_gen_movi_tl(t0
, ret
);
23929 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23932 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23933 tcg_gen_movi_tl(t0
, imm
);
23934 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23937 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23938 tcg_gen_movi_tl(t0
, imm
);
23939 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23943 #ifdef TARGET_MIPS64
23944 case OPC_DEXTR_W_DSP
:
23948 tcg_gen_movi_tl(t0
, ret
);
23949 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23953 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23954 int ac
= (ctx
->opcode
>> 11) & 0x03;
23955 tcg_gen_movi_tl(t0
, shift
);
23956 tcg_gen_movi_tl(t1
, ac
);
23957 gen_helper_dshilo(t0
, t1
, cpu_env
);
23962 int ac
= (ctx
->opcode
>> 11) & 0x03;
23963 tcg_gen_movi_tl(t0
, ac
);
23964 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23968 tcg_gen_movi_tl(t0
, v2
);
23969 tcg_gen_movi_tl(t1
, v1
);
23971 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23974 tcg_gen_movi_tl(t0
, v2
);
23975 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23978 tcg_gen_movi_tl(t0
, v2
);
23979 tcg_gen_movi_tl(t1
, v1
);
23980 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23983 tcg_gen_movi_tl(t0
, v2
);
23984 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23987 tcg_gen_movi_tl(t0
, v2
);
23988 tcg_gen_movi_tl(t1
, v1
);
23989 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23991 case OPC_DEXTR_R_L
:
23992 tcg_gen_movi_tl(t0
, v2
);
23993 tcg_gen_movi_tl(t1
, v1
);
23994 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23996 case OPC_DEXTR_RS_L
:
23997 tcg_gen_movi_tl(t0
, v2
);
23998 tcg_gen_movi_tl(t1
, v1
);
23999 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24002 tcg_gen_movi_tl(t0
, v2
);
24003 tcg_gen_movi_tl(t1
, v1
);
24004 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24006 case OPC_DEXTR_R_W
:
24007 tcg_gen_movi_tl(t0
, v2
);
24008 tcg_gen_movi_tl(t1
, v1
);
24009 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24011 case OPC_DEXTR_RS_W
:
24012 tcg_gen_movi_tl(t0
, v2
);
24013 tcg_gen_movi_tl(t1
, v1
);
24014 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24016 case OPC_DEXTR_S_H
:
24017 tcg_gen_movi_tl(t0
, v2
);
24018 tcg_gen_movi_tl(t1
, v1
);
24019 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24021 case OPC_DEXTRV_S_H
:
24022 tcg_gen_movi_tl(t0
, v2
);
24023 tcg_gen_movi_tl(t1
, v1
);
24024 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24027 tcg_gen_movi_tl(t0
, v2
);
24028 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24030 case OPC_DEXTRV_R_L
:
24031 tcg_gen_movi_tl(t0
, v2
);
24032 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24034 case OPC_DEXTRV_RS_L
:
24035 tcg_gen_movi_tl(t0
, v2
);
24036 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24039 tcg_gen_movi_tl(t0
, v2
);
24040 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24042 case OPC_DEXTRV_R_W
:
24043 tcg_gen_movi_tl(t0
, v2
);
24044 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24046 case OPC_DEXTRV_RS_W
:
24047 tcg_gen_movi_tl(t0
, v2
);
24048 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24057 tcg_temp_free(v1_t
);
24058 tcg_temp_free(v2_t
);
24061 /* End MIPSDSP functions. */
24063 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
24065 int rs
, rt
, rd
, sa
;
24068 rs
= (ctx
->opcode
>> 21) & 0x1f;
24069 rt
= (ctx
->opcode
>> 16) & 0x1f;
24070 rd
= (ctx
->opcode
>> 11) & 0x1f;
24071 sa
= (ctx
->opcode
>> 6) & 0x1f;
24073 op1
= MASK_SPECIAL(ctx
->opcode
);
24076 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24082 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24092 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24095 MIPS_INVAL("special_r6 muldiv");
24096 generate_exception_end(ctx
, EXCP_RI
);
24102 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24106 if (rt
== 0 && sa
== 1) {
24108 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24109 * We need additionally to check other fields.
24111 gen_cl(ctx
, op1
, rd
, rs
);
24113 generate_exception_end(ctx
, EXCP_RI
);
24117 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24118 gen_helper_do_semihosting(cpu_env
);
24120 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
24121 generate_exception_end(ctx
, EXCP_RI
);
24123 generate_exception_end(ctx
, EXCP_DBp
);
24127 #if defined(TARGET_MIPS64)
24129 check_mips_64(ctx
);
24130 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24134 if (rt
== 0 && sa
== 1) {
24136 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24137 * We need additionally to check other fields.
24139 check_mips_64(ctx
);
24140 gen_cl(ctx
, op1
, rd
, rs
);
24142 generate_exception_end(ctx
, EXCP_RI
);
24150 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24160 check_mips_64(ctx
);
24161 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24164 MIPS_INVAL("special_r6 muldiv");
24165 generate_exception_end(ctx
, EXCP_RI
);
24170 default: /* Invalid */
24171 MIPS_INVAL("special_r6");
24172 generate_exception_end(ctx
, EXCP_RI
);
24177 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
24179 int rs
= extract32(ctx
->opcode
, 21, 5);
24180 int rt
= extract32(ctx
->opcode
, 16, 5);
24181 int rd
= extract32(ctx
->opcode
, 11, 5);
24182 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
24185 case OPC_MOVN
: /* Conditional move */
24187 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24189 case OPC_MFHI
: /* Move from HI/LO */
24191 gen_HILO(ctx
, op1
, 0, rd
);
24194 case OPC_MTLO
: /* Move to HI/LO */
24195 gen_HILO(ctx
, op1
, 0, rs
);
24199 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
24203 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24205 #if defined(TARGET_MIPS64)
24210 check_insn_opc_user_only(ctx
, INSN_R5900
);
24211 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24215 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24217 default: /* Invalid */
24218 MIPS_INVAL("special_tx79");
24219 generate_exception_end(ctx
, EXCP_RI
);
24224 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24226 int rs
, rt
, rd
, sa
;
24229 rs
= (ctx
->opcode
>> 21) & 0x1f;
24230 rt
= (ctx
->opcode
>> 16) & 0x1f;
24231 rd
= (ctx
->opcode
>> 11) & 0x1f;
24232 sa
= (ctx
->opcode
>> 6) & 0x1f;
24234 op1
= MASK_SPECIAL(ctx
->opcode
);
24236 case OPC_MOVN
: /* Conditional move */
24238 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
24239 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24240 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24242 case OPC_MFHI
: /* Move from HI/LO */
24244 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24247 case OPC_MTLO
: /* Move to HI/LO */
24248 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24251 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24252 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24253 check_cp1_enabled(ctx
);
24254 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24255 (ctx
->opcode
>> 16) & 1);
24257 generate_exception_err(ctx
, EXCP_CpU
, 1);
24263 check_insn(ctx
, INSN_VR54XX
);
24264 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24265 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24267 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24272 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24274 #if defined(TARGET_MIPS64)
24279 check_insn(ctx
, ISA_MIPS3
);
24280 check_mips_64(ctx
);
24281 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24285 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24288 #ifdef MIPS_STRICT_STANDARD
24289 MIPS_INVAL("SPIM");
24290 generate_exception_end(ctx
, EXCP_RI
);
24292 /* Implemented as RI exception for now. */
24293 MIPS_INVAL("spim (unofficial)");
24294 generate_exception_end(ctx
, EXCP_RI
);
24297 default: /* Invalid */
24298 MIPS_INVAL("special_legacy");
24299 generate_exception_end(ctx
, EXCP_RI
);
24304 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24306 int rs
, rt
, rd
, sa
;
24309 rs
= (ctx
->opcode
>> 21) & 0x1f;
24310 rt
= (ctx
->opcode
>> 16) & 0x1f;
24311 rd
= (ctx
->opcode
>> 11) & 0x1f;
24312 sa
= (ctx
->opcode
>> 6) & 0x1f;
24314 op1
= MASK_SPECIAL(ctx
->opcode
);
24316 case OPC_SLL
: /* Shift with immediate */
24317 if (sa
== 5 && rd
== 0 &&
24318 rs
== 0 && rt
== 0) { /* PAUSE */
24319 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24320 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24321 generate_exception_end(ctx
, EXCP_RI
);
24327 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24330 switch ((ctx
->opcode
>> 21) & 0x1f) {
24332 /* rotr is decoded as srl on non-R2 CPUs */
24333 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24338 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24341 generate_exception_end(ctx
, EXCP_RI
);
24349 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24351 case OPC_SLLV
: /* Shifts */
24353 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24356 switch ((ctx
->opcode
>> 6) & 0x1f) {
24358 /* rotrv is decoded as srlv on non-R2 CPUs */
24359 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24364 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24367 generate_exception_end(ctx
, EXCP_RI
);
24371 case OPC_SLT
: /* Set on less than */
24373 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24375 case OPC_AND
: /* Logic*/
24379 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24382 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24384 case OPC_TGE
: /* Traps */
24390 check_insn(ctx
, ISA_MIPS2
);
24391 gen_trap(ctx
, op1
, rs
, rt
, -1);
24393 case OPC_LSA
: /* OPC_PMON */
24394 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24395 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24396 decode_opc_special_r6(env
, ctx
);
24398 /* Pmon entry point, also R4010 selsl */
24399 #ifdef MIPS_STRICT_STANDARD
24400 MIPS_INVAL("PMON / selsl");
24401 generate_exception_end(ctx
, EXCP_RI
);
24403 gen_helper_0e0i(pmon
, sa
);
24408 generate_exception_end(ctx
, EXCP_SYSCALL
);
24411 generate_exception_end(ctx
, EXCP_BREAK
);
24414 check_insn(ctx
, ISA_MIPS2
);
24415 gen_sync(extract32(ctx
->opcode
, 6, 5));
24418 #if defined(TARGET_MIPS64)
24419 /* MIPS64 specific opcodes */
24424 check_insn(ctx
, ISA_MIPS3
);
24425 check_mips_64(ctx
);
24426 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24429 switch ((ctx
->opcode
>> 21) & 0x1f) {
24431 /* drotr is decoded as dsrl on non-R2 CPUs */
24432 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24437 check_insn(ctx
, ISA_MIPS3
);
24438 check_mips_64(ctx
);
24439 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24442 generate_exception_end(ctx
, EXCP_RI
);
24447 switch ((ctx
->opcode
>> 21) & 0x1f) {
24449 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24450 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24455 check_insn(ctx
, ISA_MIPS3
);
24456 check_mips_64(ctx
);
24457 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24460 generate_exception_end(ctx
, EXCP_RI
);
24468 check_insn(ctx
, ISA_MIPS3
);
24469 check_mips_64(ctx
);
24470 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24474 check_insn(ctx
, ISA_MIPS3
);
24475 check_mips_64(ctx
);
24476 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24479 switch ((ctx
->opcode
>> 6) & 0x1f) {
24481 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24482 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24487 check_insn(ctx
, ISA_MIPS3
);
24488 check_mips_64(ctx
);
24489 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24492 generate_exception_end(ctx
, EXCP_RI
);
24497 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24498 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24499 decode_opc_special_r6(env
, ctx
);
24504 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24505 decode_opc_special_r6(env
, ctx
);
24506 } else if (ctx
->insn_flags
& INSN_R5900
) {
24507 decode_opc_special_tx79(env
, ctx
);
24509 decode_opc_special_legacy(env
, ctx
);
24515 #if defined(TARGET_MIPS64)
24519 * MMI (MultiMedia Interface) ASE instructions
24520 * ===========================================
24524 * MMI instructions category: data communication
24525 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24527 * PCPYH PEXCH PEXTLB PINTH PPACB PEXT5 PREVH
24528 * PCPYLD PEXCW PEXTLH PINTEH PPACH PPAC5 PROT3W
24529 * PCPYUD PEXEH PEXTLW PPACW
24538 * Parallel Copy Halfword
24540 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24541 * +-----------+---------+---------+---------+---------+-----------+
24542 * | MMI |0 0 0 0 0| rt | rd | PCPYH | MMI3 |
24543 * +-----------+---------+---------+---------+---------+-----------+
24545 static void gen_mmi_pcpyh(DisasContext
*ctx
)
24547 uint32_t pd
, rt
, rd
;
24550 opcode
= ctx
->opcode
;
24552 pd
= extract32(opcode
, 21, 5);
24553 rt
= extract32(opcode
, 16, 5);
24554 rd
= extract32(opcode
, 11, 5);
24556 if (unlikely(pd
!= 0)) {
24557 generate_exception_end(ctx
, EXCP_RI
);
24558 } else if (rd
== 0) {
24560 } else if (rt
== 0) {
24561 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24562 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24564 TCGv_i64 t0
= tcg_temp_new();
24565 TCGv_i64 t1
= tcg_temp_new();
24566 uint64_t mask
= (1ULL << 16) - 1;
24568 tcg_gen_andi_i64(t0
, cpu_gpr
[rt
], mask
);
24569 tcg_gen_movi_i64(t1
, 0);
24570 tcg_gen_or_i64(t1
, t0
, t1
);
24571 tcg_gen_shli_i64(t0
, t0
, 16);
24572 tcg_gen_or_i64(t1
, t0
, t1
);
24573 tcg_gen_shli_i64(t0
, t0
, 16);
24574 tcg_gen_or_i64(t1
, t0
, t1
);
24575 tcg_gen_shli_i64(t0
, t0
, 16);
24576 tcg_gen_or_i64(t1
, t0
, t1
);
24578 tcg_gen_mov_i64(cpu_gpr
[rd
], t1
);
24580 tcg_gen_andi_i64(t0
, cpu_mmr
[rt
], mask
);
24581 tcg_gen_movi_i64(t1
, 0);
24582 tcg_gen_or_i64(t1
, t0
, t1
);
24583 tcg_gen_shli_i64(t0
, t0
, 16);
24584 tcg_gen_or_i64(t1
, t0
, t1
);
24585 tcg_gen_shli_i64(t0
, t0
, 16);
24586 tcg_gen_or_i64(t1
, t0
, t1
);
24587 tcg_gen_shli_i64(t0
, t0
, 16);
24588 tcg_gen_or_i64(t1
, t0
, t1
);
24590 tcg_gen_mov_i64(cpu_mmr
[rd
], t1
);
24598 * PCPYLD rd, rs, rt
24600 * Parallel Copy Lower Doubleword
24602 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24603 * +-----------+---------+---------+---------+---------+-----------+
24604 * | MMI | rs | rt | rd | PCPYLD | MMI2 |
24605 * +-----------+---------+---------+---------+---------+-----------+
24607 static void gen_mmi_pcpyld(DisasContext
*ctx
)
24609 uint32_t rs
, rt
, rd
;
24612 opcode
= ctx
->opcode
;
24614 rs
= extract32(opcode
, 21, 5);
24615 rt
= extract32(opcode
, 16, 5);
24616 rd
= extract32(opcode
, 11, 5);
24622 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24624 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_gpr
[rs
]);
24627 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24630 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_gpr
[rt
]);
24637 * PCPYUD rd, rs, rt
24639 * Parallel Copy Upper Doubleword
24641 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24642 * +-----------+---------+---------+---------+---------+-----------+
24643 * | MMI | rs | rt | rd | PCPYUD | MMI3 |
24644 * +-----------+---------+---------+---------+---------+-----------+
24646 static void gen_mmi_pcpyud(DisasContext
*ctx
)
24648 uint32_t rs
, rt
, rd
;
24651 opcode
= ctx
->opcode
;
24653 rs
= extract32(opcode
, 21, 5);
24654 rt
= extract32(opcode
, 16, 5);
24655 rd
= extract32(opcode
, 11, 5);
24661 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24663 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_mmr
[rs
]);
24666 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24669 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_mmr
[rt
]);
24678 #if !defined(TARGET_MIPS64)
24680 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24681 #define MXU_APTN1_A 0
24682 #define MXU_APTN1_S 1
24684 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24685 #define MXU_APTN2_AA 0
24686 #define MXU_APTN2_AS 1
24687 #define MXU_APTN2_SA 2
24688 #define MXU_APTN2_SS 3
24690 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24691 #define MXU_EPTN2_AA 0
24692 #define MXU_EPTN2_AS 1
24693 #define MXU_EPTN2_SA 2
24694 #define MXU_EPTN2_SS 3
24696 /* MXU operand getting pattern 'optn2' */
24697 #define MXU_OPTN2_PTN0 0
24698 #define MXU_OPTN2_PTN1 1
24699 #define MXU_OPTN2_PTN2 2
24700 #define MXU_OPTN2_PTN3 3
24701 /* alternative naming scheme for 'optn2' */
24702 #define MXU_OPTN2_WW 0
24703 #define MXU_OPTN2_LW 1
24704 #define MXU_OPTN2_HW 2
24705 #define MXU_OPTN2_XW 3
24707 /* MXU operand getting pattern 'optn3' */
24708 #define MXU_OPTN3_PTN0 0
24709 #define MXU_OPTN3_PTN1 1
24710 #define MXU_OPTN3_PTN2 2
24711 #define MXU_OPTN3_PTN3 3
24712 #define MXU_OPTN3_PTN4 4
24713 #define MXU_OPTN3_PTN5 5
24714 #define MXU_OPTN3_PTN6 6
24715 #define MXU_OPTN3_PTN7 7
24719 * S32I2M XRa, rb - Register move from GRF to XRF
24721 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24726 t0
= tcg_temp_new();
24728 XRa
= extract32(ctx
->opcode
, 6, 5);
24729 Rb
= extract32(ctx
->opcode
, 16, 5);
24731 gen_load_gpr(t0
, Rb
);
24733 gen_store_mxu_gpr(t0
, XRa
);
24734 } else if (XRa
== 16) {
24735 gen_store_mxu_cr(t0
);
24742 * S32M2I XRa, rb - Register move from XRF to GRF
24744 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24749 t0
= tcg_temp_new();
24751 XRa
= extract32(ctx
->opcode
, 6, 5);
24752 Rb
= extract32(ctx
->opcode
, 16, 5);
24755 gen_load_mxu_gpr(t0
, XRa
);
24756 } else if (XRa
== 16) {
24757 gen_load_mxu_cr(t0
);
24760 gen_store_gpr(t0
, Rb
);
24766 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24768 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24771 uint32_t XRa
, Rb
, s8
, optn3
;
24773 t0
= tcg_temp_new();
24774 t1
= tcg_temp_new();
24776 XRa
= extract32(ctx
->opcode
, 6, 4);
24777 s8
= extract32(ctx
->opcode
, 10, 8);
24778 optn3
= extract32(ctx
->opcode
, 18, 3);
24779 Rb
= extract32(ctx
->opcode
, 21, 5);
24781 gen_load_gpr(t0
, Rb
);
24782 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24785 /* XRa[7:0] = tmp8 */
24786 case MXU_OPTN3_PTN0
:
24787 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24788 gen_load_mxu_gpr(t0
, XRa
);
24789 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24791 /* XRa[15:8] = tmp8 */
24792 case MXU_OPTN3_PTN1
:
24793 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24794 gen_load_mxu_gpr(t0
, XRa
);
24795 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24797 /* XRa[23:16] = tmp8 */
24798 case MXU_OPTN3_PTN2
:
24799 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24800 gen_load_mxu_gpr(t0
, XRa
);
24801 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24803 /* XRa[31:24] = tmp8 */
24804 case MXU_OPTN3_PTN3
:
24805 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24806 gen_load_mxu_gpr(t0
, XRa
);
24807 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24809 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24810 case MXU_OPTN3_PTN4
:
24811 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24812 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24814 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24815 case MXU_OPTN3_PTN5
:
24816 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24817 tcg_gen_shli_tl(t1
, t1
, 8);
24818 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24820 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24821 case MXU_OPTN3_PTN6
:
24822 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24823 tcg_gen_mov_tl(t0
, t1
);
24824 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24825 tcg_gen_shli_tl(t1
, t1
, 16);
24826 tcg_gen_or_tl(t0
, t0
, t1
);
24828 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24829 case MXU_OPTN3_PTN7
:
24830 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24831 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24832 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24836 gen_store_mxu_gpr(t0
, XRa
);
24843 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24845 static void gen_mxu_d16mul(DisasContext
*ctx
)
24847 TCGv t0
, t1
, t2
, t3
;
24848 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24850 t0
= tcg_temp_new();
24851 t1
= tcg_temp_new();
24852 t2
= tcg_temp_new();
24853 t3
= tcg_temp_new();
24855 XRa
= extract32(ctx
->opcode
, 6, 4);
24856 XRb
= extract32(ctx
->opcode
, 10, 4);
24857 XRc
= extract32(ctx
->opcode
, 14, 4);
24858 XRd
= extract32(ctx
->opcode
, 18, 4);
24859 optn2
= extract32(ctx
->opcode
, 22, 2);
24861 gen_load_mxu_gpr(t1
, XRb
);
24862 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24863 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24864 gen_load_mxu_gpr(t3
, XRc
);
24865 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24866 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24869 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24870 tcg_gen_mul_tl(t3
, t1
, t3
);
24871 tcg_gen_mul_tl(t2
, t0
, t2
);
24873 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24874 tcg_gen_mul_tl(t3
, t0
, t3
);
24875 tcg_gen_mul_tl(t2
, t0
, t2
);
24877 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24878 tcg_gen_mul_tl(t3
, t1
, t3
);
24879 tcg_gen_mul_tl(t2
, t1
, t2
);
24881 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24882 tcg_gen_mul_tl(t3
, t0
, t3
);
24883 tcg_gen_mul_tl(t2
, t1
, t2
);
24886 gen_store_mxu_gpr(t3
, XRa
);
24887 gen_store_mxu_gpr(t2
, XRd
);
24896 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24899 static void gen_mxu_d16mac(DisasContext
*ctx
)
24901 TCGv t0
, t1
, t2
, t3
;
24902 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24904 t0
= tcg_temp_new();
24905 t1
= tcg_temp_new();
24906 t2
= tcg_temp_new();
24907 t3
= tcg_temp_new();
24909 XRa
= extract32(ctx
->opcode
, 6, 4);
24910 XRb
= extract32(ctx
->opcode
, 10, 4);
24911 XRc
= extract32(ctx
->opcode
, 14, 4);
24912 XRd
= extract32(ctx
->opcode
, 18, 4);
24913 optn2
= extract32(ctx
->opcode
, 22, 2);
24914 aptn2
= extract32(ctx
->opcode
, 24, 2);
24916 gen_load_mxu_gpr(t1
, XRb
);
24917 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24918 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24920 gen_load_mxu_gpr(t3
, XRc
);
24921 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24922 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24925 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24926 tcg_gen_mul_tl(t3
, t1
, t3
);
24927 tcg_gen_mul_tl(t2
, t0
, t2
);
24929 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24930 tcg_gen_mul_tl(t3
, t0
, t3
);
24931 tcg_gen_mul_tl(t2
, t0
, t2
);
24933 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24934 tcg_gen_mul_tl(t3
, t1
, t3
);
24935 tcg_gen_mul_tl(t2
, t1
, t2
);
24937 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24938 tcg_gen_mul_tl(t3
, t0
, t3
);
24939 tcg_gen_mul_tl(t2
, t1
, t2
);
24942 gen_load_mxu_gpr(t0
, XRa
);
24943 gen_load_mxu_gpr(t1
, XRd
);
24947 tcg_gen_add_tl(t3
, t0
, t3
);
24948 tcg_gen_add_tl(t2
, t1
, t2
);
24951 tcg_gen_add_tl(t3
, t0
, t3
);
24952 tcg_gen_sub_tl(t2
, t1
, t2
);
24955 tcg_gen_sub_tl(t3
, t0
, t3
);
24956 tcg_gen_add_tl(t2
, t1
, t2
);
24959 tcg_gen_sub_tl(t3
, t0
, t3
);
24960 tcg_gen_sub_tl(t2
, t1
, t2
);
24963 gen_store_mxu_gpr(t3
, XRa
);
24964 gen_store_mxu_gpr(t2
, XRd
);
24973 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24974 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24976 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24978 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24979 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24981 t0
= tcg_temp_new();
24982 t1
= tcg_temp_new();
24983 t2
= tcg_temp_new();
24984 t3
= tcg_temp_new();
24985 t4
= tcg_temp_new();
24986 t5
= tcg_temp_new();
24987 t6
= tcg_temp_new();
24988 t7
= tcg_temp_new();
24990 XRa
= extract32(ctx
->opcode
, 6, 4);
24991 XRb
= extract32(ctx
->opcode
, 10, 4);
24992 XRc
= extract32(ctx
->opcode
, 14, 4);
24993 XRd
= extract32(ctx
->opcode
, 18, 4);
24994 sel
= extract32(ctx
->opcode
, 22, 2);
24996 gen_load_mxu_gpr(t3
, XRb
);
24997 gen_load_mxu_gpr(t7
, XRc
);
25001 tcg_gen_ext8s_tl(t0
, t3
);
25002 tcg_gen_shri_tl(t3
, t3
, 8);
25003 tcg_gen_ext8s_tl(t1
, t3
);
25004 tcg_gen_shri_tl(t3
, t3
, 8);
25005 tcg_gen_ext8s_tl(t2
, t3
);
25006 tcg_gen_shri_tl(t3
, t3
, 8);
25007 tcg_gen_ext8s_tl(t3
, t3
);
25010 tcg_gen_ext8u_tl(t0
, t3
);
25011 tcg_gen_shri_tl(t3
, t3
, 8);
25012 tcg_gen_ext8u_tl(t1
, t3
);
25013 tcg_gen_shri_tl(t3
, t3
, 8);
25014 tcg_gen_ext8u_tl(t2
, t3
);
25015 tcg_gen_shri_tl(t3
, t3
, 8);
25016 tcg_gen_ext8u_tl(t3
, t3
);
25019 tcg_gen_ext8u_tl(t4
, t7
);
25020 tcg_gen_shri_tl(t7
, t7
, 8);
25021 tcg_gen_ext8u_tl(t5
, t7
);
25022 tcg_gen_shri_tl(t7
, t7
, 8);
25023 tcg_gen_ext8u_tl(t6
, t7
);
25024 tcg_gen_shri_tl(t7
, t7
, 8);
25025 tcg_gen_ext8u_tl(t7
, t7
);
25027 tcg_gen_mul_tl(t0
, t0
, t4
);
25028 tcg_gen_mul_tl(t1
, t1
, t5
);
25029 tcg_gen_mul_tl(t2
, t2
, t6
);
25030 tcg_gen_mul_tl(t3
, t3
, t7
);
25032 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
25033 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
25034 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
25035 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
25037 tcg_gen_shli_tl(t1
, t1
, 16);
25038 tcg_gen_shli_tl(t3
, t3
, 16);
25040 tcg_gen_or_tl(t0
, t0
, t1
);
25041 tcg_gen_or_tl(t1
, t2
, t3
);
25043 gen_store_mxu_gpr(t0
, XRd
);
25044 gen_store_mxu_gpr(t1
, XRa
);
25057 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
25058 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
25060 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
25063 uint32_t XRa
, Rb
, s12
, sel
;
25065 t0
= tcg_temp_new();
25066 t1
= tcg_temp_new();
25068 XRa
= extract32(ctx
->opcode
, 6, 4);
25069 s12
= extract32(ctx
->opcode
, 10, 10);
25070 sel
= extract32(ctx
->opcode
, 20, 1);
25071 Rb
= extract32(ctx
->opcode
, 21, 5);
25073 gen_load_gpr(t0
, Rb
);
25075 tcg_gen_movi_tl(t1
, s12
);
25076 tcg_gen_shli_tl(t1
, t1
, 2);
25078 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
25080 tcg_gen_add_tl(t1
, t0
, t1
);
25081 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
25085 tcg_gen_bswap32_tl(t1
, t1
);
25087 gen_store_mxu_gpr(t1
, XRa
);
25095 * MXU instruction category: logic
25096 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25098 * S32NOR S32AND S32OR S32XOR
25102 * S32NOR XRa, XRb, XRc
25103 * Update XRa with the result of logical bitwise 'nor' operation
25104 * applied to the content of XRb and XRc.
25106 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25107 * +-----------+---------+-----+-------+-------+-------+-----------+
25108 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25109 * +-----------+---------+-----+-------+-------+-------+-----------+
25111 static void gen_mxu_S32NOR(DisasContext
*ctx
)
25113 uint32_t pad
, XRc
, XRb
, XRa
;
25115 pad
= extract32(ctx
->opcode
, 21, 5);
25116 XRc
= extract32(ctx
->opcode
, 14, 4);
25117 XRb
= extract32(ctx
->opcode
, 10, 4);
25118 XRa
= extract32(ctx
->opcode
, 6, 4);
25120 if (unlikely(pad
!= 0)) {
25121 /* opcode padding incorrect -> do nothing */
25122 } else if (unlikely(XRa
== 0)) {
25123 /* destination is zero register -> do nothing */
25124 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25125 /* both operands zero registers -> just set destination to all 1s */
25126 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
25127 } else if (unlikely(XRb
== 0)) {
25128 /* XRb zero register -> just set destination to the negation of XRc */
25129 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25130 } else if (unlikely(XRc
== 0)) {
25131 /* XRa zero register -> just set destination to the negation of XRb */
25132 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25133 } else if (unlikely(XRb
== XRc
)) {
25134 /* both operands same -> just set destination to the negation of XRb */
25135 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25137 /* the most general case */
25138 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25143 * S32AND XRa, XRb, XRc
25144 * Update XRa with the result of logical bitwise 'and' operation
25145 * applied to the content of XRb and XRc.
25147 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25148 * +-----------+---------+-----+-------+-------+-------+-----------+
25149 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25150 * +-----------+---------+-----+-------+-------+-------+-----------+
25152 static void gen_mxu_S32AND(DisasContext
*ctx
)
25154 uint32_t pad
, XRc
, XRb
, XRa
;
25156 pad
= extract32(ctx
->opcode
, 21, 5);
25157 XRc
= extract32(ctx
->opcode
, 14, 4);
25158 XRb
= extract32(ctx
->opcode
, 10, 4);
25159 XRa
= extract32(ctx
->opcode
, 6, 4);
25161 if (unlikely(pad
!= 0)) {
25162 /* opcode padding incorrect -> do nothing */
25163 } else if (unlikely(XRa
== 0)) {
25164 /* destination is zero register -> do nothing */
25165 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25166 /* one of operands zero register -> just set destination to all 0s */
25167 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25168 } else if (unlikely(XRb
== XRc
)) {
25169 /* both operands same -> just set destination to one of them */
25170 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25172 /* the most general case */
25173 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25178 * S32OR XRa, XRb, XRc
25179 * Update XRa with the result of logical bitwise 'or' operation
25180 * applied to the content of XRb and XRc.
25182 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25183 * +-----------+---------+-----+-------+-------+-------+-----------+
25184 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25185 * +-----------+---------+-----+-------+-------+-------+-----------+
25187 static void gen_mxu_S32OR(DisasContext
*ctx
)
25189 uint32_t pad
, XRc
, XRb
, XRa
;
25191 pad
= extract32(ctx
->opcode
, 21, 5);
25192 XRc
= extract32(ctx
->opcode
, 14, 4);
25193 XRb
= extract32(ctx
->opcode
, 10, 4);
25194 XRa
= extract32(ctx
->opcode
, 6, 4);
25196 if (unlikely(pad
!= 0)) {
25197 /* opcode padding incorrect -> do nothing */
25198 } else if (unlikely(XRa
== 0)) {
25199 /* destination is zero register -> do nothing */
25200 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25201 /* both operands zero registers -> just set destination to all 0s */
25202 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25203 } else if (unlikely(XRb
== 0)) {
25204 /* XRb zero register -> just set destination to the content of XRc */
25205 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25206 } else if (unlikely(XRc
== 0)) {
25207 /* XRc zero register -> just set destination to the content of XRb */
25208 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25209 } else if (unlikely(XRb
== XRc
)) {
25210 /* both operands same -> just set destination to one of them */
25211 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25213 /* the most general case */
25214 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25219 * S32XOR XRa, XRb, XRc
25220 * Update XRa with the result of logical bitwise 'xor' operation
25221 * applied to the content of XRb and XRc.
25223 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25224 * +-----------+---------+-----+-------+-------+-------+-----------+
25225 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25226 * +-----------+---------+-----+-------+-------+-------+-----------+
25228 static void gen_mxu_S32XOR(DisasContext
*ctx
)
25230 uint32_t pad
, XRc
, XRb
, XRa
;
25232 pad
= extract32(ctx
->opcode
, 21, 5);
25233 XRc
= extract32(ctx
->opcode
, 14, 4);
25234 XRb
= extract32(ctx
->opcode
, 10, 4);
25235 XRa
= extract32(ctx
->opcode
, 6, 4);
25237 if (unlikely(pad
!= 0)) {
25238 /* opcode padding incorrect -> do nothing */
25239 } else if (unlikely(XRa
== 0)) {
25240 /* destination is zero register -> do nothing */
25241 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25242 /* both operands zero registers -> just set destination to all 0s */
25243 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25244 } else if (unlikely(XRb
== 0)) {
25245 /* XRb zero register -> just set destination to the content of XRc */
25246 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25247 } else if (unlikely(XRc
== 0)) {
25248 /* XRc zero register -> just set destination to the content of XRb */
25249 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25250 } else if (unlikely(XRb
== XRc
)) {
25251 /* both operands same -> just set destination to all 0s */
25252 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25254 /* the most general case */
25255 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25261 * MXU instruction category max/min
25262 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25264 * S32MAX D16MAX Q8MAX
25265 * S32MIN D16MIN Q8MIN
25269 * S32MAX XRa, XRb, XRc
25270 * Update XRa with the maximum of signed 32-bit integers contained
25273 * S32MIN XRa, XRb, XRc
25274 * Update XRa with the minimum of signed 32-bit integers contained
25277 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25278 * +-----------+---------+-----+-------+-------+-------+-----------+
25279 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25280 * +-----------+---------+-----+-------+-------+-------+-----------+
25282 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
25284 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25286 pad
= extract32(ctx
->opcode
, 21, 5);
25287 opc
= extract32(ctx
->opcode
, 18, 3);
25288 XRc
= extract32(ctx
->opcode
, 14, 4);
25289 XRb
= extract32(ctx
->opcode
, 10, 4);
25290 XRa
= extract32(ctx
->opcode
, 6, 4);
25292 if (unlikely(pad
!= 0)) {
25293 /* opcode padding incorrect -> do nothing */
25294 } else if (unlikely(XRa
== 0)) {
25295 /* destination is zero register -> do nothing */
25296 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25297 /* both operands zero registers -> just set destination to zero */
25298 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25299 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25300 /* exactly one operand is zero register - find which one is not...*/
25301 uint32_t XRx
= XRb
? XRb
: XRc
;
25302 /* ...and do max/min operation with one operand 0 */
25303 if (opc
== OPC_MXU_S32MAX
) {
25304 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25306 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25308 } else if (unlikely(XRb
== XRc
)) {
25309 /* both operands same -> just set destination to one of them */
25310 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25312 /* the most general case */
25313 if (opc
== OPC_MXU_S32MAX
) {
25314 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25317 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25325 * Update XRa with the 16-bit-wise maximums of signed integers
25326 * contained in XRb and XRc.
25329 * Update XRa with the 16-bit-wise minimums of signed integers
25330 * contained in XRb and XRc.
25332 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25333 * +-----------+---------+-----+-------+-------+-------+-----------+
25334 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25335 * +-----------+---------+-----+-------+-------+-------+-----------+
25337 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
25339 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25341 pad
= extract32(ctx
->opcode
, 21, 5);
25342 opc
= extract32(ctx
->opcode
, 18, 3);
25343 XRc
= extract32(ctx
->opcode
, 14, 4);
25344 XRb
= extract32(ctx
->opcode
, 10, 4);
25345 XRa
= extract32(ctx
->opcode
, 6, 4);
25347 if (unlikely(pad
!= 0)) {
25348 /* opcode padding incorrect -> do nothing */
25349 } else if (unlikely(XRc
== 0)) {
25350 /* destination is zero register -> do nothing */
25351 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
25352 /* both operands zero registers -> just set destination to zero */
25353 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
25354 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
25355 /* exactly one operand is zero register - find which one is not...*/
25356 uint32_t XRx
= XRb
? XRb
: XRc
;
25357 /* ...and do half-word-wise max/min with one operand 0 */
25358 TCGv_i32 t0
= tcg_temp_new();
25359 TCGv_i32 t1
= tcg_const_i32(0);
25361 /* the left half-word first */
25362 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
25363 if (opc
== OPC_MXU_D16MAX
) {
25364 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25366 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25369 /* the right half-word */
25370 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
25371 /* move half-words to the leftmost position */
25372 tcg_gen_shli_i32(t0
, t0
, 16);
25373 /* t0 will be max/min of t0 and t1 */
25374 if (opc
== OPC_MXU_D16MAX
) {
25375 tcg_gen_smax_i32(t0
, t0
, t1
);
25377 tcg_gen_smin_i32(t0
, t0
, t1
);
25379 /* return resulting half-words to its original position */
25380 tcg_gen_shri_i32(t0
, t0
, 16);
25381 /* finaly update the destination */
25382 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25386 } else if (unlikely(XRb
== XRc
)) {
25387 /* both operands same -> just set destination to one of them */
25388 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25390 /* the most general case */
25391 TCGv_i32 t0
= tcg_temp_new();
25392 TCGv_i32 t1
= tcg_temp_new();
25394 /* the left half-word first */
25395 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25396 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25397 if (opc
== OPC_MXU_D16MAX
) {
25398 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25400 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25403 /* the right half-word */
25404 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25405 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25406 /* move half-words to the leftmost position */
25407 tcg_gen_shli_i32(t0
, t0
, 16);
25408 tcg_gen_shli_i32(t1
, t1
, 16);
25409 /* t0 will be max/min of t0 and t1 */
25410 if (opc
== OPC_MXU_D16MAX
) {
25411 tcg_gen_smax_i32(t0
, t0
, t1
);
25413 tcg_gen_smin_i32(t0
, t0
, t1
);
25415 /* return resulting half-words to its original position */
25416 tcg_gen_shri_i32(t0
, t0
, 16);
25417 /* finaly update the destination */
25418 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25427 * Update XRa with the 8-bit-wise maximums of signed integers
25428 * contained in XRb and XRc.
25431 * Update XRa with the 8-bit-wise minimums of signed integers
25432 * contained in XRb and XRc.
25434 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25435 * +-----------+---------+-----+-------+-------+-------+-----------+
25436 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25437 * +-----------+---------+-----+-------+-------+-------+-----------+
25439 static void gen_mxu_Q8MAX_Q8MIN(DisasContext
*ctx
)
25441 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25443 pad
= extract32(ctx
->opcode
, 21, 5);
25444 opc
= extract32(ctx
->opcode
, 18, 3);
25445 XRc
= extract32(ctx
->opcode
, 14, 4);
25446 XRb
= extract32(ctx
->opcode
, 10, 4);
25447 XRa
= extract32(ctx
->opcode
, 6, 4);
25449 if (unlikely(pad
!= 0)) {
25450 /* opcode padding incorrect -> do nothing */
25451 } else if (unlikely(XRa
== 0)) {
25452 /* destination is zero register -> do nothing */
25453 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25454 /* both operands zero registers -> just set destination to zero */
25455 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25456 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25457 /* exactly one operand is zero register - make it be the first...*/
25458 uint32_t XRx
= XRb
? XRb
: XRc
;
25459 /* ...and do byte-wise max/min with one operand 0 */
25460 TCGv_i32 t0
= tcg_temp_new();
25461 TCGv_i32 t1
= tcg_const_i32(0);
25464 /* the leftmost byte (byte 3) first */
25465 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF000000);
25466 if (opc
== OPC_MXU_Q8MAX
) {
25467 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25469 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25472 /* bytes 2, 1, 0 */
25473 for (i
= 2; i
>= 0; i
--) {
25474 /* extract the byte */
25475 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF << (8 * i
));
25476 /* move the byte to the leftmost position */
25477 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25478 /* t0 will be max/min of t0 and t1 */
25479 if (opc
== OPC_MXU_Q8MAX
) {
25480 tcg_gen_smax_i32(t0
, t0
, t1
);
25482 tcg_gen_smin_i32(t0
, t0
, t1
);
25484 /* return resulting byte to its original position */
25485 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25486 /* finaly update the destination */
25487 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25492 } else if (unlikely(XRb
== XRc
)) {
25493 /* both operands same -> just set destination to one of them */
25494 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25496 /* the most general case */
25497 TCGv_i32 t0
= tcg_temp_new();
25498 TCGv_i32 t1
= tcg_temp_new();
25501 /* the leftmost bytes (bytes 3) first */
25502 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF000000);
25503 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25504 if (opc
== OPC_MXU_Q8MAX
) {
25505 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25507 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25510 /* bytes 2, 1, 0 */
25511 for (i
= 2; i
>= 0; i
--) {
25512 /* extract corresponding bytes */
25513 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF << (8 * i
));
25514 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF << (8 * i
));
25515 /* move the bytes to the leftmost position */
25516 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25517 tcg_gen_shli_i32(t1
, t1
, 8 * (3 - i
));
25518 /* t0 will be max/min of t0 and t1 */
25519 if (opc
== OPC_MXU_Q8MAX
) {
25520 tcg_gen_smax_i32(t0
, t0
, t1
);
25522 tcg_gen_smin_i32(t0
, t0
, t1
);
25524 /* return resulting byte to its original position */
25525 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25526 /* finaly update the destination */
25527 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25537 * MXU instruction category: align
25538 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25544 * S32ALNI XRc, XRb, XRa, optn3
25545 * Arrange bytes from XRb and XRc according to one of five sets of
25546 * rules determined by optn3, and place the result in XRa.
25548 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25549 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25550 * | SPECIAL2 |optn3|0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25551 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25554 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25556 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25558 optn3
= extract32(ctx
->opcode
, 23, 3);
25559 pad
= extract32(ctx
->opcode
, 21, 2);
25560 XRc
= extract32(ctx
->opcode
, 14, 4);
25561 XRb
= extract32(ctx
->opcode
, 10, 4);
25562 XRa
= extract32(ctx
->opcode
, 6, 4);
25564 if (unlikely(pad
!= 0)) {
25565 /* opcode padding incorrect -> do nothing */
25566 } else if (unlikely(XRa
== 0)) {
25567 /* destination is zero register -> do nothing */
25568 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25569 /* both operands zero registers -> just set destination to all 0s */
25570 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25571 } else if (unlikely(XRb
== 0)) {
25572 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25574 case MXU_OPTN3_PTN0
:
25575 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25577 case MXU_OPTN3_PTN1
:
25578 case MXU_OPTN3_PTN2
:
25579 case MXU_OPTN3_PTN3
:
25580 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25583 case MXU_OPTN3_PTN4
:
25584 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25587 } else if (unlikely(XRc
== 0)) {
25588 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25590 case MXU_OPTN3_PTN0
:
25591 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25593 case MXU_OPTN3_PTN1
:
25594 case MXU_OPTN3_PTN2
:
25595 case MXU_OPTN3_PTN3
:
25596 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25598 case MXU_OPTN3_PTN4
:
25599 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25602 } else if (unlikely(XRb
== XRc
)) {
25603 /* both operands same -> just rotation or moving from any of them */
25605 case MXU_OPTN3_PTN0
:
25606 case MXU_OPTN3_PTN4
:
25607 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25609 case MXU_OPTN3_PTN1
:
25610 case MXU_OPTN3_PTN2
:
25611 case MXU_OPTN3_PTN3
:
25612 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25616 /* the most general case */
25618 case MXU_OPTN3_PTN0
:
25622 /* +---------------+ */
25623 /* | A B C D | E F G H */
25624 /* +-------+-------+ */
25629 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25632 case MXU_OPTN3_PTN1
:
25636 /* +-------------------+ */
25637 /* A | B C D E | F G H */
25638 /* +---------+---------+ */
25643 TCGv_i32 t0
= tcg_temp_new();
25644 TCGv_i32 t1
= tcg_temp_new();
25646 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25647 tcg_gen_shli_i32(t0
, t0
, 8);
25649 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25650 tcg_gen_shri_i32(t1
, t1
, 24);
25652 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25658 case MXU_OPTN3_PTN2
:
25662 /* +-------------------+ */
25663 /* A B | C D E F | G H */
25664 /* +---------+---------+ */
25669 TCGv_i32 t0
= tcg_temp_new();
25670 TCGv_i32 t1
= tcg_temp_new();
25672 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25673 tcg_gen_shli_i32(t0
, t0
, 16);
25675 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25676 tcg_gen_shri_i32(t1
, t1
, 16);
25678 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25684 case MXU_OPTN3_PTN3
:
25688 /* +-------------------+ */
25689 /* A B C | D E F G | H */
25690 /* +---------+---------+ */
25695 TCGv_i32 t0
= tcg_temp_new();
25696 TCGv_i32 t1
= tcg_temp_new();
25698 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25699 tcg_gen_shli_i32(t0
, t0
, 24);
25701 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25702 tcg_gen_shri_i32(t1
, t1
, 8);
25704 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25710 case MXU_OPTN3_PTN4
:
25714 /* +---------------+ */
25715 /* A B C D | E F G H | */
25716 /* +-------+-------+ */
25721 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25730 * Decoding engine for MXU
25731 * =======================
25736 * Decode MXU pool00
25738 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25739 * +-----------+---------+-----+-------+-------+-------+-----------+
25740 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25741 * +-----------+---------+-----+-------+-------+-------+-----------+
25744 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25746 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25749 case OPC_MXU_S32MAX
:
25750 case OPC_MXU_S32MIN
:
25751 gen_mxu_S32MAX_S32MIN(ctx
);
25753 case OPC_MXU_D16MAX
:
25754 case OPC_MXU_D16MIN
:
25755 gen_mxu_D16MAX_D16MIN(ctx
);
25757 case OPC_MXU_Q8MAX
:
25758 case OPC_MXU_Q8MIN
:
25759 gen_mxu_Q8MAX_Q8MIN(ctx
);
25761 case OPC_MXU_Q8SLT
:
25762 /* TODO: Implement emulation of Q8SLT instruction. */
25763 MIPS_INVAL("OPC_MXU_Q8SLT");
25764 generate_exception_end(ctx
, EXCP_RI
);
25766 case OPC_MXU_Q8SLTU
:
25767 /* TODO: Implement emulation of Q8SLTU instruction. */
25768 MIPS_INVAL("OPC_MXU_Q8SLTU");
25769 generate_exception_end(ctx
, EXCP_RI
);
25772 MIPS_INVAL("decode_opc_mxu");
25773 generate_exception_end(ctx
, EXCP_RI
);
25780 * Decode MXU pool01
25782 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25783 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25784 * +-----------+---------+-----+-------+-------+-------+-----------+
25785 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25786 * +-----------+---------+-----+-------+-------+-------+-----------+
25789 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25790 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25791 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25792 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25795 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25797 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25800 case OPC_MXU_S32SLT
:
25801 /* TODO: Implement emulation of S32SLT instruction. */
25802 MIPS_INVAL("OPC_MXU_S32SLT");
25803 generate_exception_end(ctx
, EXCP_RI
);
25805 case OPC_MXU_D16SLT
:
25806 /* TODO: Implement emulation of D16SLT instruction. */
25807 MIPS_INVAL("OPC_MXU_D16SLT");
25808 generate_exception_end(ctx
, EXCP_RI
);
25810 case OPC_MXU_D16AVG
:
25811 /* TODO: Implement emulation of D16AVG instruction. */
25812 MIPS_INVAL("OPC_MXU_D16AVG");
25813 generate_exception_end(ctx
, EXCP_RI
);
25815 case OPC_MXU_D16AVGR
:
25816 /* TODO: Implement emulation of D16AVGR instruction. */
25817 MIPS_INVAL("OPC_MXU_D16AVGR");
25818 generate_exception_end(ctx
, EXCP_RI
);
25820 case OPC_MXU_Q8AVG
:
25821 /* TODO: Implement emulation of Q8AVG instruction. */
25822 MIPS_INVAL("OPC_MXU_Q8AVG");
25823 generate_exception_end(ctx
, EXCP_RI
);
25825 case OPC_MXU_Q8AVGR
:
25826 /* TODO: Implement emulation of Q8AVGR instruction. */
25827 MIPS_INVAL("OPC_MXU_Q8AVGR");
25828 generate_exception_end(ctx
, EXCP_RI
);
25830 case OPC_MXU_Q8ADD
:
25831 /* TODO: Implement emulation of Q8ADD instruction. */
25832 MIPS_INVAL("OPC_MXU_Q8ADD");
25833 generate_exception_end(ctx
, EXCP_RI
);
25836 MIPS_INVAL("decode_opc_mxu");
25837 generate_exception_end(ctx
, EXCP_RI
);
25844 * Decode MXU pool02
25846 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25847 * +-----------+---------+-----+-------+-------+-------+-----------+
25848 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25849 * +-----------+---------+-----+-------+-------+-------+-----------+
25852 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25854 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25857 case OPC_MXU_S32CPS
:
25858 /* TODO: Implement emulation of S32CPS instruction. */
25859 MIPS_INVAL("OPC_MXU_S32CPS");
25860 generate_exception_end(ctx
, EXCP_RI
);
25862 case OPC_MXU_D16CPS
:
25863 /* TODO: Implement emulation of D16CPS instruction. */
25864 MIPS_INVAL("OPC_MXU_D16CPS");
25865 generate_exception_end(ctx
, EXCP_RI
);
25867 case OPC_MXU_Q8ABD
:
25868 /* TODO: Implement emulation of Q8ABD instruction. */
25869 MIPS_INVAL("OPC_MXU_Q8ABD");
25870 generate_exception_end(ctx
, EXCP_RI
);
25872 case OPC_MXU_Q16SAT
:
25873 /* TODO: Implement emulation of Q16SAT instruction. */
25874 MIPS_INVAL("OPC_MXU_Q16SAT");
25875 generate_exception_end(ctx
, EXCP_RI
);
25878 MIPS_INVAL("decode_opc_mxu");
25879 generate_exception_end(ctx
, EXCP_RI
);
25886 * Decode MXU pool03
25889 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25890 * +-----------+---+---+-------+-------+-------+-------+-----------+
25891 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25892 * +-----------+---+---+-------+-------+-------+-------+-----------+
25895 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25896 * +-----------+---+---+-------+-------+-------+-------+-----------+
25897 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25898 * +-----------+---+---+-------+-------+-------+-------+-----------+
25901 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25903 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25906 case OPC_MXU_D16MULF
:
25907 /* TODO: Implement emulation of D16MULF instruction. */
25908 MIPS_INVAL("OPC_MXU_D16MULF");
25909 generate_exception_end(ctx
, EXCP_RI
);
25911 case OPC_MXU_D16MULE
:
25912 /* TODO: Implement emulation of D16MULE instruction. */
25913 MIPS_INVAL("OPC_MXU_D16MULE");
25914 generate_exception_end(ctx
, EXCP_RI
);
25917 MIPS_INVAL("decode_opc_mxu");
25918 generate_exception_end(ctx
, EXCP_RI
);
25925 * Decode MXU pool04
25927 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25928 * +-----------+---------+-+-------------------+-------+-----------+
25929 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25930 * +-----------+---------+-+-------------------+-------+-----------+
25933 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25935 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25938 case OPC_MXU_S32LDD
:
25939 case OPC_MXU_S32LDDR
:
25940 gen_mxu_s32ldd_s32lddr(ctx
);
25943 MIPS_INVAL("decode_opc_mxu");
25944 generate_exception_end(ctx
, EXCP_RI
);
25951 * Decode MXU pool05
25953 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25954 * +-----------+---------+-+-------------------+-------+-----------+
25955 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25956 * +-----------+---------+-+-------------------+-------+-----------+
25959 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25961 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25964 case OPC_MXU_S32STD
:
25965 /* TODO: Implement emulation of S32STD instruction. */
25966 MIPS_INVAL("OPC_MXU_S32STD");
25967 generate_exception_end(ctx
, EXCP_RI
);
25969 case OPC_MXU_S32STDR
:
25970 /* TODO: Implement emulation of S32STDR instruction. */
25971 MIPS_INVAL("OPC_MXU_S32STDR");
25972 generate_exception_end(ctx
, EXCP_RI
);
25975 MIPS_INVAL("decode_opc_mxu");
25976 generate_exception_end(ctx
, EXCP_RI
);
25983 * Decode MXU pool06
25985 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25986 * +-----------+---------+---------+---+-------+-------+-----------+
25987 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25988 * +-----------+---------+---------+---+-------+-------+-----------+
25991 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25993 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25996 case OPC_MXU_S32LDDV
:
25997 /* TODO: Implement emulation of S32LDDV instruction. */
25998 MIPS_INVAL("OPC_MXU_S32LDDV");
25999 generate_exception_end(ctx
, EXCP_RI
);
26001 case OPC_MXU_S32LDDVR
:
26002 /* TODO: Implement emulation of S32LDDVR instruction. */
26003 MIPS_INVAL("OPC_MXU_S32LDDVR");
26004 generate_exception_end(ctx
, EXCP_RI
);
26007 MIPS_INVAL("decode_opc_mxu");
26008 generate_exception_end(ctx
, EXCP_RI
);
26015 * Decode MXU pool07
26017 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26018 * +-----------+---------+---------+---+-------+-------+-----------+
26019 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
26020 * +-----------+---------+---------+---+-------+-------+-----------+
26023 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
26025 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
26028 case OPC_MXU_S32STDV
:
26029 /* TODO: Implement emulation of S32TDV instruction. */
26030 MIPS_INVAL("OPC_MXU_S32TDV");
26031 generate_exception_end(ctx
, EXCP_RI
);
26033 case OPC_MXU_S32STDVR
:
26034 /* TODO: Implement emulation of S32TDVR instruction. */
26035 MIPS_INVAL("OPC_MXU_S32TDVR");
26036 generate_exception_end(ctx
, EXCP_RI
);
26039 MIPS_INVAL("decode_opc_mxu");
26040 generate_exception_end(ctx
, EXCP_RI
);
26047 * Decode MXU pool08
26049 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26050 * +-----------+---------+-+-------------------+-------+-----------+
26051 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
26052 * +-----------+---------+-+-------------------+-------+-----------+
26055 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
26057 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
26060 case OPC_MXU_S32LDI
:
26061 /* TODO: Implement emulation of S32LDI instruction. */
26062 MIPS_INVAL("OPC_MXU_S32LDI");
26063 generate_exception_end(ctx
, EXCP_RI
);
26065 case OPC_MXU_S32LDIR
:
26066 /* TODO: Implement emulation of S32LDIR instruction. */
26067 MIPS_INVAL("OPC_MXU_S32LDIR");
26068 generate_exception_end(ctx
, EXCP_RI
);
26071 MIPS_INVAL("decode_opc_mxu");
26072 generate_exception_end(ctx
, EXCP_RI
);
26079 * Decode MXU pool09
26081 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26082 * +-----------+---------+-+-------------------+-------+-----------+
26083 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
26084 * +-----------+---------+-+-------------------+-------+-----------+
26087 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
26089 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
26092 case OPC_MXU_S32SDI
:
26093 /* TODO: Implement emulation of S32SDI instruction. */
26094 MIPS_INVAL("OPC_MXU_S32SDI");
26095 generate_exception_end(ctx
, EXCP_RI
);
26097 case OPC_MXU_S32SDIR
:
26098 /* TODO: Implement emulation of S32SDIR instruction. */
26099 MIPS_INVAL("OPC_MXU_S32SDIR");
26100 generate_exception_end(ctx
, EXCP_RI
);
26103 MIPS_INVAL("decode_opc_mxu");
26104 generate_exception_end(ctx
, EXCP_RI
);
26111 * Decode MXU pool10
26113 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26114 * +-----------+---------+---------+---+-------+-------+-----------+
26115 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
26116 * +-----------+---------+---------+---+-------+-------+-----------+
26119 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
26121 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
26124 case OPC_MXU_S32LDIV
:
26125 /* TODO: Implement emulation of S32LDIV instruction. */
26126 MIPS_INVAL("OPC_MXU_S32LDIV");
26127 generate_exception_end(ctx
, EXCP_RI
);
26129 case OPC_MXU_S32LDIVR
:
26130 /* TODO: Implement emulation of S32LDIVR instruction. */
26131 MIPS_INVAL("OPC_MXU_S32LDIVR");
26132 generate_exception_end(ctx
, EXCP_RI
);
26135 MIPS_INVAL("decode_opc_mxu");
26136 generate_exception_end(ctx
, EXCP_RI
);
26143 * Decode MXU pool11
26145 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26146 * +-----------+---------+---------+---+-------+-------+-----------+
26147 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
26148 * +-----------+---------+---------+---+-------+-------+-----------+
26151 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
26153 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
26156 case OPC_MXU_S32SDIV
:
26157 /* TODO: Implement emulation of S32SDIV instruction. */
26158 MIPS_INVAL("OPC_MXU_S32SDIV");
26159 generate_exception_end(ctx
, EXCP_RI
);
26161 case OPC_MXU_S32SDIVR
:
26162 /* TODO: Implement emulation of S32SDIVR instruction. */
26163 MIPS_INVAL("OPC_MXU_S32SDIVR");
26164 generate_exception_end(ctx
, EXCP_RI
);
26167 MIPS_INVAL("decode_opc_mxu");
26168 generate_exception_end(ctx
, EXCP_RI
);
26175 * Decode MXU pool12
26177 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26178 * +-----------+---+---+-------+-------+-------+-------+-----------+
26179 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
26180 * +-----------+---+---+-------+-------+-------+-------+-----------+
26183 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
26185 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26188 case OPC_MXU_D32ACC
:
26189 /* TODO: Implement emulation of D32ACC instruction. */
26190 MIPS_INVAL("OPC_MXU_D32ACC");
26191 generate_exception_end(ctx
, EXCP_RI
);
26193 case OPC_MXU_D32ACCM
:
26194 /* TODO: Implement emulation of D32ACCM instruction. */
26195 MIPS_INVAL("OPC_MXU_D32ACCM");
26196 generate_exception_end(ctx
, EXCP_RI
);
26198 case OPC_MXU_D32ASUM
:
26199 /* TODO: Implement emulation of D32ASUM instruction. */
26200 MIPS_INVAL("OPC_MXU_D32ASUM");
26201 generate_exception_end(ctx
, EXCP_RI
);
26204 MIPS_INVAL("decode_opc_mxu");
26205 generate_exception_end(ctx
, EXCP_RI
);
26212 * Decode MXU pool13
26214 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26215 * +-----------+---+---+-------+-------+-------+-------+-----------+
26216 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
26217 * +-----------+---+---+-------+-------+-------+-------+-----------+
26220 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
26222 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26225 case OPC_MXU_Q16ACC
:
26226 /* TODO: Implement emulation of Q16ACC instruction. */
26227 MIPS_INVAL("OPC_MXU_Q16ACC");
26228 generate_exception_end(ctx
, EXCP_RI
);
26230 case OPC_MXU_Q16ACCM
:
26231 /* TODO: Implement emulation of Q16ACCM instruction. */
26232 MIPS_INVAL("OPC_MXU_Q16ACCM");
26233 generate_exception_end(ctx
, EXCP_RI
);
26235 case OPC_MXU_Q16ASUM
:
26236 /* TODO: Implement emulation of Q16ASUM instruction. */
26237 MIPS_INVAL("OPC_MXU_Q16ASUM");
26238 generate_exception_end(ctx
, EXCP_RI
);
26241 MIPS_INVAL("decode_opc_mxu");
26242 generate_exception_end(ctx
, EXCP_RI
);
26249 * Decode MXU pool14
26252 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26253 * +-----------+---+---+-------+-------+-------+-------+-----------+
26254 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
26255 * +-----------+---+---+-------+-------+-------+-------+-----------+
26258 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26259 * +-----------+---+---+-------+-------+-------+-------+-----------+
26260 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
26261 * +-----------+---+---+-------+-------+-------+-------+-----------+
26264 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
26266 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26269 case OPC_MXU_Q8ADDE
:
26270 /* TODO: Implement emulation of Q8ADDE instruction. */
26271 MIPS_INVAL("OPC_MXU_Q8ADDE");
26272 generate_exception_end(ctx
, EXCP_RI
);
26274 case OPC_MXU_D8SUM
:
26275 /* TODO: Implement emulation of D8SUM instruction. */
26276 MIPS_INVAL("OPC_MXU_D8SUM");
26277 generate_exception_end(ctx
, EXCP_RI
);
26279 case OPC_MXU_D8SUMC
:
26280 /* TODO: Implement emulation of D8SUMC instruction. */
26281 MIPS_INVAL("OPC_MXU_D8SUMC");
26282 generate_exception_end(ctx
, EXCP_RI
);
26285 MIPS_INVAL("decode_opc_mxu");
26286 generate_exception_end(ctx
, EXCP_RI
);
26293 * Decode MXU pool15
26295 * S32MUL, S32MULU, S32EXTRV:
26296 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26297 * +-----------+---------+---------+---+-------+-------+-----------+
26298 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
26299 * +-----------+---------+---------+---+-------+-------+-----------+
26302 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26303 * +-----------+---------+---------+---+-------+-------+-----------+
26304 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
26305 * +-----------+---------+---------+---+-------+-------+-----------+
26308 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
26310 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
26313 case OPC_MXU_S32MUL
:
26314 /* TODO: Implement emulation of S32MUL instruction. */
26315 MIPS_INVAL("OPC_MXU_S32MUL");
26316 generate_exception_end(ctx
, EXCP_RI
);
26318 case OPC_MXU_S32MULU
:
26319 /* TODO: Implement emulation of S32MULU instruction. */
26320 MIPS_INVAL("OPC_MXU_S32MULU");
26321 generate_exception_end(ctx
, EXCP_RI
);
26323 case OPC_MXU_S32EXTR
:
26324 /* TODO: Implement emulation of S32EXTR instruction. */
26325 MIPS_INVAL("OPC_MXU_S32EXTR");
26326 generate_exception_end(ctx
, EXCP_RI
);
26328 case OPC_MXU_S32EXTRV
:
26329 /* TODO: Implement emulation of S32EXTRV instruction. */
26330 MIPS_INVAL("OPC_MXU_S32EXTRV");
26331 generate_exception_end(ctx
, EXCP_RI
);
26334 MIPS_INVAL("decode_opc_mxu");
26335 generate_exception_end(ctx
, EXCP_RI
);
26342 * Decode MXU pool16
26345 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26346 * +-----------+---------+-----+-------+-------+-------+-----------+
26347 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
26348 * +-----------+---------+-----+-------+-------+-------+-----------+
26351 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26352 * +-----------+---------+-----+-------+-------+-------+-----------+
26353 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
26354 * +-----------+---------+-----+-------+-------+-------+-----------+
26357 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26358 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26359 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26360 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26363 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26364 * +-----------+-----+---+-----+-------+---------------+-----------+
26365 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
26366 * +-----------+-----+---+-----+-------+---------------+-----------+
26368 * S32NOR, S32AND, S32OR, S32XOR:
26369 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26370 * +-----------+---------+-----+-------+-------+-------+-----------+
26371 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26372 * +-----------+---------+-----+-------+-------+-------+-----------+
26375 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
26377 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26380 case OPC_MXU_D32SARW
:
26381 /* TODO: Implement emulation of D32SARW instruction. */
26382 MIPS_INVAL("OPC_MXU_D32SARW");
26383 generate_exception_end(ctx
, EXCP_RI
);
26385 case OPC_MXU_S32ALN
:
26386 /* TODO: Implement emulation of S32ALN instruction. */
26387 MIPS_INVAL("OPC_MXU_S32ALN");
26388 generate_exception_end(ctx
, EXCP_RI
);
26390 case OPC_MXU_S32ALNI
:
26391 gen_mxu_S32ALNI(ctx
);
26393 case OPC_MXU_S32LUI
:
26394 /* TODO: Implement emulation of S32LUI instruction. */
26395 MIPS_INVAL("OPC_MXU_S32LUI");
26396 generate_exception_end(ctx
, EXCP_RI
);
26398 case OPC_MXU_S32NOR
:
26399 gen_mxu_S32NOR(ctx
);
26401 case OPC_MXU_S32AND
:
26402 gen_mxu_S32AND(ctx
);
26404 case OPC_MXU_S32OR
:
26405 gen_mxu_S32OR(ctx
);
26407 case OPC_MXU_S32XOR
:
26408 gen_mxu_S32XOR(ctx
);
26411 MIPS_INVAL("decode_opc_mxu");
26412 generate_exception_end(ctx
, EXCP_RI
);
26419 * Decode MXU pool17
26421 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26422 * +-----------+---------+---------+---+---------+-----+-----------+
26423 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL15|
26424 * +-----------+---------+---------+---+---------+-----+-----------+
26427 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
26429 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
26433 /* TODO: Implement emulation of LXW instruction. */
26434 MIPS_INVAL("OPC_MXU_LXW");
26435 generate_exception_end(ctx
, EXCP_RI
);
26438 /* TODO: Implement emulation of LXH instruction. */
26439 MIPS_INVAL("OPC_MXU_LXH");
26440 generate_exception_end(ctx
, EXCP_RI
);
26443 /* TODO: Implement emulation of LXHU instruction. */
26444 MIPS_INVAL("OPC_MXU_LXHU");
26445 generate_exception_end(ctx
, EXCP_RI
);
26448 /* TODO: Implement emulation of LXB instruction. */
26449 MIPS_INVAL("OPC_MXU_LXB");
26450 generate_exception_end(ctx
, EXCP_RI
);
26453 /* TODO: Implement emulation of LXBU instruction. */
26454 MIPS_INVAL("OPC_MXU_LXBU");
26455 generate_exception_end(ctx
, EXCP_RI
);
26458 MIPS_INVAL("decode_opc_mxu");
26459 generate_exception_end(ctx
, EXCP_RI
);
26465 * Decode MXU pool18
26467 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26468 * +-----------+---------+-----+-------+-------+-------+-----------+
26469 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
26470 * +-----------+---------+-----+-------+-------+-------+-----------+
26473 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
26475 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26478 case OPC_MXU_D32SLLV
:
26479 /* TODO: Implement emulation of D32SLLV instruction. */
26480 MIPS_INVAL("OPC_MXU_D32SLLV");
26481 generate_exception_end(ctx
, EXCP_RI
);
26483 case OPC_MXU_D32SLRV
:
26484 /* TODO: Implement emulation of D32SLRV instruction. */
26485 MIPS_INVAL("OPC_MXU_D32SLRV");
26486 generate_exception_end(ctx
, EXCP_RI
);
26488 case OPC_MXU_D32SARV
:
26489 /* TODO: Implement emulation of D32SARV instruction. */
26490 MIPS_INVAL("OPC_MXU_D32SARV");
26491 generate_exception_end(ctx
, EXCP_RI
);
26493 case OPC_MXU_Q16SLLV
:
26494 /* TODO: Implement emulation of Q16SLLV instruction. */
26495 MIPS_INVAL("OPC_MXU_Q16SLLV");
26496 generate_exception_end(ctx
, EXCP_RI
);
26498 case OPC_MXU_Q16SLRV
:
26499 /* TODO: Implement emulation of Q16SLRV instruction. */
26500 MIPS_INVAL("OPC_MXU_Q16SLRV");
26501 generate_exception_end(ctx
, EXCP_RI
);
26503 case OPC_MXU_Q16SARV
:
26504 /* TODO: Implement emulation of Q16SARV instruction. */
26505 MIPS_INVAL("OPC_MXU_Q16SARV");
26506 generate_exception_end(ctx
, EXCP_RI
);
26509 MIPS_INVAL("decode_opc_mxu");
26510 generate_exception_end(ctx
, EXCP_RI
);
26517 * Decode MXU pool19
26519 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26520 * +-----------+---+---+-------+-------+-------+-------+-----------+
26521 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
26522 * +-----------+---+---+-------+-------+-------+-------+-----------+
26525 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
26527 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26530 case OPC_MXU_Q8MUL
:
26531 case OPC_MXU_Q8MULSU
:
26532 gen_mxu_q8mul_q8mulsu(ctx
);
26535 MIPS_INVAL("decode_opc_mxu");
26536 generate_exception_end(ctx
, EXCP_RI
);
26543 * Decode MXU pool20
26545 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26546 * +-----------+---------+-----+-------+-------+-------+-----------+
26547 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
26548 * +-----------+---------+-----+-------+-------+-------+-----------+
26551 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
26553 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26556 case OPC_MXU_Q8MOVZ
:
26557 /* TODO: Implement emulation of Q8MOVZ instruction. */
26558 MIPS_INVAL("OPC_MXU_Q8MOVZ");
26559 generate_exception_end(ctx
, EXCP_RI
);
26561 case OPC_MXU_Q8MOVN
:
26562 /* TODO: Implement emulation of Q8MOVN instruction. */
26563 MIPS_INVAL("OPC_MXU_Q8MOVN");
26564 generate_exception_end(ctx
, EXCP_RI
);
26566 case OPC_MXU_D16MOVZ
:
26567 /* TODO: Implement emulation of D16MOVZ instruction. */
26568 MIPS_INVAL("OPC_MXU_D16MOVZ");
26569 generate_exception_end(ctx
, EXCP_RI
);
26571 case OPC_MXU_D16MOVN
:
26572 /* TODO: Implement emulation of D16MOVN instruction. */
26573 MIPS_INVAL("OPC_MXU_D16MOVN");
26574 generate_exception_end(ctx
, EXCP_RI
);
26576 case OPC_MXU_S32MOVZ
:
26577 /* TODO: Implement emulation of S32MOVZ instruction. */
26578 MIPS_INVAL("OPC_MXU_S32MOVZ");
26579 generate_exception_end(ctx
, EXCP_RI
);
26581 case OPC_MXU_S32MOVN
:
26582 /* TODO: Implement emulation of S32MOVN instruction. */
26583 MIPS_INVAL("OPC_MXU_S32MOVN");
26584 generate_exception_end(ctx
, EXCP_RI
);
26587 MIPS_INVAL("decode_opc_mxu");
26588 generate_exception_end(ctx
, EXCP_RI
);
26595 * Decode MXU pool21
26597 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26598 * +-----------+---+---+-------+-------+-------+-------+-----------+
26599 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26600 * +-----------+---+---+-------+-------+-------+-------+-----------+
26603 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
26605 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26608 case OPC_MXU_Q8MAC
:
26609 /* TODO: Implement emulation of Q8MAC instruction. */
26610 MIPS_INVAL("OPC_MXU_Q8MAC");
26611 generate_exception_end(ctx
, EXCP_RI
);
26613 case OPC_MXU_Q8MACSU
:
26614 /* TODO: Implement emulation of Q8MACSU instruction. */
26615 MIPS_INVAL("OPC_MXU_Q8MACSU");
26616 generate_exception_end(ctx
, EXCP_RI
);
26619 MIPS_INVAL("decode_opc_mxu");
26620 generate_exception_end(ctx
, EXCP_RI
);
26627 * Main MXU decoding function
26629 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26630 * +-----------+---------------------------------------+-----------+
26631 * | SPECIAL2 | |x x x x x x|
26632 * +-----------+---------------------------------------+-----------+
26635 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26638 * TODO: Investigate necessity of including handling of
26639 * CLZ, CLO, SDBB in this function, as they belong to
26640 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26642 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26644 if (opcode
== OPC__MXU_MUL
) {
26645 uint32_t rs
, rt
, rd
, op1
;
26647 rs
= extract32(ctx
->opcode
, 21, 5);
26648 rt
= extract32(ctx
->opcode
, 16, 5);
26649 rd
= extract32(ctx
->opcode
, 11, 5);
26650 op1
= MASK_SPECIAL2(ctx
->opcode
);
26652 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26657 if (opcode
== OPC_MXU_S32M2I
) {
26658 gen_mxu_s32m2i(ctx
);
26662 if (opcode
== OPC_MXU_S32I2M
) {
26663 gen_mxu_s32i2m(ctx
);
26668 TCGv t_mxu_cr
= tcg_temp_new();
26669 TCGLabel
*l_exit
= gen_new_label();
26671 gen_load_mxu_cr(t_mxu_cr
);
26672 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26673 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26676 case OPC_MXU_S32MADD
:
26677 /* TODO: Implement emulation of S32MADD instruction. */
26678 MIPS_INVAL("OPC_MXU_S32MADD");
26679 generate_exception_end(ctx
, EXCP_RI
);
26681 case OPC_MXU_S32MADDU
:
26682 /* TODO: Implement emulation of S32MADDU instruction. */
26683 MIPS_INVAL("OPC_MXU_S32MADDU");
26684 generate_exception_end(ctx
, EXCP_RI
);
26686 case OPC_MXU__POOL00
:
26687 decode_opc_mxu__pool00(env
, ctx
);
26689 case OPC_MXU_S32MSUB
:
26690 /* TODO: Implement emulation of S32MSUB instruction. */
26691 MIPS_INVAL("OPC_MXU_S32MSUB");
26692 generate_exception_end(ctx
, EXCP_RI
);
26694 case OPC_MXU_S32MSUBU
:
26695 /* TODO: Implement emulation of S32MSUBU instruction. */
26696 MIPS_INVAL("OPC_MXU_S32MSUBU");
26697 generate_exception_end(ctx
, EXCP_RI
);
26699 case OPC_MXU__POOL01
:
26700 decode_opc_mxu__pool01(env
, ctx
);
26702 case OPC_MXU__POOL02
:
26703 decode_opc_mxu__pool02(env
, ctx
);
26705 case OPC_MXU_D16MUL
:
26706 gen_mxu_d16mul(ctx
);
26708 case OPC_MXU__POOL03
:
26709 decode_opc_mxu__pool03(env
, ctx
);
26711 case OPC_MXU_D16MAC
:
26712 gen_mxu_d16mac(ctx
);
26714 case OPC_MXU_D16MACF
:
26715 /* TODO: Implement emulation of D16MACF instruction. */
26716 MIPS_INVAL("OPC_MXU_D16MACF");
26717 generate_exception_end(ctx
, EXCP_RI
);
26719 case OPC_MXU_D16MADL
:
26720 /* TODO: Implement emulation of D16MADL instruction. */
26721 MIPS_INVAL("OPC_MXU_D16MADL");
26722 generate_exception_end(ctx
, EXCP_RI
);
26724 case OPC_MXU_S16MAD
:
26725 /* TODO: Implement emulation of S16MAD instruction. */
26726 MIPS_INVAL("OPC_MXU_S16MAD");
26727 generate_exception_end(ctx
, EXCP_RI
);
26729 case OPC_MXU_Q16ADD
:
26730 /* TODO: Implement emulation of Q16ADD instruction. */
26731 MIPS_INVAL("OPC_MXU_Q16ADD");
26732 generate_exception_end(ctx
, EXCP_RI
);
26734 case OPC_MXU_D16MACE
:
26735 /* TODO: Implement emulation of D16MACE instruction. */
26736 MIPS_INVAL("OPC_MXU_D16MACE");
26737 generate_exception_end(ctx
, EXCP_RI
);
26739 case OPC_MXU__POOL04
:
26740 decode_opc_mxu__pool04(env
, ctx
);
26742 case OPC_MXU__POOL05
:
26743 decode_opc_mxu__pool05(env
, ctx
);
26745 case OPC_MXU__POOL06
:
26746 decode_opc_mxu__pool06(env
, ctx
);
26748 case OPC_MXU__POOL07
:
26749 decode_opc_mxu__pool07(env
, ctx
);
26751 case OPC_MXU__POOL08
:
26752 decode_opc_mxu__pool08(env
, ctx
);
26754 case OPC_MXU__POOL09
:
26755 decode_opc_mxu__pool09(env
, ctx
);
26757 case OPC_MXU__POOL10
:
26758 decode_opc_mxu__pool10(env
, ctx
);
26760 case OPC_MXU__POOL11
:
26761 decode_opc_mxu__pool11(env
, ctx
);
26763 case OPC_MXU_D32ADD
:
26764 /* TODO: Implement emulation of D32ADD instruction. */
26765 MIPS_INVAL("OPC_MXU_D32ADD");
26766 generate_exception_end(ctx
, EXCP_RI
);
26768 case OPC_MXU__POOL12
:
26769 decode_opc_mxu__pool12(env
, ctx
);
26771 case OPC_MXU__POOL13
:
26772 decode_opc_mxu__pool13(env
, ctx
);
26774 case OPC_MXU__POOL14
:
26775 decode_opc_mxu__pool14(env
, ctx
);
26777 case OPC_MXU_Q8ACCE
:
26778 /* TODO: Implement emulation of Q8ACCE instruction. */
26779 MIPS_INVAL("OPC_MXU_Q8ACCE");
26780 generate_exception_end(ctx
, EXCP_RI
);
26782 case OPC_MXU_S8LDD
:
26783 gen_mxu_s8ldd(ctx
);
26785 case OPC_MXU_S8STD
:
26786 /* TODO: Implement emulation of S8STD instruction. */
26787 MIPS_INVAL("OPC_MXU_S8STD");
26788 generate_exception_end(ctx
, EXCP_RI
);
26790 case OPC_MXU_S8LDI
:
26791 /* TODO: Implement emulation of S8LDI instruction. */
26792 MIPS_INVAL("OPC_MXU_S8LDI");
26793 generate_exception_end(ctx
, EXCP_RI
);
26795 case OPC_MXU_S8SDI
:
26796 /* TODO: Implement emulation of S8SDI instruction. */
26797 MIPS_INVAL("OPC_MXU_S8SDI");
26798 generate_exception_end(ctx
, EXCP_RI
);
26800 case OPC_MXU__POOL15
:
26801 decode_opc_mxu__pool15(env
, ctx
);
26803 case OPC_MXU__POOL16
:
26804 decode_opc_mxu__pool16(env
, ctx
);
26806 case OPC_MXU__POOL17
:
26807 decode_opc_mxu__pool17(env
, ctx
);
26809 case OPC_MXU_S16LDD
:
26810 /* TODO: Implement emulation of S16LDD instruction. */
26811 MIPS_INVAL("OPC_MXU_S16LDD");
26812 generate_exception_end(ctx
, EXCP_RI
);
26814 case OPC_MXU_S16STD
:
26815 /* TODO: Implement emulation of S16STD instruction. */
26816 MIPS_INVAL("OPC_MXU_S16STD");
26817 generate_exception_end(ctx
, EXCP_RI
);
26819 case OPC_MXU_S16LDI
:
26820 /* TODO: Implement emulation of S16LDI instruction. */
26821 MIPS_INVAL("OPC_MXU_S16LDI");
26822 generate_exception_end(ctx
, EXCP_RI
);
26824 case OPC_MXU_S16SDI
:
26825 /* TODO: Implement emulation of S16SDI instruction. */
26826 MIPS_INVAL("OPC_MXU_S16SDI");
26827 generate_exception_end(ctx
, EXCP_RI
);
26829 case OPC_MXU_D32SLL
:
26830 /* TODO: Implement emulation of D32SLL instruction. */
26831 MIPS_INVAL("OPC_MXU_D32SLL");
26832 generate_exception_end(ctx
, EXCP_RI
);
26834 case OPC_MXU_D32SLR
:
26835 /* TODO: Implement emulation of D32SLR instruction. */
26836 MIPS_INVAL("OPC_MXU_D32SLR");
26837 generate_exception_end(ctx
, EXCP_RI
);
26839 case OPC_MXU_D32SARL
:
26840 /* TODO: Implement emulation of D32SARL instruction. */
26841 MIPS_INVAL("OPC_MXU_D32SARL");
26842 generate_exception_end(ctx
, EXCP_RI
);
26844 case OPC_MXU_D32SAR
:
26845 /* TODO: Implement emulation of D32SAR instruction. */
26846 MIPS_INVAL("OPC_MXU_D32SAR");
26847 generate_exception_end(ctx
, EXCP_RI
);
26849 case OPC_MXU_Q16SLL
:
26850 /* TODO: Implement emulation of Q16SLL instruction. */
26851 MIPS_INVAL("OPC_MXU_Q16SLL");
26852 generate_exception_end(ctx
, EXCP_RI
);
26854 case OPC_MXU_Q16SLR
:
26855 /* TODO: Implement emulation of Q16SLR instruction. */
26856 MIPS_INVAL("OPC_MXU_Q16SLR");
26857 generate_exception_end(ctx
, EXCP_RI
);
26859 case OPC_MXU__POOL18
:
26860 decode_opc_mxu__pool18(env
, ctx
);
26862 case OPC_MXU_Q16SAR
:
26863 /* TODO: Implement emulation of Q16SAR instruction. */
26864 MIPS_INVAL("OPC_MXU_Q16SAR");
26865 generate_exception_end(ctx
, EXCP_RI
);
26867 case OPC_MXU__POOL19
:
26868 decode_opc_mxu__pool19(env
, ctx
);
26870 case OPC_MXU__POOL20
:
26871 decode_opc_mxu__pool20(env
, ctx
);
26873 case OPC_MXU__POOL21
:
26874 decode_opc_mxu__pool21(env
, ctx
);
26876 case OPC_MXU_Q16SCOP
:
26877 /* TODO: Implement emulation of Q16SCOP instruction. */
26878 MIPS_INVAL("OPC_MXU_Q16SCOP");
26879 generate_exception_end(ctx
, EXCP_RI
);
26881 case OPC_MXU_Q8MADL
:
26882 /* TODO: Implement emulation of Q8MADL instruction. */
26883 MIPS_INVAL("OPC_MXU_Q8MADL");
26884 generate_exception_end(ctx
, EXCP_RI
);
26886 case OPC_MXU_S32SFL
:
26887 /* TODO: Implement emulation of S32SFL instruction. */
26888 MIPS_INVAL("OPC_MXU_S32SFL");
26889 generate_exception_end(ctx
, EXCP_RI
);
26891 case OPC_MXU_Q8SAD
:
26892 /* TODO: Implement emulation of Q8SAD instruction. */
26893 MIPS_INVAL("OPC_MXU_Q8SAD");
26894 generate_exception_end(ctx
, EXCP_RI
);
26897 MIPS_INVAL("decode_opc_mxu");
26898 generate_exception_end(ctx
, EXCP_RI
);
26901 gen_set_label(l_exit
);
26902 tcg_temp_free(t_mxu_cr
);
26906 #endif /* !defined(TARGET_MIPS64) */
26909 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26914 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26916 rs
= (ctx
->opcode
>> 21) & 0x1f;
26917 rt
= (ctx
->opcode
>> 16) & 0x1f;
26918 rd
= (ctx
->opcode
>> 11) & 0x1f;
26920 op1
= MASK_SPECIAL2(ctx
->opcode
);
26922 case OPC_MADD
: /* Multiply and add/sub */
26926 check_insn(ctx
, ISA_MIPS32
);
26927 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26930 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26933 case OPC_DIVU_G_2F
:
26934 case OPC_MULT_G_2F
:
26935 case OPC_MULTU_G_2F
:
26937 case OPC_MODU_G_2F
:
26938 check_insn(ctx
, INSN_LOONGSON2F
);
26939 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26943 check_insn(ctx
, ISA_MIPS32
);
26944 gen_cl(ctx
, op1
, rd
, rs
);
26947 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26948 gen_helper_do_semihosting(cpu_env
);
26951 * XXX: not clear which exception should be raised
26952 * when in debug mode...
26954 check_insn(ctx
, ISA_MIPS32
);
26955 generate_exception_end(ctx
, EXCP_DBp
);
26958 #if defined(TARGET_MIPS64)
26961 check_insn(ctx
, ISA_MIPS64
);
26962 check_mips_64(ctx
);
26963 gen_cl(ctx
, op1
, rd
, rs
);
26965 case OPC_DMULT_G_2F
:
26966 case OPC_DMULTU_G_2F
:
26967 case OPC_DDIV_G_2F
:
26968 case OPC_DDIVU_G_2F
:
26969 case OPC_DMOD_G_2F
:
26970 case OPC_DMODU_G_2F
:
26971 check_insn(ctx
, INSN_LOONGSON2F
);
26972 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26975 default: /* Invalid */
26976 MIPS_INVAL("special2_legacy");
26977 generate_exception_end(ctx
, EXCP_RI
);
26982 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26984 int rs
, rt
, rd
, sa
;
26988 rs
= (ctx
->opcode
>> 21) & 0x1f;
26989 rt
= (ctx
->opcode
>> 16) & 0x1f;
26990 rd
= (ctx
->opcode
>> 11) & 0x1f;
26991 sa
= (ctx
->opcode
>> 6) & 0x1f;
26992 imm
= (int16_t)ctx
->opcode
>> 7;
26994 op1
= MASK_SPECIAL3(ctx
->opcode
);
26998 /* hint codes 24-31 are reserved and signal RI */
26999 generate_exception_end(ctx
, EXCP_RI
);
27001 /* Treat as NOP. */
27004 check_cp0_enabled(ctx
);
27005 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27006 gen_cache_operation(ctx
, rt
, rs
, imm
);
27010 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
27013 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27018 /* Treat as NOP. */
27021 op2
= MASK_BSHFL(ctx
->opcode
);
27027 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
27030 gen_bitswap(ctx
, op2
, rd
, rt
);
27035 #if defined(TARGET_MIPS64)
27037 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
27040 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27043 check_mips_64(ctx
);
27046 /* Treat as NOP. */
27049 op2
= MASK_DBSHFL(ctx
->opcode
);
27059 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
27062 gen_bitswap(ctx
, op2
, rd
, rt
);
27069 default: /* Invalid */
27070 MIPS_INVAL("special3_r6");
27071 generate_exception_end(ctx
, EXCP_RI
);
27076 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
27081 rs
= (ctx
->opcode
>> 21) & 0x1f;
27082 rt
= (ctx
->opcode
>> 16) & 0x1f;
27083 rd
= (ctx
->opcode
>> 11) & 0x1f;
27085 op1
= MASK_SPECIAL3(ctx
->opcode
);
27088 case OPC_DIVU_G_2E
:
27090 case OPC_MODU_G_2E
:
27091 case OPC_MULT_G_2E
:
27092 case OPC_MULTU_G_2E
:
27094 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
27095 * the same mask and op1.
27097 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
27098 op2
= MASK_ADDUH_QB(ctx
->opcode
);
27101 case OPC_ADDUH_R_QB
:
27103 case OPC_ADDQH_R_PH
:
27105 case OPC_ADDQH_R_W
:
27107 case OPC_SUBUH_R_QB
:
27109 case OPC_SUBQH_R_PH
:
27111 case OPC_SUBQH_R_W
:
27112 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27117 case OPC_MULQ_RS_W
:
27118 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27121 MIPS_INVAL("MASK ADDUH.QB");
27122 generate_exception_end(ctx
, EXCP_RI
);
27125 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
27126 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27128 generate_exception_end(ctx
, EXCP_RI
);
27132 op2
= MASK_LX(ctx
->opcode
);
27134 #if defined(TARGET_MIPS64)
27140 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
27142 default: /* Invalid */
27143 MIPS_INVAL("MASK LX");
27144 generate_exception_end(ctx
, EXCP_RI
);
27148 case OPC_ABSQ_S_PH_DSP
:
27149 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
27151 case OPC_ABSQ_S_QB
:
27152 case OPC_ABSQ_S_PH
:
27154 case OPC_PRECEQ_W_PHL
:
27155 case OPC_PRECEQ_W_PHR
:
27156 case OPC_PRECEQU_PH_QBL
:
27157 case OPC_PRECEQU_PH_QBR
:
27158 case OPC_PRECEQU_PH_QBLA
:
27159 case OPC_PRECEQU_PH_QBRA
:
27160 case OPC_PRECEU_PH_QBL
:
27161 case OPC_PRECEU_PH_QBR
:
27162 case OPC_PRECEU_PH_QBLA
:
27163 case OPC_PRECEU_PH_QBRA
:
27164 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27171 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27174 MIPS_INVAL("MASK ABSQ_S.PH");
27175 generate_exception_end(ctx
, EXCP_RI
);
27179 case OPC_ADDU_QB_DSP
:
27180 op2
= MASK_ADDU_QB(ctx
->opcode
);
27183 case OPC_ADDQ_S_PH
:
27186 case OPC_ADDU_S_QB
:
27188 case OPC_ADDU_S_PH
:
27190 case OPC_SUBQ_S_PH
:
27193 case OPC_SUBU_S_QB
:
27195 case OPC_SUBU_S_PH
:
27199 case OPC_RADDU_W_QB
:
27200 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27202 case OPC_MULEU_S_PH_QBL
:
27203 case OPC_MULEU_S_PH_QBR
:
27204 case OPC_MULQ_RS_PH
:
27205 case OPC_MULEQ_S_W_PHL
:
27206 case OPC_MULEQ_S_W_PHR
:
27207 case OPC_MULQ_S_PH
:
27208 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27210 default: /* Invalid */
27211 MIPS_INVAL("MASK ADDU.QB");
27212 generate_exception_end(ctx
, EXCP_RI
);
27217 case OPC_CMPU_EQ_QB_DSP
:
27218 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
27220 case OPC_PRECR_SRA_PH_W
:
27221 case OPC_PRECR_SRA_R_PH_W
:
27222 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27224 case OPC_PRECR_QB_PH
:
27225 case OPC_PRECRQ_QB_PH
:
27226 case OPC_PRECRQ_PH_W
:
27227 case OPC_PRECRQ_RS_PH_W
:
27228 case OPC_PRECRQU_S_QB_PH
:
27229 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27231 case OPC_CMPU_EQ_QB
:
27232 case OPC_CMPU_LT_QB
:
27233 case OPC_CMPU_LE_QB
:
27234 case OPC_CMP_EQ_PH
:
27235 case OPC_CMP_LT_PH
:
27236 case OPC_CMP_LE_PH
:
27237 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27239 case OPC_CMPGU_EQ_QB
:
27240 case OPC_CMPGU_LT_QB
:
27241 case OPC_CMPGU_LE_QB
:
27242 case OPC_CMPGDU_EQ_QB
:
27243 case OPC_CMPGDU_LT_QB
:
27244 case OPC_CMPGDU_LE_QB
:
27247 case OPC_PACKRL_PH
:
27248 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27250 default: /* Invalid */
27251 MIPS_INVAL("MASK CMPU.EQ.QB");
27252 generate_exception_end(ctx
, EXCP_RI
);
27256 case OPC_SHLL_QB_DSP
:
27257 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27259 case OPC_DPA_W_PH_DSP
:
27260 op2
= MASK_DPA_W_PH(ctx
->opcode
);
27262 case OPC_DPAU_H_QBL
:
27263 case OPC_DPAU_H_QBR
:
27264 case OPC_DPSU_H_QBL
:
27265 case OPC_DPSU_H_QBR
:
27267 case OPC_DPAX_W_PH
:
27268 case OPC_DPAQ_S_W_PH
:
27269 case OPC_DPAQX_S_W_PH
:
27270 case OPC_DPAQX_SA_W_PH
:
27272 case OPC_DPSX_W_PH
:
27273 case OPC_DPSQ_S_W_PH
:
27274 case OPC_DPSQX_S_W_PH
:
27275 case OPC_DPSQX_SA_W_PH
:
27276 case OPC_MULSAQ_S_W_PH
:
27277 case OPC_DPAQ_SA_L_W
:
27278 case OPC_DPSQ_SA_L_W
:
27279 case OPC_MAQ_S_W_PHL
:
27280 case OPC_MAQ_S_W_PHR
:
27281 case OPC_MAQ_SA_W_PHL
:
27282 case OPC_MAQ_SA_W_PHR
:
27283 case OPC_MULSA_W_PH
:
27284 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27286 default: /* Invalid */
27287 MIPS_INVAL("MASK DPAW.PH");
27288 generate_exception_end(ctx
, EXCP_RI
);
27293 op2
= MASK_INSV(ctx
->opcode
);
27304 t0
= tcg_temp_new();
27305 t1
= tcg_temp_new();
27307 gen_load_gpr(t0
, rt
);
27308 gen_load_gpr(t1
, rs
);
27310 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27316 default: /* Invalid */
27317 MIPS_INVAL("MASK INSV");
27318 generate_exception_end(ctx
, EXCP_RI
);
27322 case OPC_APPEND_DSP
:
27323 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27325 case OPC_EXTR_W_DSP
:
27326 op2
= MASK_EXTR_W(ctx
->opcode
);
27330 case OPC_EXTR_RS_W
:
27332 case OPC_EXTRV_S_H
:
27334 case OPC_EXTRV_R_W
:
27335 case OPC_EXTRV_RS_W
:
27340 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27343 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27349 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27351 default: /* Invalid */
27352 MIPS_INVAL("MASK EXTR.W");
27353 generate_exception_end(ctx
, EXCP_RI
);
27357 #if defined(TARGET_MIPS64)
27358 case OPC_DDIV_G_2E
:
27359 case OPC_DDIVU_G_2E
:
27360 case OPC_DMULT_G_2E
:
27361 case OPC_DMULTU_G_2E
:
27362 case OPC_DMOD_G_2E
:
27363 case OPC_DMODU_G_2E
:
27364 check_insn(ctx
, INSN_LOONGSON2E
);
27365 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27367 case OPC_ABSQ_S_QH_DSP
:
27368 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27370 case OPC_PRECEQ_L_PWL
:
27371 case OPC_PRECEQ_L_PWR
:
27372 case OPC_PRECEQ_PW_QHL
:
27373 case OPC_PRECEQ_PW_QHR
:
27374 case OPC_PRECEQ_PW_QHLA
:
27375 case OPC_PRECEQ_PW_QHRA
:
27376 case OPC_PRECEQU_QH_OBL
:
27377 case OPC_PRECEQU_QH_OBR
:
27378 case OPC_PRECEQU_QH_OBLA
:
27379 case OPC_PRECEQU_QH_OBRA
:
27380 case OPC_PRECEU_QH_OBL
:
27381 case OPC_PRECEU_QH_OBR
:
27382 case OPC_PRECEU_QH_OBLA
:
27383 case OPC_PRECEU_QH_OBRA
:
27384 case OPC_ABSQ_S_OB
:
27385 case OPC_ABSQ_S_PW
:
27386 case OPC_ABSQ_S_QH
:
27387 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27395 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27397 default: /* Invalid */
27398 MIPS_INVAL("MASK ABSQ_S.QH");
27399 generate_exception_end(ctx
, EXCP_RI
);
27403 case OPC_ADDU_OB_DSP
:
27404 op2
= MASK_ADDU_OB(ctx
->opcode
);
27406 case OPC_RADDU_L_OB
:
27408 case OPC_SUBQ_S_PW
:
27410 case OPC_SUBQ_S_QH
:
27412 case OPC_SUBU_S_OB
:
27414 case OPC_SUBU_S_QH
:
27416 case OPC_SUBUH_R_OB
:
27418 case OPC_ADDQ_S_PW
:
27420 case OPC_ADDQ_S_QH
:
27422 case OPC_ADDU_S_OB
:
27424 case OPC_ADDU_S_QH
:
27426 case OPC_ADDUH_R_OB
:
27427 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27429 case OPC_MULEQ_S_PW_QHL
:
27430 case OPC_MULEQ_S_PW_QHR
:
27431 case OPC_MULEU_S_QH_OBL
:
27432 case OPC_MULEU_S_QH_OBR
:
27433 case OPC_MULQ_RS_QH
:
27434 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27436 default: /* Invalid */
27437 MIPS_INVAL("MASK ADDU.OB");
27438 generate_exception_end(ctx
, EXCP_RI
);
27442 case OPC_CMPU_EQ_OB_DSP
:
27443 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27445 case OPC_PRECR_SRA_QH_PW
:
27446 case OPC_PRECR_SRA_R_QH_PW
:
27447 /* Return value is rt. */
27448 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27450 case OPC_PRECR_OB_QH
:
27451 case OPC_PRECRQ_OB_QH
:
27452 case OPC_PRECRQ_PW_L
:
27453 case OPC_PRECRQ_QH_PW
:
27454 case OPC_PRECRQ_RS_QH_PW
:
27455 case OPC_PRECRQU_S_OB_QH
:
27456 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27458 case OPC_CMPU_EQ_OB
:
27459 case OPC_CMPU_LT_OB
:
27460 case OPC_CMPU_LE_OB
:
27461 case OPC_CMP_EQ_QH
:
27462 case OPC_CMP_LT_QH
:
27463 case OPC_CMP_LE_QH
:
27464 case OPC_CMP_EQ_PW
:
27465 case OPC_CMP_LT_PW
:
27466 case OPC_CMP_LE_PW
:
27467 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27469 case OPC_CMPGDU_EQ_OB
:
27470 case OPC_CMPGDU_LT_OB
:
27471 case OPC_CMPGDU_LE_OB
:
27472 case OPC_CMPGU_EQ_OB
:
27473 case OPC_CMPGU_LT_OB
:
27474 case OPC_CMPGU_LE_OB
:
27475 case OPC_PACKRL_PW
:
27479 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27481 default: /* Invalid */
27482 MIPS_INVAL("MASK CMPU_EQ.OB");
27483 generate_exception_end(ctx
, EXCP_RI
);
27487 case OPC_DAPPEND_DSP
:
27488 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27490 case OPC_DEXTR_W_DSP
:
27491 op2
= MASK_DEXTR_W(ctx
->opcode
);
27498 case OPC_DEXTR_R_L
:
27499 case OPC_DEXTR_RS_L
:
27501 case OPC_DEXTR_R_W
:
27502 case OPC_DEXTR_RS_W
:
27503 case OPC_DEXTR_S_H
:
27505 case OPC_DEXTRV_R_L
:
27506 case OPC_DEXTRV_RS_L
:
27507 case OPC_DEXTRV_S_H
:
27509 case OPC_DEXTRV_R_W
:
27510 case OPC_DEXTRV_RS_W
:
27511 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27516 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27518 default: /* Invalid */
27519 MIPS_INVAL("MASK EXTR.W");
27520 generate_exception_end(ctx
, EXCP_RI
);
27524 case OPC_DPAQ_W_QH_DSP
:
27525 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27527 case OPC_DPAU_H_OBL
:
27528 case OPC_DPAU_H_OBR
:
27529 case OPC_DPSU_H_OBL
:
27530 case OPC_DPSU_H_OBR
:
27532 case OPC_DPAQ_S_W_QH
:
27534 case OPC_DPSQ_S_W_QH
:
27535 case OPC_MULSAQ_S_W_QH
:
27536 case OPC_DPAQ_SA_L_PW
:
27537 case OPC_DPSQ_SA_L_PW
:
27538 case OPC_MULSAQ_S_L_PW
:
27539 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27541 case OPC_MAQ_S_W_QHLL
:
27542 case OPC_MAQ_S_W_QHLR
:
27543 case OPC_MAQ_S_W_QHRL
:
27544 case OPC_MAQ_S_W_QHRR
:
27545 case OPC_MAQ_SA_W_QHLL
:
27546 case OPC_MAQ_SA_W_QHLR
:
27547 case OPC_MAQ_SA_W_QHRL
:
27548 case OPC_MAQ_SA_W_QHRR
:
27549 case OPC_MAQ_S_L_PWL
:
27550 case OPC_MAQ_S_L_PWR
:
27555 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27557 default: /* Invalid */
27558 MIPS_INVAL("MASK DPAQ.W.QH");
27559 generate_exception_end(ctx
, EXCP_RI
);
27563 case OPC_DINSV_DSP
:
27564 op2
= MASK_INSV(ctx
->opcode
);
27575 t0
= tcg_temp_new();
27576 t1
= tcg_temp_new();
27578 gen_load_gpr(t0
, rt
);
27579 gen_load_gpr(t1
, rs
);
27581 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27587 default: /* Invalid */
27588 MIPS_INVAL("MASK DINSV");
27589 generate_exception_end(ctx
, EXCP_RI
);
27593 case OPC_SHLL_OB_DSP
:
27594 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27597 default: /* Invalid */
27598 MIPS_INVAL("special3_legacy");
27599 generate_exception_end(ctx
, EXCP_RI
);
27605 #if defined(TARGET_MIPS64)
27607 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
27609 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
27612 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
27613 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
27614 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
27615 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
27616 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
27617 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
27618 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
27619 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
27620 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
27621 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
27622 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
27623 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
27624 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
27625 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
27626 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
27627 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
27628 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
27629 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
27630 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27631 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27632 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27633 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27634 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27635 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27636 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27637 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27640 MIPS_INVAL("TX79 MMI class MMI0");
27641 generate_exception_end(ctx
, EXCP_RI
);
27646 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27648 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27651 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27652 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27653 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27654 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27655 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27656 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27657 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27658 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27659 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27660 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27661 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27662 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27663 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27664 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27665 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27666 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27667 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27668 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27669 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27672 MIPS_INVAL("TX79 MMI class MMI1");
27673 generate_exception_end(ctx
, EXCP_RI
);
27678 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27680 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27683 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27684 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27685 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27686 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27687 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27688 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27689 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27690 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27691 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27692 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27693 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27694 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27695 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27696 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27697 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27698 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27699 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27700 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27701 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27702 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27703 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27704 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27706 case MMI_OPC_2_PCPYLD
:
27707 gen_mmi_pcpyld(ctx
);
27710 MIPS_INVAL("TX79 MMI class MMI2");
27711 generate_exception_end(ctx
, EXCP_RI
);
27716 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27718 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27721 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27722 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27723 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27724 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27725 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27726 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27727 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27728 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27729 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27730 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27731 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27732 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27734 case MMI_OPC_3_PCPYH
:
27735 gen_mmi_pcpyh(ctx
);
27737 case MMI_OPC_3_PCPYUD
:
27738 gen_mmi_pcpyud(ctx
);
27741 MIPS_INVAL("TX79 MMI class MMI3");
27742 generate_exception_end(ctx
, EXCP_RI
);
27747 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27749 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27750 int rs
= extract32(ctx
->opcode
, 21, 5);
27751 int rt
= extract32(ctx
->opcode
, 16, 5);
27752 int rd
= extract32(ctx
->opcode
, 11, 5);
27755 case MMI_OPC_CLASS_MMI0
:
27756 decode_mmi0(env
, ctx
);
27758 case MMI_OPC_CLASS_MMI1
:
27759 decode_mmi1(env
, ctx
);
27761 case MMI_OPC_CLASS_MMI2
:
27762 decode_mmi2(env
, ctx
);
27764 case MMI_OPC_CLASS_MMI3
:
27765 decode_mmi3(env
, ctx
);
27767 case MMI_OPC_MULT1
:
27768 case MMI_OPC_MULTU1
:
27770 case MMI_OPC_MADDU
:
27771 case MMI_OPC_MADD1
:
27772 case MMI_OPC_MADDU1
:
27773 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27776 case MMI_OPC_DIVU1
:
27777 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27779 case MMI_OPC_MTLO1
:
27780 case MMI_OPC_MTHI1
:
27781 gen_HILO1_tx79(ctx
, opc
, rs
);
27783 case MMI_OPC_MFLO1
:
27784 case MMI_OPC_MFHI1
:
27785 gen_HILO1_tx79(ctx
, opc
, rd
);
27787 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27788 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27789 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27790 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27791 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27792 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27793 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27794 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27795 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27796 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27799 MIPS_INVAL("TX79 MMI class");
27800 generate_exception_end(ctx
, EXCP_RI
);
27805 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27807 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27810 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27812 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27816 * The TX79-specific instruction Store Quadword
27818 * +--------+-------+-------+------------------------+
27819 * | 011111 | base | rt | offset | SQ
27820 * +--------+-------+-------+------------------------+
27823 * has the same opcode as the Read Hardware Register instruction
27825 * +--------+-------+-------+-------+-------+--------+
27826 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27827 * +--------+-------+-------+-------+-------+--------+
27830 * that is required, trapped and emulated by the Linux kernel. However, all
27831 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27832 * offset is odd. Therefore all valid SQ instructions can execute normally.
27833 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27834 * between SQ and RDHWR, as the Linux kernel does.
27836 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27838 int base
= extract32(ctx
->opcode
, 21, 5);
27839 int rt
= extract32(ctx
->opcode
, 16, 5);
27840 int offset
= extract32(ctx
->opcode
, 0, 16);
27842 #ifdef CONFIG_USER_ONLY
27843 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27844 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27846 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27847 int rd
= extract32(ctx
->opcode
, 11, 5);
27849 gen_rdhwr(ctx
, rt
, rd
, 0);
27854 gen_mmi_sq(ctx
, base
, rt
, offset
);
27859 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27861 int rs
, rt
, rd
, sa
;
27865 rs
= (ctx
->opcode
>> 21) & 0x1f;
27866 rt
= (ctx
->opcode
>> 16) & 0x1f;
27867 rd
= (ctx
->opcode
>> 11) & 0x1f;
27868 sa
= (ctx
->opcode
>> 6) & 0x1f;
27869 imm
= sextract32(ctx
->opcode
, 7, 9);
27871 op1
= MASK_SPECIAL3(ctx
->opcode
);
27874 * EVA loads and stores overlap Loongson 2E instructions decoded by
27875 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27882 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27890 check_cp0_enabled(ctx
);
27891 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27900 check_cp0_enabled(ctx
);
27901 gen_st(ctx
, op1
, rt
, rs
, imm
);
27904 check_cp0_enabled(ctx
);
27905 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
27908 check_cp0_enabled(ctx
);
27909 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27910 gen_cache_operation(ctx
, rt
, rs
, imm
);
27912 /* Treat as NOP. */
27915 check_cp0_enabled(ctx
);
27916 /* Treat as NOP. */
27924 check_insn(ctx
, ISA_MIPS32R2
);
27925 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27928 op2
= MASK_BSHFL(ctx
->opcode
);
27935 check_insn(ctx
, ISA_MIPS32R6
);
27936 decode_opc_special3_r6(env
, ctx
);
27939 check_insn(ctx
, ISA_MIPS32R2
);
27940 gen_bshfl(ctx
, op2
, rt
, rd
);
27944 #if defined(TARGET_MIPS64)
27951 check_insn(ctx
, ISA_MIPS64R2
);
27952 check_mips_64(ctx
);
27953 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27956 op2
= MASK_DBSHFL(ctx
->opcode
);
27967 check_insn(ctx
, ISA_MIPS32R6
);
27968 decode_opc_special3_r6(env
, ctx
);
27971 check_insn(ctx
, ISA_MIPS64R2
);
27972 check_mips_64(ctx
);
27973 op2
= MASK_DBSHFL(ctx
->opcode
);
27974 gen_bshfl(ctx
, op2
, rt
, rd
);
27980 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27985 TCGv t0
= tcg_temp_new();
27986 TCGv t1
= tcg_temp_new();
27988 gen_load_gpr(t0
, rt
);
27989 gen_load_gpr(t1
, rs
);
27990 gen_helper_fork(t0
, t1
);
27998 TCGv t0
= tcg_temp_new();
28000 gen_load_gpr(t0
, rs
);
28001 gen_helper_yield(t0
, cpu_env
, t0
);
28002 gen_store_gpr(t0
, rd
);
28007 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28008 decode_opc_special3_r6(env
, ctx
);
28010 decode_opc_special3_legacy(env
, ctx
);
28015 /* MIPS SIMD Architecture (MSA) */
28016 static inline int check_msa_access(DisasContext
*ctx
)
28018 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
28019 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
28020 generate_exception_end(ctx
, EXCP_RI
);
28024 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
28025 if (ctx
->insn_flags
& ASE_MSA
) {
28026 generate_exception_end(ctx
, EXCP_MSADIS
);
28029 generate_exception_end(ctx
, EXCP_RI
);
28036 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
28038 /* generates tcg ops to check if any element is 0 */
28039 /* Note this function only works with MSA_WRLEN = 128 */
28040 uint64_t eval_zero_or_big
= 0;
28041 uint64_t eval_big
= 0;
28042 TCGv_i64 t0
= tcg_temp_new_i64();
28043 TCGv_i64 t1
= tcg_temp_new_i64();
28046 eval_zero_or_big
= 0x0101010101010101ULL
;
28047 eval_big
= 0x8080808080808080ULL
;
28050 eval_zero_or_big
= 0x0001000100010001ULL
;
28051 eval_big
= 0x8000800080008000ULL
;
28054 eval_zero_or_big
= 0x0000000100000001ULL
;
28055 eval_big
= 0x8000000080000000ULL
;
28058 eval_zero_or_big
= 0x0000000000000001ULL
;
28059 eval_big
= 0x8000000000000000ULL
;
28062 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<< 1], eval_zero_or_big
);
28063 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<< 1]);
28064 tcg_gen_andi_i64(t0
, t0
, eval_big
);
28065 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<< 1) + 1], eval_zero_or_big
);
28066 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<< 1) + 1]);
28067 tcg_gen_andi_i64(t1
, t1
, eval_big
);
28068 tcg_gen_or_i64(t0
, t0
, t1
);
28069 /* if all bits are zero then all elements are not zero */
28070 /* if some bit is non-zero then some element is zero */
28071 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
28072 tcg_gen_trunc_i64_tl(tresult
, t0
);
28073 tcg_temp_free_i64(t0
);
28074 tcg_temp_free_i64(t1
);
28077 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
28079 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28080 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28081 int64_t s16
= (int16_t)ctx
->opcode
;
28083 check_msa_access(ctx
);
28085 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
28086 generate_exception_end(ctx
, EXCP_RI
);
28093 TCGv_i64 t0
= tcg_temp_new_i64();
28094 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<< 1], msa_wr_d
[(wt
<< 1) + 1]);
28095 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
28096 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
28097 tcg_gen_trunc_i64_tl(bcond
, t0
);
28098 tcg_temp_free_i64(t0
);
28105 gen_check_zero_element(bcond
, df
, wt
);
28111 gen_check_zero_element(bcond
, df
, wt
);
28112 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
28116 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
28118 ctx
->hflags
|= MIPS_HFLAG_BC
;
28119 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
28122 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
28124 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
28125 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
28126 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28127 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28129 TCGv_i32 twd
= tcg_const_i32(wd
);
28130 TCGv_i32 tws
= tcg_const_i32(ws
);
28131 TCGv_i32 ti8
= tcg_const_i32(i8
);
28133 switch (MASK_MSA_I8(ctx
->opcode
)) {
28135 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
28138 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
28141 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
28144 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
28147 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
28150 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
28153 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
28159 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
28160 if (df
== DF_DOUBLE
) {
28161 generate_exception_end(ctx
, EXCP_RI
);
28163 TCGv_i32 tdf
= tcg_const_i32(df
);
28164 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
28165 tcg_temp_free_i32(tdf
);
28170 MIPS_INVAL("MSA instruction");
28171 generate_exception_end(ctx
, EXCP_RI
);
28175 tcg_temp_free_i32(twd
);
28176 tcg_temp_free_i32(tws
);
28177 tcg_temp_free_i32(ti8
);
28180 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
28182 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28183 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28184 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
28185 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
28186 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28187 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28189 TCGv_i32 tdf
= tcg_const_i32(df
);
28190 TCGv_i32 twd
= tcg_const_i32(wd
);
28191 TCGv_i32 tws
= tcg_const_i32(ws
);
28192 TCGv_i32 timm
= tcg_temp_new_i32();
28193 tcg_gen_movi_i32(timm
, u5
);
28195 switch (MASK_MSA_I5(ctx
->opcode
)) {
28197 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28200 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28202 case OPC_MAXI_S_df
:
28203 tcg_gen_movi_i32(timm
, s5
);
28204 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28206 case OPC_MAXI_U_df
:
28207 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28209 case OPC_MINI_S_df
:
28210 tcg_gen_movi_i32(timm
, s5
);
28211 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28213 case OPC_MINI_U_df
:
28214 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28217 tcg_gen_movi_i32(timm
, s5
);
28218 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28220 case OPC_CLTI_S_df
:
28221 tcg_gen_movi_i32(timm
, s5
);
28222 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28224 case OPC_CLTI_U_df
:
28225 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28227 case OPC_CLEI_S_df
:
28228 tcg_gen_movi_i32(timm
, s5
);
28229 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28231 case OPC_CLEI_U_df
:
28232 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28236 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
28237 tcg_gen_movi_i32(timm
, s10
);
28238 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
28242 MIPS_INVAL("MSA instruction");
28243 generate_exception_end(ctx
, EXCP_RI
);
28247 tcg_temp_free_i32(tdf
);
28248 tcg_temp_free_i32(twd
);
28249 tcg_temp_free_i32(tws
);
28250 tcg_temp_free_i32(timm
);
28253 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
28255 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28256 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
28257 uint32_t df
= 0, m
= 0;
28258 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28259 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28266 if ((dfm
& 0x40) == 0x00) {
28269 } else if ((dfm
& 0x60) == 0x40) {
28272 } else if ((dfm
& 0x70) == 0x60) {
28275 } else if ((dfm
& 0x78) == 0x70) {
28279 generate_exception_end(ctx
, EXCP_RI
);
28283 tdf
= tcg_const_i32(df
);
28284 tm
= tcg_const_i32(m
);
28285 twd
= tcg_const_i32(wd
);
28286 tws
= tcg_const_i32(ws
);
28288 switch (MASK_MSA_BIT(ctx
->opcode
)) {
28290 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28293 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
28296 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28299 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28302 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
28305 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
28307 case OPC_BINSLI_df
:
28308 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28310 case OPC_BINSRI_df
:
28311 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28314 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
28317 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
28320 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
28323 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28326 MIPS_INVAL("MSA instruction");
28327 generate_exception_end(ctx
, EXCP_RI
);
28331 tcg_temp_free_i32(tdf
);
28332 tcg_temp_free_i32(tm
);
28333 tcg_temp_free_i32(twd
);
28334 tcg_temp_free_i32(tws
);
28337 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28339 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28340 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28341 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28342 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28343 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28345 TCGv_i32 tdf
= tcg_const_i32(df
);
28346 TCGv_i32 twd
= tcg_const_i32(wd
);
28347 TCGv_i32 tws
= tcg_const_i32(ws
);
28348 TCGv_i32 twt
= tcg_const_i32(wt
);
28350 switch (MASK_MSA_3R(ctx
->opcode
)) {
28352 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28355 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28358 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28361 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28363 case OPC_SUBS_S_df
:
28364 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28367 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28370 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28373 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28376 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28379 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28381 case OPC_ADDS_A_df
:
28382 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28384 case OPC_SUBS_U_df
:
28385 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28388 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28391 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28394 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28397 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28400 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28403 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28405 case OPC_ADDS_S_df
:
28406 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28408 case OPC_SUBSUS_U_df
:
28409 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28412 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28415 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28418 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28421 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28424 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28427 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28429 case OPC_ADDS_U_df
:
28430 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28432 case OPC_SUBSUU_S_df
:
28433 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28436 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28439 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28442 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28445 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28448 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28450 case OPC_ASUB_S_df
:
28451 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28454 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28457 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28460 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28463 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28466 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28469 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28471 case OPC_ASUB_U_df
:
28472 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28475 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28478 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28481 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28484 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28486 case OPC_AVER_S_df
:
28487 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28490 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28493 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28496 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28499 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28501 case OPC_AVER_U_df
:
28502 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28505 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28508 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28511 case OPC_DOTP_S_df
:
28512 case OPC_DOTP_U_df
:
28513 case OPC_DPADD_S_df
:
28514 case OPC_DPADD_U_df
:
28515 case OPC_DPSUB_S_df
:
28516 case OPC_HADD_S_df
:
28517 case OPC_DPSUB_U_df
:
28518 case OPC_HADD_U_df
:
28519 case OPC_HSUB_S_df
:
28520 case OPC_HSUB_U_df
:
28521 if (df
== DF_BYTE
) {
28522 generate_exception_end(ctx
, EXCP_RI
);
28525 switch (MASK_MSA_3R(ctx
->opcode
)) {
28526 case OPC_DOTP_S_df
:
28527 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28529 case OPC_DOTP_U_df
:
28530 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28532 case OPC_DPADD_S_df
:
28533 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28535 case OPC_DPADD_U_df
:
28536 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28538 case OPC_DPSUB_S_df
:
28539 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28541 case OPC_HADD_S_df
:
28542 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28544 case OPC_DPSUB_U_df
:
28545 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28547 case OPC_HADD_U_df
:
28548 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28550 case OPC_HSUB_S_df
:
28551 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28553 case OPC_HSUB_U_df
:
28554 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28559 MIPS_INVAL("MSA instruction");
28560 generate_exception_end(ctx
, EXCP_RI
);
28563 tcg_temp_free_i32(twd
);
28564 tcg_temp_free_i32(tws
);
28565 tcg_temp_free_i32(twt
);
28566 tcg_temp_free_i32(tdf
);
28569 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
28571 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28572 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
28573 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
28574 TCGv telm
= tcg_temp_new();
28575 TCGv_i32 tsr
= tcg_const_i32(source
);
28576 TCGv_i32 tdt
= tcg_const_i32(dest
);
28578 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
28580 gen_load_gpr(telm
, source
);
28581 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
28584 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
28585 gen_store_gpr(telm
, dest
);
28588 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
28591 MIPS_INVAL("MSA instruction");
28592 generate_exception_end(ctx
, EXCP_RI
);
28596 tcg_temp_free(telm
);
28597 tcg_temp_free_i32(tdt
);
28598 tcg_temp_free_i32(tsr
);
28601 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28604 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28605 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28606 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28608 TCGv_i32 tws
= tcg_const_i32(ws
);
28609 TCGv_i32 twd
= tcg_const_i32(wd
);
28610 TCGv_i32 tn
= tcg_const_i32(n
);
28611 TCGv_i32 tdf
= tcg_const_i32(df
);
28613 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28615 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
28617 case OPC_SPLATI_df
:
28618 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
28621 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
28623 case OPC_COPY_S_df
:
28624 case OPC_COPY_U_df
:
28625 case OPC_INSERT_df
:
28626 #if !defined(TARGET_MIPS64)
28627 /* Double format valid only for MIPS64 */
28628 if (df
== DF_DOUBLE
) {
28629 generate_exception_end(ctx
, EXCP_RI
);
28632 if ((MASK_MSA_ELM(ctx
->opcode
) == OPC_COPY_U_df
) &&
28634 generate_exception_end(ctx
, EXCP_RI
);
28638 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28639 case OPC_COPY_S_df
:
28640 if (likely(wd
!= 0)) {
28643 gen_helper_msa_copy_s_b(cpu_env
, twd
, tws
, tn
);
28646 gen_helper_msa_copy_s_h(cpu_env
, twd
, tws
, tn
);
28649 gen_helper_msa_copy_s_w(cpu_env
, twd
, tws
, tn
);
28651 #if defined(TARGET_MIPS64)
28653 gen_helper_msa_copy_s_d(cpu_env
, twd
, tws
, tn
);
28661 case OPC_COPY_U_df
:
28662 if (likely(wd
!= 0)) {
28665 gen_helper_msa_copy_u_b(cpu_env
, twd
, tws
, tn
);
28668 gen_helper_msa_copy_u_h(cpu_env
, twd
, tws
, tn
);
28670 #if defined(TARGET_MIPS64)
28672 gen_helper_msa_copy_u_w(cpu_env
, twd
, tws
, tn
);
28680 case OPC_INSERT_df
:
28683 gen_helper_msa_insert_b(cpu_env
, twd
, tws
, tn
);
28686 gen_helper_msa_insert_h(cpu_env
, twd
, tws
, tn
);
28689 gen_helper_msa_insert_w(cpu_env
, twd
, tws
, tn
);
28691 #if defined(TARGET_MIPS64)
28693 gen_helper_msa_insert_d(cpu_env
, twd
, tws
, tn
);
28703 MIPS_INVAL("MSA instruction");
28704 generate_exception_end(ctx
, EXCP_RI
);
28706 tcg_temp_free_i32(twd
);
28707 tcg_temp_free_i32(tws
);
28708 tcg_temp_free_i32(tn
);
28709 tcg_temp_free_i32(tdf
);
28712 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28714 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28715 uint32_t df
= 0, n
= 0;
28717 if ((dfn
& 0x30) == 0x00) {
28720 } else if ((dfn
& 0x38) == 0x20) {
28723 } else if ((dfn
& 0x3c) == 0x30) {
28726 } else if ((dfn
& 0x3e) == 0x38) {
28729 } else if (dfn
== 0x3E) {
28730 /* CTCMSA, CFCMSA, MOVE.V */
28731 gen_msa_elm_3e(env
, ctx
);
28734 generate_exception_end(ctx
, EXCP_RI
);
28738 gen_msa_elm_df(env
, ctx
, df
, n
);
28741 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28743 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28744 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28745 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28746 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28747 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28749 TCGv_i32 twd
= tcg_const_i32(wd
);
28750 TCGv_i32 tws
= tcg_const_i32(ws
);
28751 TCGv_i32 twt
= tcg_const_i32(wt
);
28752 TCGv_i32 tdf
= tcg_temp_new_i32();
28754 /* adjust df value for floating-point instruction */
28755 tcg_gen_movi_i32(tdf
, df
+ 2);
28757 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28759 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28762 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28765 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28768 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28771 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28774 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28777 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28780 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28783 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28786 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28789 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28792 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28795 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28798 tcg_gen_movi_i32(tdf
, df
+ 1);
28799 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28802 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28805 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28807 case OPC_MADD_Q_df
:
28808 tcg_gen_movi_i32(tdf
, df
+ 1);
28809 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28812 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28814 case OPC_MSUB_Q_df
:
28815 tcg_gen_movi_i32(tdf
, df
+ 1);
28816 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28819 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28822 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28825 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28828 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28831 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28834 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28837 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28840 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28843 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28846 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28849 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28852 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28855 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28857 case OPC_MULR_Q_df
:
28858 tcg_gen_movi_i32(tdf
, df
+ 1);
28859 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28862 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28864 case OPC_FMIN_A_df
:
28865 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28867 case OPC_MADDR_Q_df
:
28868 tcg_gen_movi_i32(tdf
, df
+ 1);
28869 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28872 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28875 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28877 case OPC_MSUBR_Q_df
:
28878 tcg_gen_movi_i32(tdf
, df
+ 1);
28879 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28882 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28884 case OPC_FMAX_A_df
:
28885 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28888 MIPS_INVAL("MSA instruction");
28889 generate_exception_end(ctx
, EXCP_RI
);
28893 tcg_temp_free_i32(twd
);
28894 tcg_temp_free_i32(tws
);
28895 tcg_temp_free_i32(twt
);
28896 tcg_temp_free_i32(tdf
);
28899 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28901 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28902 (op & (0x7 << 18)))
28903 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28904 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28905 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28906 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28907 TCGv_i32 twd
= tcg_const_i32(wd
);
28908 TCGv_i32 tws
= tcg_const_i32(ws
);
28909 TCGv_i32 twt
= tcg_const_i32(wt
);
28910 TCGv_i32 tdf
= tcg_const_i32(df
);
28912 switch (MASK_MSA_2R(ctx
->opcode
)) {
28914 #if !defined(TARGET_MIPS64)
28915 /* Double format valid only for MIPS64 */
28916 if (df
== DF_DOUBLE
) {
28917 generate_exception_end(ctx
, EXCP_RI
);
28921 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28924 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28927 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28930 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28933 MIPS_INVAL("MSA instruction");
28934 generate_exception_end(ctx
, EXCP_RI
);
28938 tcg_temp_free_i32(twd
);
28939 tcg_temp_free_i32(tws
);
28940 tcg_temp_free_i32(twt
);
28941 tcg_temp_free_i32(tdf
);
28944 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28946 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28947 (op & (0xf << 17)))
28948 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28949 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28950 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28951 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28952 TCGv_i32 twd
= tcg_const_i32(wd
);
28953 TCGv_i32 tws
= tcg_const_i32(ws
);
28954 TCGv_i32 twt
= tcg_const_i32(wt
);
28955 /* adjust df value for floating-point instruction */
28956 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28958 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28959 case OPC_FCLASS_df
:
28960 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
28962 case OPC_FTRUNC_S_df
:
28963 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
28965 case OPC_FTRUNC_U_df
:
28966 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
28969 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
28971 case OPC_FRSQRT_df
:
28972 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
28975 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
28978 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
28981 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
28983 case OPC_FEXUPL_df
:
28984 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
28986 case OPC_FEXUPR_df
:
28987 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
28990 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
28993 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
28995 case OPC_FTINT_S_df
:
28996 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
28998 case OPC_FTINT_U_df
:
28999 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
29001 case OPC_FFINT_S_df
:
29002 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
29004 case OPC_FFINT_U_df
:
29005 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
29009 tcg_temp_free_i32(twd
);
29010 tcg_temp_free_i32(tws
);
29011 tcg_temp_free_i32(twt
);
29012 tcg_temp_free_i32(tdf
);
29015 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
29017 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
29018 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
29019 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
29020 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
29021 TCGv_i32 twd
= tcg_const_i32(wd
);
29022 TCGv_i32 tws
= tcg_const_i32(ws
);
29023 TCGv_i32 twt
= tcg_const_i32(wt
);
29025 switch (MASK_MSA_VEC(ctx
->opcode
)) {
29027 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
29030 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
29033 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
29036 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
29039 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
29042 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
29045 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
29048 MIPS_INVAL("MSA instruction");
29049 generate_exception_end(ctx
, EXCP_RI
);
29053 tcg_temp_free_i32(twd
);
29054 tcg_temp_free_i32(tws
);
29055 tcg_temp_free_i32(twt
);
29058 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
29060 switch (MASK_MSA_VEC(ctx
->opcode
)) {
29068 gen_msa_vec_v(env
, ctx
);
29071 gen_msa_2r(env
, ctx
);
29074 gen_msa_2rf(env
, ctx
);
29077 MIPS_INVAL("MSA instruction");
29078 generate_exception_end(ctx
, EXCP_RI
);
29083 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
29085 uint32_t opcode
= ctx
->opcode
;
29086 check_insn(ctx
, ASE_MSA
);
29087 check_msa_access(ctx
);
29089 switch (MASK_MSA_MINOR(opcode
)) {
29090 case OPC_MSA_I8_00
:
29091 case OPC_MSA_I8_01
:
29092 case OPC_MSA_I8_02
:
29093 gen_msa_i8(env
, ctx
);
29095 case OPC_MSA_I5_06
:
29096 case OPC_MSA_I5_07
:
29097 gen_msa_i5(env
, ctx
);
29099 case OPC_MSA_BIT_09
:
29100 case OPC_MSA_BIT_0A
:
29101 gen_msa_bit(env
, ctx
);
29103 case OPC_MSA_3R_0D
:
29104 case OPC_MSA_3R_0E
:
29105 case OPC_MSA_3R_0F
:
29106 case OPC_MSA_3R_10
:
29107 case OPC_MSA_3R_11
:
29108 case OPC_MSA_3R_12
:
29109 case OPC_MSA_3R_13
:
29110 case OPC_MSA_3R_14
:
29111 case OPC_MSA_3R_15
:
29112 gen_msa_3r(env
, ctx
);
29115 gen_msa_elm(env
, ctx
);
29117 case OPC_MSA_3RF_1A
:
29118 case OPC_MSA_3RF_1B
:
29119 case OPC_MSA_3RF_1C
:
29120 gen_msa_3rf(env
, ctx
);
29123 gen_msa_vec(env
, ctx
);
29134 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
29135 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
29136 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
29137 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
29139 TCGv_i32 twd
= tcg_const_i32(wd
);
29140 TCGv taddr
= tcg_temp_new();
29141 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
29143 switch (MASK_MSA_MINOR(opcode
)) {
29145 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
29148 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
29151 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
29154 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
29157 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
29160 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
29163 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
29166 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
29170 tcg_temp_free_i32(twd
);
29171 tcg_temp_free(taddr
);
29175 MIPS_INVAL("MSA instruction");
29176 generate_exception_end(ctx
, EXCP_RI
);
29182 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
29185 int rs
, rt
, rd
, sa
;
29189 /* make sure instructions are on a word boundary */
29190 if (ctx
->base
.pc_next
& 0x3) {
29191 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
29192 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
29196 /* Handle blikely not taken case */
29197 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
29198 TCGLabel
*l1
= gen_new_label();
29200 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
29201 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
29202 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
29206 op
= MASK_OP_MAJOR(ctx
->opcode
);
29207 rs
= (ctx
->opcode
>> 21) & 0x1f;
29208 rt
= (ctx
->opcode
>> 16) & 0x1f;
29209 rd
= (ctx
->opcode
>> 11) & 0x1f;
29210 sa
= (ctx
->opcode
>> 6) & 0x1f;
29211 imm
= (int16_t)ctx
->opcode
;
29214 decode_opc_special(env
, ctx
);
29217 #if defined(TARGET_MIPS64)
29218 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
29219 decode_mmi(env
, ctx
);
29221 if (ctx
->insn_flags
& ASE_MXU
) {
29222 decode_opc_mxu(env
, ctx
);
29225 decode_opc_special2_legacy(env
, ctx
);
29229 #if defined(TARGET_MIPS64)
29230 if (ctx
->insn_flags
& INSN_R5900
) {
29231 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
29233 decode_opc_special3(env
, ctx
);
29236 decode_opc_special3(env
, ctx
);
29240 op1
= MASK_REGIMM(ctx
->opcode
);
29242 case OPC_BLTZL
: /* REGIMM branches */
29246 check_insn(ctx
, ISA_MIPS2
);
29247 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29251 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29255 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29257 /* OPC_NAL, OPC_BAL */
29258 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
29260 generate_exception_end(ctx
, EXCP_RI
);
29263 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29266 case OPC_TGEI
: /* REGIMM traps */
29273 check_insn(ctx
, ISA_MIPS2
);
29274 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29275 gen_trap(ctx
, op1
, rs
, -1, imm
);
29278 check_insn(ctx
, ISA_MIPS32R6
);
29279 generate_exception_end(ctx
, EXCP_RI
);
29282 check_insn(ctx
, ISA_MIPS32R2
);
29284 * Break the TB to be able to sync copied instructions
29287 ctx
->base
.is_jmp
= DISAS_STOP
;
29289 case OPC_BPOSGE32
: /* MIPS DSP branch */
29290 #if defined(TARGET_MIPS64)
29294 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
29296 #if defined(TARGET_MIPS64)
29298 check_insn(ctx
, ISA_MIPS32R6
);
29299 check_mips_64(ctx
);
29301 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
29305 check_insn(ctx
, ISA_MIPS32R6
);
29306 check_mips_64(ctx
);
29308 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
29312 default: /* Invalid */
29313 MIPS_INVAL("regimm");
29314 generate_exception_end(ctx
, EXCP_RI
);
29319 check_cp0_enabled(ctx
);
29320 op1
= MASK_CP0(ctx
->opcode
);
29328 #if defined(TARGET_MIPS64)
29332 #ifndef CONFIG_USER_ONLY
29333 gen_cp0(env
, ctx
, op1
, rt
, rd
);
29334 #endif /* !CONFIG_USER_ONLY */
29352 #ifndef CONFIG_USER_ONLY
29353 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
29354 #endif /* !CONFIG_USER_ONLY */
29357 #ifndef CONFIG_USER_ONLY
29360 TCGv t0
= tcg_temp_new();
29362 op2
= MASK_MFMC0(ctx
->opcode
);
29366 gen_helper_dmt(t0
);
29367 gen_store_gpr(t0
, rt
);
29371 gen_helper_emt(t0
);
29372 gen_store_gpr(t0
, rt
);
29376 gen_helper_dvpe(t0
, cpu_env
);
29377 gen_store_gpr(t0
, rt
);
29381 gen_helper_evpe(t0
, cpu_env
);
29382 gen_store_gpr(t0
, rt
);
29385 check_insn(ctx
, ISA_MIPS32R6
);
29387 gen_helper_dvp(t0
, cpu_env
);
29388 gen_store_gpr(t0
, rt
);
29392 check_insn(ctx
, ISA_MIPS32R6
);
29394 gen_helper_evp(t0
, cpu_env
);
29395 gen_store_gpr(t0
, rt
);
29399 check_insn(ctx
, ISA_MIPS32R2
);
29400 save_cpu_state(ctx
, 1);
29401 gen_helper_di(t0
, cpu_env
);
29402 gen_store_gpr(t0
, rt
);
29404 * Stop translation as we may have switched
29405 * the execution mode.
29407 ctx
->base
.is_jmp
= DISAS_STOP
;
29410 check_insn(ctx
, ISA_MIPS32R2
);
29411 save_cpu_state(ctx
, 1);
29412 gen_helper_ei(t0
, cpu_env
);
29413 gen_store_gpr(t0
, rt
);
29415 * DISAS_STOP isn't sufficient, we need to ensure we break
29416 * out of translated code to check for pending interrupts.
29418 gen_save_pc(ctx
->base
.pc_next
+ 4);
29419 ctx
->base
.is_jmp
= DISAS_EXIT
;
29421 default: /* Invalid */
29422 MIPS_INVAL("mfmc0");
29423 generate_exception_end(ctx
, EXCP_RI
);
29428 #endif /* !CONFIG_USER_ONLY */
29431 check_insn(ctx
, ISA_MIPS32R2
);
29432 gen_load_srsgpr(rt
, rd
);
29435 check_insn(ctx
, ISA_MIPS32R2
);
29436 gen_store_srsgpr(rt
, rd
);
29440 generate_exception_end(ctx
, EXCP_RI
);
29444 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
29445 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29446 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
29447 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29450 /* Arithmetic with immediate opcode */
29451 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29455 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29457 case OPC_SLTI
: /* Set on less than with immediate opcode */
29459 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
29461 case OPC_ANDI
: /* Arithmetic with immediate opcode */
29462 case OPC_LUI
: /* OPC_AUI */
29465 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
29467 case OPC_J
: /* Jump */
29469 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29470 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29473 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
29474 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29476 generate_exception_end(ctx
, EXCP_RI
);
29479 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
29480 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29483 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29486 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
29487 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29489 generate_exception_end(ctx
, EXCP_RI
);
29492 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
29493 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29496 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29499 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
29502 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29504 check_insn(ctx
, ISA_MIPS32R6
);
29505 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
29506 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29509 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
29512 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29514 check_insn(ctx
, ISA_MIPS32R6
);
29515 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
29516 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29521 check_insn(ctx
, ISA_MIPS2
);
29522 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29526 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29528 case OPC_LL
: /* Load and stores */
29529 check_insn(ctx
, ISA_MIPS2
);
29530 if (ctx
->insn_flags
& INSN_R5900
) {
29531 check_insn_opc_user_only(ctx
, INSN_R5900
);
29536 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29544 gen_ld(ctx
, op
, rt
, rs
, imm
);
29548 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29553 gen_st(ctx
, op
, rt
, rs
, imm
);
29556 check_insn(ctx
, ISA_MIPS2
);
29557 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29558 if (ctx
->insn_flags
& INSN_R5900
) {
29559 check_insn_opc_user_only(ctx
, INSN_R5900
);
29561 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
29564 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29565 check_cp0_enabled(ctx
);
29566 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
29567 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
29568 gen_cache_operation(ctx
, rt
, rs
, imm
);
29570 /* Treat as NOP. */
29573 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29574 if (ctx
->insn_flags
& INSN_R5900
) {
29575 /* Treat as NOP. */
29577 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
29578 /* Treat as NOP. */
29582 /* Floating point (COP1). */
29587 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
29591 op1
= MASK_CP1(ctx
->opcode
);
29596 check_cp1_enabled(ctx
);
29597 check_insn(ctx
, ISA_MIPS32R2
);
29603 check_cp1_enabled(ctx
);
29604 gen_cp1(ctx
, op1
, rt
, rd
);
29606 #if defined(TARGET_MIPS64)
29609 check_cp1_enabled(ctx
);
29610 check_insn(ctx
, ISA_MIPS3
);
29611 check_mips_64(ctx
);
29612 gen_cp1(ctx
, op1
, rt
, rd
);
29615 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
29616 check_cp1_enabled(ctx
);
29617 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29619 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29624 check_insn(ctx
, ASE_MIPS3D
);
29625 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29626 (rt
>> 2) & 0x7, imm
<< 2);
29630 check_cp1_enabled(ctx
);
29631 check_insn(ctx
, ISA_MIPS32R6
);
29632 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29636 check_cp1_enabled(ctx
);
29637 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29639 check_insn(ctx
, ASE_MIPS3D
);
29642 check_cp1_enabled(ctx
);
29643 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29644 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29645 (rt
>> 2) & 0x7, imm
<< 2);
29652 check_cp1_enabled(ctx
);
29653 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29659 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
29660 check_cp1_enabled(ctx
);
29661 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29663 case R6_OPC_CMP_AF_S
:
29664 case R6_OPC_CMP_UN_S
:
29665 case R6_OPC_CMP_EQ_S
:
29666 case R6_OPC_CMP_UEQ_S
:
29667 case R6_OPC_CMP_LT_S
:
29668 case R6_OPC_CMP_ULT_S
:
29669 case R6_OPC_CMP_LE_S
:
29670 case R6_OPC_CMP_ULE_S
:
29671 case R6_OPC_CMP_SAF_S
:
29672 case R6_OPC_CMP_SUN_S
:
29673 case R6_OPC_CMP_SEQ_S
:
29674 case R6_OPC_CMP_SEUQ_S
:
29675 case R6_OPC_CMP_SLT_S
:
29676 case R6_OPC_CMP_SULT_S
:
29677 case R6_OPC_CMP_SLE_S
:
29678 case R6_OPC_CMP_SULE_S
:
29679 case R6_OPC_CMP_OR_S
:
29680 case R6_OPC_CMP_UNE_S
:
29681 case R6_OPC_CMP_NE_S
:
29682 case R6_OPC_CMP_SOR_S
:
29683 case R6_OPC_CMP_SUNE_S
:
29684 case R6_OPC_CMP_SNE_S
:
29685 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29687 case R6_OPC_CMP_AF_D
:
29688 case R6_OPC_CMP_UN_D
:
29689 case R6_OPC_CMP_EQ_D
:
29690 case R6_OPC_CMP_UEQ_D
:
29691 case R6_OPC_CMP_LT_D
:
29692 case R6_OPC_CMP_ULT_D
:
29693 case R6_OPC_CMP_LE_D
:
29694 case R6_OPC_CMP_ULE_D
:
29695 case R6_OPC_CMP_SAF_D
:
29696 case R6_OPC_CMP_SUN_D
:
29697 case R6_OPC_CMP_SEQ_D
:
29698 case R6_OPC_CMP_SEUQ_D
:
29699 case R6_OPC_CMP_SLT_D
:
29700 case R6_OPC_CMP_SULT_D
:
29701 case R6_OPC_CMP_SLE_D
:
29702 case R6_OPC_CMP_SULE_D
:
29703 case R6_OPC_CMP_OR_D
:
29704 case R6_OPC_CMP_UNE_D
:
29705 case R6_OPC_CMP_NE_D
:
29706 case R6_OPC_CMP_SOR_D
:
29707 case R6_OPC_CMP_SUNE_D
:
29708 case R6_OPC_CMP_SNE_D
:
29709 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29712 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29713 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29718 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29733 check_insn(ctx
, ASE_MSA
);
29734 gen_msa_branch(env
, ctx
, op1
);
29738 generate_exception_end(ctx
, EXCP_RI
);
29743 /* Compact branches [R6] and COP2 [non-R6] */
29744 case OPC_BC
: /* OPC_LWC2 */
29745 case OPC_BALC
: /* OPC_SWC2 */
29746 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29747 /* OPC_BC, OPC_BALC */
29748 gen_compute_compact_branch(ctx
, op
, 0, 0,
29749 sextract32(ctx
->opcode
<< 2, 0, 28));
29751 /* OPC_LWC2, OPC_SWC2 */
29752 /* COP2: Not implemented. */
29753 generate_exception_err(ctx
, EXCP_CpU
, 2);
29756 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29757 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29758 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29760 /* OPC_BEQZC, OPC_BNEZC */
29761 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29762 sextract32(ctx
->opcode
<< 2, 0, 23));
29764 /* OPC_JIC, OPC_JIALC */
29765 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29768 /* OPC_LWC2, OPC_SWC2 */
29769 /* COP2: Not implemented. */
29770 generate_exception_err(ctx
, EXCP_CpU
, 2);
29774 check_insn(ctx
, INSN_LOONGSON2F
);
29775 /* Note that these instructions use different fields. */
29776 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29781 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29782 check_cp1_enabled(ctx
);
29783 op1
= MASK_CP3(ctx
->opcode
);
29787 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29793 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29794 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29797 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29798 /* Treat as NOP. */
29801 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29815 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29816 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29820 generate_exception_end(ctx
, EXCP_RI
);
29824 generate_exception_err(ctx
, EXCP_CpU
, 1);
29828 #if defined(TARGET_MIPS64)
29829 /* MIPS64 opcodes */
29831 if (ctx
->insn_flags
& INSN_R5900
) {
29832 check_insn_opc_user_only(ctx
, INSN_R5900
);
29837 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29841 check_insn(ctx
, ISA_MIPS3
);
29842 check_mips_64(ctx
);
29843 gen_ld(ctx
, op
, rt
, rs
, imm
);
29847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29850 check_insn(ctx
, ISA_MIPS3
);
29851 check_mips_64(ctx
);
29852 gen_st(ctx
, op
, rt
, rs
, imm
);
29855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29856 check_insn(ctx
, ISA_MIPS3
);
29857 if (ctx
->insn_flags
& INSN_R5900
) {
29858 check_insn_opc_user_only(ctx
, INSN_R5900
);
29860 check_mips_64(ctx
);
29861 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
29863 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29864 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29865 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29866 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29869 check_insn(ctx
, ISA_MIPS3
);
29870 check_mips_64(ctx
);
29871 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29875 check_insn(ctx
, ISA_MIPS3
);
29876 check_mips_64(ctx
);
29877 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29880 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29881 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29882 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29884 MIPS_INVAL("major opcode");
29885 generate_exception_end(ctx
, EXCP_RI
);
29889 case OPC_DAUI
: /* OPC_JALX */
29890 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29891 #if defined(TARGET_MIPS64)
29893 check_mips_64(ctx
);
29895 generate_exception(ctx
, EXCP_RI
);
29896 } else if (rt
!= 0) {
29897 TCGv t0
= tcg_temp_new();
29898 gen_load_gpr(t0
, rs
);
29899 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29903 generate_exception_end(ctx
, EXCP_RI
);
29904 MIPS_INVAL("major opcode");
29908 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29909 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29910 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29913 case OPC_MSA
: /* OPC_MDMX */
29914 if (ctx
->insn_flags
& INSN_R5900
) {
29915 #if defined(TARGET_MIPS64)
29916 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29919 /* MDMX: Not implemented. */
29924 check_insn(ctx
, ISA_MIPS32R6
);
29925 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29927 default: /* Invalid */
29928 MIPS_INVAL("major opcode");
29929 generate_exception_end(ctx
, EXCP_RI
);
29934 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
29936 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29937 CPUMIPSState
*env
= cs
->env_ptr
;
29939 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
29940 ctx
->saved_pc
= -1;
29941 ctx
->insn_flags
= env
->insn_flags
;
29942 ctx
->CP0_Config1
= env
->CP0_Config1
;
29943 ctx
->CP0_Config2
= env
->CP0_Config2
;
29944 ctx
->CP0_Config3
= env
->CP0_Config3
;
29945 ctx
->CP0_Config5
= env
->CP0_Config5
;
29947 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
29948 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
29949 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
29950 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
29951 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
29952 ctx
->PAMask
= env
->PAMask
;
29953 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
29954 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
29955 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
29956 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
29957 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
29958 /* Restore delay slot state from the tb context. */
29959 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
29960 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
29961 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
29962 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
29963 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
29964 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
29965 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
29966 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
29967 restore_cpu_state(env
, ctx
);
29968 #ifdef CONFIG_USER_ONLY
29969 ctx
->mem_idx
= MIPS_HFLAG_UM
;
29971 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
29973 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
29974 MO_UNALN
: MO_ALIGN
;
29976 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
29980 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29984 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29986 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29988 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
29992 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
29993 const CPUBreakpoint
*bp
)
29995 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29997 save_cpu_state(ctx
, 1);
29998 ctx
->base
.is_jmp
= DISAS_NORETURN
;
29999 gen_helper_raise_exception_debug(cpu_env
);
30001 * The address covered by the breakpoint must be included in
30002 * [tb->pc, tb->pc + tb->size) in order to for it to be
30003 * properly cleared -- thus we increment the PC here so that
30004 * the logic setting tb->size below does the right thing.
30006 ctx
->base
.pc_next
+= 4;
30010 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
30012 CPUMIPSState
*env
= cs
->env_ptr
;
30013 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
30017 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
30018 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
30019 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
30020 insn_bytes
= decode_nanomips_opc(env
, ctx
);
30021 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
30022 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
30024 decode_opc(env
, ctx
);
30025 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
30026 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
30027 insn_bytes
= decode_micromips_opc(env
, ctx
);
30028 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
30029 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
30030 insn_bytes
= decode_mips16_opc(env
, ctx
);
30032 generate_exception_end(ctx
, EXCP_RI
);
30033 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
30037 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
30038 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
30039 MIPS_HFLAG_FBNSLOT
))) {
30041 * Force to generate branch as there is neither delay nor
30046 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
30047 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
30049 * Force to generate branch as microMIPS R6 doesn't restrict
30050 * branches in the forbidden slot.
30056 gen_branch(ctx
, insn_bytes
);
30058 ctx
->base
.pc_next
+= insn_bytes
;
30060 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
30064 * Execute a branch and its delay slot as a single instruction.
30065 * This is what GDB expects and is consistent with what the
30066 * hardware does (e.g. if a delay slot instruction faults, the
30067 * reported PC is the PC of the branch).
30069 if (ctx
->base
.singlestep_enabled
&&
30070 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
30071 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
30073 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
30074 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
30078 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
30080 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
30082 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
30083 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
30084 gen_helper_raise_exception_debug(cpu_env
);
30086 switch (ctx
->base
.is_jmp
) {
30088 gen_save_pc(ctx
->base
.pc_next
);
30089 tcg_gen_lookup_and_goto_ptr();
30092 case DISAS_TOO_MANY
:
30093 save_cpu_state(ctx
, 0);
30094 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
30097 tcg_gen_exit_tb(NULL
, 0);
30099 case DISAS_NORETURN
:
30102 g_assert_not_reached();
30107 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
30109 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
30110 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
30113 static const TranslatorOps mips_tr_ops
= {
30114 .init_disas_context
= mips_tr_init_disas_context
,
30115 .tb_start
= mips_tr_tb_start
,
30116 .insn_start
= mips_tr_insn_start
,
30117 .breakpoint_check
= mips_tr_breakpoint_check
,
30118 .translate_insn
= mips_tr_translate_insn
,
30119 .tb_stop
= mips_tr_tb_stop
,
30120 .disas_log
= mips_tr_disas_log
,
30123 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int max_insns
)
30127 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
, max_insns
);
30130 static void fpu_dump_state(CPUMIPSState
*env
, FILE * f
, int flags
)
30133 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
30135 #define printfpr(fp) \
30138 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
30139 " fd:%13g fs:%13g psu: %13g\n", \
30140 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
30141 (double)(fp)->fd, \
30142 (double)(fp)->fs[FP_ENDIAN_IDX], \
30143 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
30146 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
30147 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
30148 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
30149 " fd:%13g fs:%13g psu:%13g\n", \
30150 tmp.w[FP_ENDIAN_IDX], tmp.d, \
30152 (double)tmp.fs[FP_ENDIAN_IDX], \
30153 (double)tmp.fs[!FP_ENDIAN_IDX]); \
30159 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
30160 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
30161 get_float_exception_flags(&env
->active_fpu
.fp_status
));
30162 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
30163 qemu_fprintf(f
, "%3s: ", fregnames
[i
]);
30164 printfpr(&env
->active_fpu
.fpr
[i
]);
30170 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, int flags
)
30172 MIPSCPU
*cpu
= MIPS_CPU(cs
);
30173 CPUMIPSState
*env
= &cpu
->env
;
30176 qemu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
30177 " LO=0x" TARGET_FMT_lx
" ds %04x "
30178 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
30179 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
30180 env
->hflags
, env
->btarget
, env
->bcond
);
30181 for (i
= 0; i
< 32; i
++) {
30182 if ((i
& 3) == 0) {
30183 qemu_fprintf(f
, "GPR%02d:", i
);
30185 qemu_fprintf(f
, " %s " TARGET_FMT_lx
,
30186 regnames
[i
], env
->active_tc
.gpr
[i
]);
30187 if ((i
& 3) == 3) {
30188 qemu_fprintf(f
, "\n");
30192 qemu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x"
30193 TARGET_FMT_lx
"\n",
30194 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
30195 qemu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
30197 env
->CP0_Config0
, env
->CP0_Config1
, env
->CP0_LLAddr
);
30198 qemu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
30199 env
->CP0_Config2
, env
->CP0_Config3
);
30200 qemu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
30201 env
->CP0_Config4
, env
->CP0_Config5
);
30202 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
30203 fpu_dump_state(env
, f
, flags
);
30207 void mips_tcg_init(void)
30212 for (i
= 1; i
< 32; i
++)
30213 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
30214 offsetof(CPUMIPSState
,
30218 for (i
= 0; i
< 32; i
++) {
30219 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
30221 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
30223 * The scalar floating-point unit (FPU) registers are mapped on
30224 * the MSA vector registers.
30226 fpu_f64
[i
] = msa_wr_d
[i
* 2];
30227 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
30228 msa_wr_d
[i
* 2 + 1] =
30229 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
30232 cpu_PC
= tcg_global_mem_new(cpu_env
,
30233 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
30234 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
30235 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
30236 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
30238 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
30239 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
30242 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
30243 offsetof(CPUMIPSState
,
30244 active_tc
.DSPControl
),
30246 bcond
= tcg_global_mem_new(cpu_env
,
30247 offsetof(CPUMIPSState
, bcond
), "bcond");
30248 btarget
= tcg_global_mem_new(cpu_env
,
30249 offsetof(CPUMIPSState
, btarget
), "btarget");
30250 hflags
= tcg_global_mem_new_i32(cpu_env
,
30251 offsetof(CPUMIPSState
, hflags
), "hflags");
30253 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
30254 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
30256 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
30257 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
30259 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
30261 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
30264 #if defined(TARGET_MIPS64)
30266 for (i
= 1; i
< 32; i
++) {
30267 cpu_mmr
[i
] = tcg_global_mem_new_i64(cpu_env
,
30268 offsetof(CPUMIPSState
,
30274 #if !defined(TARGET_MIPS64)
30275 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
30276 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
30277 offsetof(CPUMIPSState
,
30278 active_tc
.mxu_gpr
[i
]),
30282 mxu_CR
= tcg_global_mem_new(cpu_env
,
30283 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
30284 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
30288 #include "translate_init.inc.c"
30290 void cpu_mips_realize_env(CPUMIPSState
*env
)
30292 env
->exception_base
= (int32_t)0xBFC00000;
30294 #ifndef CONFIG_USER_ONLY
30295 mmu_init(env
, env
->cpu_model
);
30297 fpu_init(env
, env
->cpu_model
);
30298 mvp_init(env
, env
->cpu_model
);
30301 bool cpu_supports_cps_smp(const char *cpu_type
)
30303 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30304 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
30307 bool cpu_supports_isa(const char *cpu_type
, uint64_t isa
)
30309 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30310 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
30313 void cpu_set_exception_base(int vp_index
, target_ulong address
)
30315 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
30316 vp
->env
.exception_base
= address
;
30319 void cpu_state_reset(CPUMIPSState
*env
)
30321 CPUState
*cs
= env_cpu(env
);
30323 /* Reset registers to their default values */
30324 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
30325 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
30326 #ifdef TARGET_WORDS_BIGENDIAN
30327 env
->CP0_Config0
|= (1 << CP0C0_BE
);
30329 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
30330 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
30331 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
30332 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
30333 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
30334 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
30335 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
30336 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
30337 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
30338 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
30339 << env
->cpu_model
->CP0_LLAddr_shift
;
30340 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
30341 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
30342 env
->CCRes
= env
->cpu_model
->CCRes
;
30343 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
30344 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
30345 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
30346 env
->current_tc
= 0;
30347 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
30348 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
30349 #if defined(TARGET_MIPS64)
30350 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
30351 env
->SEGMask
|= 3ULL << 62;
30354 env
->PABITS
= env
->cpu_model
->PABITS
;
30355 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
30356 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
30357 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
30358 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
30359 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
30360 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
30361 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
30362 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
30363 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
30364 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
30365 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
30366 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
30367 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
30368 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
30369 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
30370 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
30371 env
->msair
= env
->cpu_model
->MSAIR
;
30372 env
->insn_flags
= env
->cpu_model
->insn_flags
;
30374 #if defined(CONFIG_USER_ONLY)
30375 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
30376 # ifdef TARGET_MIPS64
30377 /* Enable 64-bit register mode. */
30378 env
->CP0_Status
|= (1 << CP0St_PX
);
30380 # ifdef TARGET_ABI_MIPSN64
30381 /* Enable 64-bit address mode. */
30382 env
->CP0_Status
|= (1 << CP0St_UX
);
30385 * Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
30386 * hardware registers.
30388 env
->CP0_HWREna
|= 0x0000000F;
30389 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
30390 env
->CP0_Status
|= (1 << CP0St_CU1
);
30392 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
30393 env
->CP0_Status
|= (1 << CP0St_MX
);
30395 # if defined(TARGET_MIPS64)
30396 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
30397 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
30398 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
30399 env
->CP0_Status
|= (1 << CP0St_FR
);
30403 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
30405 * If the exception was raised from a delay slot,
30406 * come back to the jump.
30408 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
30409 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
30411 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
30413 env
->active_tc
.PC
= env
->exception_base
;
30414 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
30415 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
30416 env
->CP0_Wired
= 0;
30417 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
30418 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
30419 if (mips_um_ksegs_enabled()) {
30420 env
->CP0_EBase
|= 0x40000000;
30422 env
->CP0_EBase
|= (int32_t)0x80000000;
30424 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
30425 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
30427 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
30429 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
30431 * Vectored interrupts not implemented, timer on int 7,
30432 * no performance counters.
30434 env
->CP0_IntCtl
= 0xe0000000;
30438 for (i
= 0; i
< 7; i
++) {
30439 env
->CP0_WatchLo
[i
] = 0;
30440 env
->CP0_WatchHi
[i
] = 0x80000000;
30442 env
->CP0_WatchLo
[7] = 0;
30443 env
->CP0_WatchHi
[7] = 0;
30445 /* Count register increments in debug mode, EJTAG version 1 */
30446 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
30448 cpu_mips_store_count(env
, 1);
30450 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
30453 /* Only TC0 on VPE 0 starts as active. */
30454 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
30455 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
30456 env
->tcs
[i
].CP0_TCHalt
= 1;
30458 env
->active_tc
.CP0_TCHalt
= 1;
30461 if (cs
->cpu_index
== 0) {
30462 /* VPE0 starts up enabled. */
30463 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
30464 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
30466 /* TC0 starts up unhalted. */
30468 env
->active_tc
.CP0_TCHalt
= 0;
30469 env
->tcs
[0].CP0_TCHalt
= 0;
30470 /* With thread 0 active. */
30471 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
30472 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
30477 * Configure default legacy segmentation control. We use this regardless of
30478 * whether segmentation control is presented to the guest.
30480 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
30481 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
30482 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
30483 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
30484 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
30485 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30487 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
30488 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30489 (3 << CP0SC_C
)) << 16;
30490 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
30491 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30492 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
30493 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
30494 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30495 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
30496 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
30497 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
30499 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
30500 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
30501 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
30502 env
->CP0_Status
|= (1 << CP0St_FR
);
30505 if (env
->insn_flags
& ISA_MIPS32R6
) {
30507 env
->CP0_PWSize
= 0x40;
30513 env
->CP0_PWField
= 0x0C30C302;
30520 env
->CP0_PWField
= 0x02;
30523 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
30524 /* microMIPS on reset when Config3.ISA is 3 */
30525 env
->hflags
|= MIPS_HFLAG_M16
;
30529 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
30533 compute_hflags(env
);
30534 restore_fp_status(env
);
30535 restore_pamask(env
);
30536 cs
->exception_index
= EXCP_NONE
;
30538 if (semihosting_get_argc()) {
30539 /* UHI interface can be used to obtain argc and argv */
30540 env
->active_tc
.gpr
[4] = -1;
30544 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
30545 target_ulong
*data
)
30547 env
->active_tc
.PC
= data
[0];
30548 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
30549 env
->hflags
|= data
[1];
30550 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
30551 case MIPS_HFLAG_BR
:
30553 case MIPS_HFLAG_BC
:
30554 case MIPS_HFLAG_BL
:
30556 env
->btarget
= data
[2];