/*
 *  MIPS emulation for QEMU - main translation routines
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "hw/semihosting/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#include "qemu/qemu-print.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)           (op & (0x3F << 26))
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
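
/*
 * Illustrative sketch (an assumption, not code used by the translator):
 * MASK_OP_MAJOR() keeps only the 6-bit major opcode in bits 31..26, so a
 * fetched instruction word can be compared directly against the OPC_*
 * values above.  The helper name is hypothetical.
 */
static inline bool opc_major_is_special(uint32_t insn)
{
    /* True when the word decodes through the SPECIAL (function-field) table. */
    return MASK_OP_MAJOR(insn) == OPC_SPECIAL;
}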
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)            (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
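
/*
 * Illustrative sketch (an assumption, not part of the decoder): within the
 * SPECIAL major opcode the operation lives in the 6-bit function field
 * (bits 5..0), which is exactly what MASK_SPECIAL() folds back into the
 * comparison value.  The helper name is hypothetical.
 */
static inline bool opc_is_addu(uint32_t insn)
{
    /* ADDU is function field 0x21 inside the SPECIAL opcode space. */
    return MASK_SPECIAL(insn) == OPC_ADDU;
}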
/*
 * R6 Multiply and Divide instructions have the same opcode
 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
 */
#define MASK_R6_MULDIV(op)          (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA  = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
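
/*
 * Illustrative sketch (an assumption): because the R6 forms reuse the
 * legacy MULT/DIV encodings, MASK_R6_MULDIV() additionally keeps bits
 * 10..6 so that, for example, R6 MUL (sa field == 2) can be told apart
 * from a pre-R6 MULT (sa field == 0).  The helper name is hypothetical.
 */
static inline bool opc_is_r6_mul(uint32_t insn)
{
    return MASK_R6_MULDIV(insn) == R6_OPC_MUL;
}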
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)         (MASK_SPECIAL(op) | (op & (0x1F << 6)))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)             (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
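
/*
 * Illustrative sketch (an assumption): REGIMM instructions are selected by
 * the rt field (bits 20..16) rather than by a function field, which is why
 * MASK_REGIMM() folds that field into the comparison value.  The helper
 * name is hypothetical.
 */
static inline bool opc_is_bgez(uint32_t insn)
{
    return MASK_REGIMM(insn) == OPC_BGEZ;
}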
/* Special2 opcodes */
#define MASK_SPECIAL2(op)           (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)           (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    OPC_LWLE           = 0x19 | OPC_SPECIAL3,
    OPC_LWRE           = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE         = 0x1B | OPC_SPECIAL3,
    OPC_SBE            = 0x1C | OPC_SPECIAL3,
    OPC_SHE            = 0x1D | OPC_SPECIAL3,
    OPC_SCE            = 0x1E | OPC_SPECIAL3,
    OPC_SWE            = 0x1F | OPC_SPECIAL3,
    OPC_SWLE           = 0x21 | OPC_SPECIAL3,
    OPC_SWRE           = 0x22 | OPC_SPECIAL3,
    OPC_PREFE          = 0x23 | OPC_SPECIAL3,
    OPC_LBUE           = 0x28 | OPC_SPECIAL3,
    OPC_LHUE           = 0x29 | OPC_SPECIAL3,
    OPC_LBE            = 0x2C | OPC_SPECIAL3,
    OPC_LHE            = 0x2D | OPC_SPECIAL3,
    OPC_LLE            = 0x2E | OPC_SPECIAL3,
    OPC_LWE            = 0x2F | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)              (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN    = (0x08 << 6) | OPC_BSHFL, /* 010.bp (010.00 to 010.11) */
    OPC_ALIGN_1  = (0x09 << 6) | OPC_BSHFL,
    OPC_ALIGN_2  = (0x0A << 6) | OPC_BSHFL,
    OPC_ALIGN_3  = (0x0B << 6) | OPC_BSHFL,
    OPC_BITSWAP  = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)             (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_DSBH      = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD      = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN    = (0x08 << 6) | OPC_DBSHFL, /* 01.bp (01.000 to 01.111) */
    OPC_DALIGN_1  = (0x09 << 6) | OPC_DBSHFL,
    OPC_DALIGN_2  = (0x0A << 6) | OPC_DBSHFL,
    OPC_DALIGN_3  = (0x0B << 6) | OPC_DBSHFL,
    OPC_DALIGN_4  = (0x0C << 6) | OPC_DBSHFL,
    OPC_DALIGN_5  = (0x0D << 6) | OPC_DBSHFL,
    OPC_DALIGN_6  = (0x0E << 6) | OPC_DBSHFL,
    OPC_DALIGN_7  = (0x0F << 6) | OPC_DBSHFL,
    OPC_DBITSWAP  = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op)                 (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op)           (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op)          (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op)         (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op)           (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op)               (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op)             (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op)             (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op)          (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op)         (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op)              (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op)          (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op)            (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)                (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_1     = (0x11 << 21) | OPC_CP0,
    OPC_C0_2     = (0x12 << 21) | OPC_CP0,
    OPC_C0_3     = (0x13 << 21) | OPC_CP0,
    OPC_C0_4     = (0x14 << 21) | OPC_CP0,
    OPC_C0_5     = (0x15 << 21) | OPC_CP0,
    OPC_C0_6     = (0x16 << 21) | OPC_CP0,
    OPC_C0_7     = (0x17 << 21) | OPC_CP0,
    OPC_C0_8     = (0x18 << 21) | OPC_CP0,
    OPC_C0_9     = (0x19 << 21) | OPC_CP0,
    OPC_C0_A     = (0x1A << 21) | OPC_CP0,
    OPC_C0_B     = (0x1B << 21) | OPC_CP0,
    OPC_C0_C     = (0x1C << 21) | OPC_CP0,
    OPC_C0_D     = (0x1D << 21) | OPC_CP0,
    OPC_C0_E     = (0x1E << 21) | OPC_CP0,
    OPC_C0_F     = (0x1F << 21) | OPC_CP0,
};
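
/*
 * Illustrative sketch (an assumption): CP0 operations are distinguished by
 * the rs field (bits 25..21), so MASK_CP0() combines the major opcode with
 * that field before comparison.  The helper name is hypothetical.
 */
static inline bool opc_is_mtc0(uint32_t insn)
{
    return MASK_CP0(insn) == OPC_MTC0;
}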
#define MASK_MFMC0(op)              (MASK_CP0(op) | (op & 0xFFFF))

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP      = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP      = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)                 (MASK_CP0(op) | (op & 0x3F))

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBINV   = 0x03 | OPC_C0,
    OPC_TLBINVF  = 0x04 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)                (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,          /* single fp */
    FMT_D  = 17,          /* double fp */
    FMT_E  = 18,          /* extended fp */
    FMT_Q  = 19,          /* quad fp */
    FMT_W  = 20,          /* 32-bit fixed */
    FMT_L  = 21,          /* 64-bit fixed */
    FMT_PS = 22,          /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V     = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V    = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ   = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ   = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B     = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H     = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W     = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D     = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B    = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H    = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W    = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D    = (0x1F << 21) | OPC_CP1,
};
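
/*
 * Illustrative sketch (an assumption): for CP1 arithmetic the rs position
 * carries the fmt value (FMT_S, FMT_D, ...), so MASK_CP1() maps a word
 * directly onto the OPC_*_FMT selectors above.  The helper name is
 * hypothetical.
 */
static inline bool opc_is_double_fmt(uint32_t insn)
{
    return MASK_CP1(insn) == OPC_D_FMT;
}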
#define MASK_CP1_FUNC(op)           (MASK_CP1(op) | (op & 0x3F))
#define MASK_BC1(op)                (MASK_CP1(op) | (op & (0x3 << 16)))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)                (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

enum {
    OPC_MFC2    = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2   = (0x01 << 21) | OPC_CP2,
    OPC_CFC2    = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2   = (0x03 << 21) | OPC_CP2,
    OPC_MTC2    = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2   = (0x05 << 21) | OPC_CP2,
    OPC_CTC2    = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2   = (0x07 << 21) | OPC_CP2,
    OPC_BC2     = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ  = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ  = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)                (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op)          (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00   = 0x00 | OPC_MSA,
    OPC_MSA_I8_01   = 0x01 | OPC_MSA,
    OPC_MSA_I8_02   = 0x02 | OPC_MSA,
    OPC_MSA_I5_06   = 0x06 | OPC_MSA,
    OPC_MSA_I5_07   = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09  = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A  = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D   = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E   = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F   = 0x0F | OPC_MSA,
    OPC_MSA_3R_10   = 0x10 | OPC_MSA,
    OPC_MSA_3R_11   = 0x11 | OPC_MSA,
    OPC_MSA_3R_12   = 0x12 | OPC_MSA,
    OPC_MSA_3R_13   = 0x13 | OPC_MSA,
    OPC_MSA_3R_14   = 0x14 | OPC_MSA,
    OPC_MSA_3R_15   = 0x15 | OPC_MSA,
    OPC_MSA_ELM     = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A  = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B  = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C  = 0x1C | OPC_MSA,
    OPC_MSA_VEC     = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B        = (0x20) | OPC_MSA,
    OPC_LD_H        = (0x21) | OPC_MSA,
    OPC_LD_W        = (0x22) | OPC_MSA,
    OPC_LD_D        = (0x23) | OPC_MSA,
    OPC_ST_B        = (0x24) | OPC_MSA,
    OPC_ST_H        = (0x25) | OPC_MSA,
    OPC_ST_W        = (0x26) | OPC_MSA,
    OPC_ST_D        = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df    = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df     = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df    = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df   = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df   = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df   = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df   = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df   = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df   = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df   = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df   = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df      = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B      = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B     = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B       = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B       = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B      = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H       = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B      = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B     = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W       = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B      = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V       = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V        = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V       = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V       = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V      = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V       = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V      = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R      = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF     = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df     = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df     = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df     = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df     = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df     = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA      = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df   = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA      = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df   = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V      = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df   = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df   = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df    = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df     = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df     = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df     = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df     = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df     = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df     = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df     = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df    = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df    = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df     = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df     = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df     = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df    = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df    = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df    = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df    = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df   = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df     = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df   = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df    = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df    = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df     = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df    = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df     = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df     = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df     = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df      = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df    = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df    = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df     = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df     = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df     = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df   = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df    = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df   = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df  = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df     = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df     = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df  = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df    = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df   = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df     = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df    = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df     = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df    = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df     = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df    = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df    = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df    = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df    = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df    = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df   = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df   = (0x7 << 23) | OPC_MSA_BIT_09,
};
/*
 *     AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
 *     ============================================
 *
 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
 * instructions set. It is designed to fit the needs of signal, graphical and
 * video processing applications. MXU instruction set is used in Xburst family
 * of microprocessors by Ingenic.
 *
 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
 * the control register.
 *
 *     The notation used in MXU assembler mnemonics
 *     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *  Register operands:
 *
 *   XRa, XRb, XRc, XRd - MXU registers
 *   Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
 *
 *  Non-register operands:
 *
 *   aptn1 - 1-bit accumulate add/subtract pattern
 *   aptn2 - 2-bit accumulate add/subtract pattern
 *   eptn2 - 2-bit execute add/subtract pattern
 *   optn2 - 2-bit operand pattern
 *   optn3 - 3-bit operand pattern
 *   sft4  - 4-bit shift amount
 *   strd2 - 2-bit stride amount
 *
 *   Level of parallelism:                Operand size:
 *    S - single operation at a time      32 - word
 *    D - two operations in parallel      16 - half word
 *    Q - four operations in parallel      8 - byte
 *
 *   ADD - Add or subtract
 *   ADDC - Add with carry-in
 *   ASUM - Sum together then accumulate (add or subtract)
 *   ASUMC - Sum together then accumulate (add or subtract) with carry-in
 *   AVG - Average between 2 operands
 *   ABD - Absolute difference
 *   AND - Logical bitwise 'and' operation
 *   EXTR - Extract bits
 *   I2M - Move from GPR register to MXU register
 *   LDD - Load data from memory to XRF
 *   LDI - Load data from memory to XRF (and increase the address base)
 *   LUI - Load unsigned immediate
 *   MULU - Unsigned multiply
 *   MADD - 64-bit operand add 32x32 product
 *   MSUB - 64-bit operand subtract 32x32 product
 *   MAC - Multiply and accumulate (add or subtract)
 *   MAD - Multiply and add or subtract
 *   MAX - Maximum between 2 operands
 *   MIN - Minimum between 2 operands
 *   M2I - Move from MXU register to GPR register
 *   MOVZ - Move if zero
 *   MOVN - Move if non-zero
 *   NOR - Logical bitwise 'nor' operation
 *   OR - Logical bitwise 'or' operation
 *   STD - Store data from XRF to memory
 *   SDI - Store data from XRF to memory (and increase the address base)
 *   SLT - Set of less than comparison
 *   SAD - Sum of absolute differences
 *   SLL - Logical shift left
 *   SLR - Logical shift right
 *   SAR - Arithmetic shift right
 *   SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
 *   XOR - Logical bitwise 'exclusive or' operation
 *
 *   E - Expand results
 *   F - Fixed point multiplication
 *   L - Low part result
 *   R - Doing rounding
 *   V - Variable instead of immediate
 *   W - Combine above L and V
 *
 *     The list of MXU instructions grouped by functionality
 *     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 * Load/Store instructions           Multiplication instructions
 * -----------------------           ---------------------------
 *
 *  S32LDD XRa, Rb, s12               S32MADD XRa, XRd, Rs, Rt
 *  S32STD XRa, Rb, s12               S32MADDU XRa, XRd, Rs, Rt
 *  S32LDDV XRa, Rb, rc, strd2        S32MSUB XRa, XRd, Rs, Rt
 *  S32STDV XRa, Rb, rc, strd2        S32MSUBU XRa, XRd, Rs, Rt
 *  S32LDI XRa, Rb, s12               S32MUL XRa, XRd, Rs, Rt
 *  S32SDI XRa, Rb, s12               S32MULU XRa, XRd, Rs, Rt
 *  S32LDIV XRa, Rb, rc, strd2        D16MUL XRa, XRb, XRc, XRd, optn2
 *  S32SDIV XRa, Rb, rc, strd2        D16MULE XRa, XRb, XRc, optn2
 *  S32LDDR XRa, Rb, s12              D16MULF XRa, XRb, XRc, optn2
 *  S32STDR XRa, Rb, s12              D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32LDDVR XRa, Rb, rc, strd2       D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32STDVR XRa, Rb, rc, strd2       D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32LDIR XRa, Rb, s12              D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32SDIR XRa, Rb, s12              S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
 *  S32LDIVR XRa, Rb, rc, strd2       Q8MUL XRa, XRb, XRc, XRd
 *  S32SDIVR XRa, Rb, rc, strd2       Q8MULSU XRa, XRb, XRc, XRd
 *  S16LDD XRa, Rb, s10, eptn2        Q8MAC XRa, XRb, XRc, XRd, aptn2
 *  S16STD XRa, Rb, s10, eptn2        Q8MACSU XRa, XRb, XRc, XRd, aptn2
 *  S16LDI XRa, Rb, s10, eptn2        Q8MADL XRa, XRb, XRc, XRd, aptn2
 *  S16SDI XRa, Rb, s10, eptn2
 *  S8LDD XRa, Rb, s8, eptn3
 *  S8STD XRa, Rb, s8, eptn3          Addition and subtraction instructions
 *  S8LDI XRa, Rb, s8, eptn3          -------------------------------------
 *  S8SDI XRa, Rb, s8, eptn3
 *  LXW Rd, Rs, Rt, strd2             D32ADD XRa, XRb, XRc, XRd, eptn2
 *  LXH Rd, Rs, Rt, strd2             D32ADDC XRa, XRb, XRc, XRd
 *  LXHU Rd, Rs, Rt, strd2            D32ACC XRa, XRb, XRc, XRd, eptn2
 *  LXB Rd, Rs, Rt, strd2             D32ACCM XRa, XRb, XRc, XRd, eptn2
 *  LXBU Rd, Rs, Rt, strd2            D32ASUM XRa, XRb, XRc, XRd, eptn2
 *                                    S32CPS XRa, XRb, XRc
 *                                    Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
 * Comparison instructions            Q16ACC XRa, XRb, XRc, XRd, eptn2
 * -----------------------            Q16ACCM XRa, XRb, XRc, XRd, eptn2
 *                                    D16ASUM XRa, XRb, XRc, XRd, eptn2
 *  S32MAX XRa, XRb, XRc              D16CPS XRa, XRb,
 *  S32MIN XRa, XRb, XRc              D16AVG XRa, XRb, XRc
 *  S32SLT XRa, XRb, XRc              D16AVGR XRa, XRb, XRc
 *  S32MOVZ XRa, XRb, XRc             Q8ADD XRa, XRb, XRc, eptn2
 *  S32MOVN XRa, XRb, XRc             Q8ADDE XRa, XRb, XRc, XRd, eptn2
 *  D16MAX XRa, XRb, XRc              Q8ACCE XRa, XRb, XRc, XRd, eptn2
 *  D16MIN XRa, XRb, XRc              Q8ABD XRa, XRb, XRc
 *  D16SLT XRa, XRb, XRc              Q8SAD XRa, XRb, XRc, XRd
1544 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1545 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1546 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1547 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1548 * Q8SLT XRa, XRb, XRc
1549 * Q8SLTU XRa, XRb, XRc
1550 * Q8MOVZ XRa, XRb, XRc Shift instructions
1551 * Q8MOVN XRa, XRb, XRc ------------------
1553 * D32SLL XRa, XRb, XRc, XRd, sft4
1554 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1555 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1556 * D32SARL XRa, XRb, XRc, sft4
1557 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1558 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1559 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1560 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1561 * Q16SLL XRa, XRb, XRc, XRd, sft4
1562 * Q16SLR XRa, XRb, XRc, XRd, sft4
1563 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1564 * ------------------------- Q16SLLV XRa, XRb, Rb
1565 * Q16SLRV XRa, XRb, Rb
1566 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1567 * S32ALN XRa, XRb, XRc, Rb
1568 * S32ALNI XRa, XRb, XRc, s3
1569 * S32LUI XRa, s8, optn3 Move instructions
1570 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1571 * S32EXTRV XRa, XRb, Rs, Rt
1572 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1573 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1576 * The opcode organization of MXU instructions
1577 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1579 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1580 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1581 * other bits up to the instruction level is as follows:
1586 * ┌─ 000000 ─ OPC_MXU_S32MADD
1587 * ├─ 000001 ─ OPC_MXU_S32MADDU
1588 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1591 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1592 * │ ├─ 001 ─ OPC_MXU_S32MIN
1593 * │ ├─ 010 ─ OPC_MXU_D16MAX
1594 * │ ├─ 011 ─ OPC_MXU_D16MIN
1595 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1596 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1597 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1598 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1599 * ├─ 000100 ─ OPC_MXU_S32MSUB
1600 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1601 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1602 * │ ├─ 001 ─ OPC_MXU_D16SLT
1603 * │ ├─ 010 ─ OPC_MXU_D16AVG
1604 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1605 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1606 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1607 * │ └─ 111 ─ OPC_MXU_Q8ADD
1610 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1611 * │ ├─ 010 ─ OPC_MXU_D16CPS
1612 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1613 * │ └─ 110 ─ OPC_MXU_Q16SAT
1614 * ├─ 001000 ─ OPC_MXU_D16MUL
1616 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1617 * │ └─ 01 ─ OPC_MXU_D16MULE
1618 * ├─ 001010 ─ OPC_MXU_D16MAC
1619 * ├─ 001011 ─ OPC_MXU_D16MACF
1620 * ├─ 001100 ─ OPC_MXU_D16MADL
1621 * ├─ 001101 ─ OPC_MXU_S16MAD
1622 * ├─ 001110 ─ OPC_MXU_Q16ADD
1623 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1624 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1625 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1628 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1629 * │ └─ 1 ─ OPC_MXU_S32STDR
1632 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1633 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1636 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1637 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1640 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1641 * │ └─ 1 ─ OPC_MXU_S32LDIR
1644 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1645 * │ └─ 1 ─ OPC_MXU_S32SDIR
1648 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1649 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1652 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1653 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1654 * ├─ 011000 ─ OPC_MXU_D32ADD
1656 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1657 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1658 * │ └─ 10 ─ OPC_MXU_D32ASUM
1659 * ├─ 011010 ─ <not assigned>
1661 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1662 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1663 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1666 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1667 * │ ├─ 01 ─ OPC_MXU_D8SUM
1668 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1669 * ├─ 011110 ─ <not assigned>
1670 * ├─ 011111 ─ <not assigned>
1671 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1672 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1673 * ├─ 100010 ─ OPC_MXU_S8LDD
1674 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1675 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1676 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 00 ─ OPC_MXU_S32MULU
1677 * │ ├─ 00 ─ OPC_MXU_S32EXTR
1678 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 00 ─ OPC_MXU_S32EXTRV
1681 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1682 * │ ├─ 001 ─ OPC_MXU_S32ALN
1683 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1684 * │ ├─ 011 ─ OPC_MXU_S32LUI
1685 * │ ├─ 100 ─ OPC_MXU_S32NOR
1686 * │ ├─ 101 ─ OPC_MXU_S32AND
1687 * │ ├─ 110 ─ OPC_MXU_S32OR
1688 * │ └─ 111 ─ OPC_MXU_S32XOR
1691 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1692 * │ ├─ 001 ─ OPC_MXU_LXH
1693 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1694 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1695 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1696 * ├─ 101100 ─ OPC_MXU_S16LDI
1697 * ├─ 101101 ─ OPC_MXU_S16SDI
1698 * ├─ 101110 ─ OPC_MXU_S32M2I
1699 * ├─ 101111 ─ OPC_MXU_S32I2M
1700 * ├─ 110000 ─ OPC_MXU_D32SLL
1701 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1702 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1703 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1704 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1705 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1706 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1707 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1709 * ├─ 110111 ─ OPC_MXU_Q16SAR
1711 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1712 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1715 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1716 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1717 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1718 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1719 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1720 * │ └─ 101 ─ OPC_MXU_S32MOVN
1723 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1724 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1725 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1726 * ├─ 111100 ─ OPC_MXU_Q8MADL
1727 * ├─ 111101 ─ OPC_MXU_S32SFL
1728 * ├─ 111110 ─ OPC_MXU_Q8SAD
1729 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1734 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1735 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
 */

enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
};
/* MXU pool 00 */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU pool 01 */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU pool 02 */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU pool 03 */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU pool 04 */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU pool 05 */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU pool 06 */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU pool 07 */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU pool 08 */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU pool 09 */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU pool 10 */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU pool 11 */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU pool 12 */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU pool 13 */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU pool 14 */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU pool 15 */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU pool 16 */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32LUI   = 0x03,
    OPC_MXU_S32NOR   = 0x04,
    OPC_MXU_S32AND   = 0x05,
    OPC_MXU_S32OR    = 0x06,
    OPC_MXU_S32XOR   = 0x07,
};

/* MXU pool 17 */
enum {
    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,
};

/* MXU pool 18 */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU pool 19 */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU pool 20 */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/* MXU pool 21 */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
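/*
 * Illustrative sketch (added; not part of the original translator): the
 * two-level decode implied by the opcode-organization diagram above.  The
 * 6-bit minor field in bits 5..0 selects either a concrete MXU instruction
 * or one of the OPC_MXU__POOLxx groups; pool 00 members are then told
 * apart by bits 20..18.  The helper name is hypothetical and this assumes
 * QEMU's usual bit-field helper extract32() is available here.
 */
static inline bool mxu_insn_is_s32max_sketch(uint32_t insn)
{
    /* Major field: bits 5..0 of the SPECIAL2-encoded word. */
    if (extract32(insn, 0, 6) != OPC_MXU__POOL00) {
        return false;
    }
    /* Pool 00 sub-field: bits 20..18 (the "20..18" column in the tree above). */
    return extract32(insn, 18, 3) == OPC_MXU_S32MAX;
}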
2018 * Overview of the TX79-specific instruction set
2019 * =============================================
2021 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2022 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2023 * instructions and certain multimedia instructions (MMIs). These MMIs
2024 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2025 * or sixteen 8-bit paths.
2029 * The Toshiba TX System RISC TX79 Core Architecture manual,
2030 * https://wiki.qemu.org/File:C790.pdf
2032 * Three-Operand Multiply and Multiply-Add (4 instructions)
2033 * --------------------------------------------------------
2034 * MADD [rd,] rs, rt Multiply/Add
2035 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2036 * MULT [rd,] rs, rt Multiply (3-operand)
2037 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2039 * Multiply Instructions for Pipeline 1 (10 instructions)
2040 * ------------------------------------------------------
2041 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2042 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2043 * DIV1 rs, rt Divide Pipeline 1
2044 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2045 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2046 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2047 * MFHI1 rd Move From HI1 Register
2048 * MFLO1 rd Move From LO1 Register
2049 * MTHI1 rs Move To HI1 Register
2050 * MTLO1 rs Move To LO1 Register
2052 * Arithmetic (19 instructions)
2053 * ----------------------------
2054 * PADDB rd, rs, rt Parallel Add Byte
2055 * PSUBB rd, rs, rt Parallel Subtract Byte
2056 * PADDH rd, rs, rt Parallel Add Halfword
2057 * PSUBH rd, rs, rt Parallel Subtract Halfword
2058 * PADDW rd, rs, rt Parallel Add Word
2059 * PSUBW rd, rs, rt Parallel Subtract Word
2060 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2061 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2062 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2063 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2064 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2065 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2066 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2067 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2068 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2069 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2070 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2071 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2072 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2074 * Min/Max (4 instructions)
2075 * ------------------------
2076 * PMAXH rd, rs, rt Parallel Maximum Halfword
2077 * PMINH rd, rs, rt Parallel Minimum Halfword
2078 * PMAXW rd, rs, rt Parallel Maximum Word
2079 * PMINW rd, rs, rt Parallel Minimum Word
2081 * Absolute (2 instructions)
2082 * -------------------------
2083 * PABSH rd, rt Parallel Absolute Halfword
2084 * PABSW rd, rt Parallel Absolute Word
2086 * Logical (4 instructions)
2087 * ------------------------
2088 * PAND rd, rs, rt Parallel AND
2089 * POR rd, rs, rt Parallel OR
2090 * PXOR rd, rs, rt Parallel XOR
2091 * PNOR rd, rs, rt Parallel NOR
2093 * Shift (9 instructions)
2094 * ----------------------
2095 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2096 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2097 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2098 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2099 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2100 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2101 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2102 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2103 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2105 * Compare (6 instructions)
2106 * ------------------------
2107 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2108 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2109 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2110 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2111 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2112 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2114 * LZC (1 instruction)
2115 * -------------------
2116 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2118 * Quadword Load and Store (2 instructions)
2119 * ----------------------------------------
2120 * LQ rt, offset(base) Load Quadword
2121 * SQ rt, offset(base) Store Quadword
2123 * Multiply and Divide (19 instructions)
2124 * -------------------------------------
2125 * PMULTW rd, rs, rt Parallel Multiply Word
2126 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2127 * PDIVW rs, rt Parallel Divide Word
2128 * PDIVUW rs, rt Parallel Divide Unsigned Word
2129 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2130 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2131 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2132 * PMULTH rd, rs, rt Parallel Multiply Halfword
2133 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2134 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2135 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2136 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2137 * PDIVBW rs, rt Parallel Divide Broadcast Word
2138 * PMFHI rd Parallel Move From HI Register
2139 * PMFLO rd Parallel Move From LO Register
2140 * PMTHI rs Parallel Move To HI Register
2141 * PMTLO rs Parallel Move To LO Register
2142 * PMFHL rd Parallel Move From HI/LO Register
2143 * PMTHL rs Parallel Move To HI/LO Register
2145 * Pack/Extend (11 instructions)
2146 * -----------------------------
2147 * PPAC5 rd, rt Parallel Pack to 5 bits
2148 * PPACB rd, rs, rt Parallel Pack to Byte
2149 * PPACH rd, rs, rt Parallel Pack to Halfword
2150 * PPACW rd, rs, rt Parallel Pack to Word
2151 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2152 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2153 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2154 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2155 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2156 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2157 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2159 * Others (16 instructions)
2160 * ------------------------
2161 * PCPYH rd, rt Parallel Copy Halfword
2162 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2163 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2164 * PREVH rd, rt Parallel Reverse Halfword
2165 * PINTH rd, rs, rt Parallel Interleave Halfword
2166 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2167 * PEXEH rd, rt Parallel Exchange Even Halfword
2168 * PEXCH rd, rt Parallel Exchange Center Halfword
2169 * PEXEW rd, rt Parallel Exchange Even Word
2170 * PEXCW rd, rt Parallel Exchange Center Word
2171 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2172 * MFSA rd Move from Shift Amount Register
2173 * MTSA rs Move to Shift Amount Register
2174 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2175 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2176 * PROT3W rd, rt Parallel Rotate 3 Words
2178 * MMI (MultiMedia Instruction) encodings
2179 * ======================================
2181 * MMI instructions encoding table keys:
2183 * * This code is reserved for future use. An attempt to execute it
2184 * causes a Reserved Instruction exception.
2185 * % This code indicates an instruction class. The instruction word
2186 * must be further decoded by examining additional tables that show
2187 * the values for other instruction fields.
2188 * # This code is reserved for the unsupported instructions DMULT,
2189 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2190 * to execute it causes a Reserved Instruction exception.
2192 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2195 * +--------+----------------------------------------+
2197 * +--------+----------------------------------------+
2199 * opcode bits 28..26
2200 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2201 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2202 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2203 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2204 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2205 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2206 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2207 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2208 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2209 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2210 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
 */

enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
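/*
 * Added for illustration (not in the original file): the aliasing noted in
 * the comments above can be made explicit with compile-time checks, since
 * the MMI class reuses the SPECIAL2 and SPECIAL3 major opcode slots.
 */
QEMU_BUILD_BUG_ON(MMI_OPC_CLASS_MMI != OPC_SPECIAL2);
QEMU_BUILD_BUG_ON(MMI_OPC_SQ != OPC_SPECIAL3);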
2220 * MMI instructions with opcode field = MMI:
2223 * +--------+-------------------------------+--------+
2224 * | MMI | |function|
2225 * +--------+-------------------------------+--------+
2227 * function bits 2..0
2228 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2229 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2230 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2231 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2232 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2233 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2234 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2235 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2236 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2237 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2238 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
 */

#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))

enum {
    MMI_OPC_MADD       = 0x00 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADD */
    MMI_OPC_MADDU      = 0x01 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADDU */
    MMI_OPC_PLZCW      = 0x04 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI0 = 0x08 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI2 = 0x09 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MFHI1      = 0x10 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFHI */
    MMI_OPC_MTHI1      = 0x11 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTHI */
    MMI_OPC_MFLO1      = 0x12 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFLO */
    MMI_OPC_MTLO1      = 0x13 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTLO */
    MMI_OPC_MULT1      = 0x18 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MULT */
    MMI_OPC_MULTU1     = 0x19 | MMI_OPC_CLASS_MMI, /* Same min. as OPC_MULTU */
    MMI_OPC_DIV1       = 0x1A | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIV */
    MMI_OPC_DIVU1      = 0x1B | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIVU */
    MMI_OPC_MADD1      = 0x20 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MADDU1     = 0x21 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI1 = 0x28 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI3 = 0x29 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMFHL      = 0x30 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMTHL      = 0x31 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLH      = 0x34 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLH      = 0x36 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAH      = 0x37 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLW      = 0x3C | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLW      = 0x3E | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAW      = 0x3F | MMI_OPC_CLASS_MMI,
};
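/*
 * Illustrative sketch (added; not part of the original decoder): with
 * MASK_MMI() keeping the major opcode plus the 6-bit function field, an
 * R5900 MMI word can be matched directly against the enum above.  The
 * function name is hypothetical and the cases are only examples.
 */
static inline bool mmi_opc_is_pipeline1_mult_sketch(uint32_t insn)
{
    switch (MASK_MMI(insn)) {
    case MMI_OPC_MULT1:
    case MMI_OPC_MULTU1:
    case MMI_OPC_MADD1:
    case MMI_OPC_MADDU1:
        return true;    /* pipeline-1 multiply / multiply-add family */
    default:
        return false;
    }
}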
2271 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2274 * +--------+----------------------+--------+--------+
2275 * | MMI | |function| MMI0 |
2276 * +--------+----------------------+--------+--------+
2278 * function bits 7..6
2279 * bits | 0 | 1 | 2 | 3
2280 * 10..8 | 00 | 01 | 10 | 11
2281 * -------+-------+-------+-------+-------
2282 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2283 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2284 * 2 010 | PADDB | PSUBB | PCGTB | *
2285 * 3 011 | * | * | * | *
2286 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2287 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2288 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2289 * 7 111 | * | * | PEXT5 | PPAC5
 */

#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))

enum {
    MMI_OPC_0_PADDW  = (0x00 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBW  = (0x01 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTW  = (0x02 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PMAXW  = (0x03 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDH  = (0x04 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBH  = (0x05 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTH  = (0x06 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PMAXH  = (0x07 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDB  = (0x08 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBB  = (0x09 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTB  = (0x0A << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSW = (0x10 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSW = (0x11 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLW = (0x12 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACW  = (0x13 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSH = (0x14 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSH = (0x15 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLH = (0x16 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACH  = (0x17 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSB = (0x18 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSB = (0x19 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLB = (0x1A << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACB  = (0x1B << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXT5  = (0x1E << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPAC5  = (0x1F << 6) | MMI_OPC_CLASS_MMI0,
};
2322 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2325 * +--------+----------------------+--------+--------+
2326 * | MMI | |function| MMI1 |
2327 * +--------+----------------------+--------+--------+
2329 * function bits 7..6
2330 * bits | 0 | 1 | 2 | 3
2331 * 10..8 | 00 | 01 | 10 | 11
2332 * -------+-------+-------+-------+-------
2333 * 0 000 | * | PABSW | PCEQW | PMINW
2334 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2335 * 2 010 | * | * | PCEQB | *
2336 * 3 011 | * | * | * | *
2337 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2338 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2339 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2340 * 7 111 | * | * | * | *
 */

#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))

enum {
    MMI_OPC_1_PABSW  = (0x01 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQW  = (0x02 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PMINW  = (0x03 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADSBH = (0x04 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PABSH  = (0x05 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQH  = (0x06 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PMINH  = (0x07 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQB  = (0x0A << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUW = (0x10 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUW = (0x11 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUW = (0x12 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUH = (0x14 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUH = (0x15 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUH = (0x16 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUB = (0x18 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUB = (0x19 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUB = (0x1A << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_QFSRV  = (0x1B << 6) | MMI_OPC_CLASS_MMI1,
};
2366 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2369 * +--------+----------------------+--------+--------+
2370 * | MMI | |function| MMI2 |
2371 * +--------+----------------------+--------+--------+
2373 * function bits 7..6
2374 * bits | 0 | 1 | 2 | 3
2375 * 10..8 | 00 | 01 | 10 | 11
2376 * -------+-------+-------+-------+-------
2377 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2378 * 1 001 | PMSUBW| * | * | *
2379 * 2 010 | PMFHI | PMFLO | PINTH | *
2380 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2381 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2382 * 5 101 | PMSUBH| PHMSBH| * | *
2383 * 6 110 | * | * | PEXEH | PREVH
2384 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
 */

#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))

enum {
    MMI_OPC_2_PMADDW = (0x00 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PSLLVW = (0x02 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PSRLVW = (0x03 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMSUBW = (0x04 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMFHI  = (0x08 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMFLO  = (0x09 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PINTH  = (0x0A << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMULTW = (0x0C << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PDIVW  = (0x0D << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PCPYLD = (0x0E << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMADDH = (0x10 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PHMADH = (0x11 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PAND   = (0x12 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PXOR   = (0x13 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMSUBH = (0x14 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PHMSBH = (0x15 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PEXEH  = (0x1A << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PREVH  = (0x1B << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMULTH = (0x1C << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PDIVBW = (0x1D << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PEXEW  = (0x1E << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PROT3W = (0x1F << 6) | MMI_OPC_CLASS_MMI2,
};
2414 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2417 * +--------+----------------------+--------+--------+
2418 * | MMI | |function| MMI3 |
2419 * +--------+----------------------+--------+--------+
2421 * function bits 7..6
2422 * bits | 0 | 1 | 2 | 3
2423 * 10..8 | 00 | 01 | 10 | 11
2424 * -------+-------+-------+-------+-------
2425 * 0 000 |PMADDUW| * | * | PSRAVW
2426 * 1 001 | * | * | * | *
2427 * 2 010 | PMTHI | PMTLO | PINTEH| *
2428 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2429 * 4 100 | * | * | POR | PNOR
2430 * 5 101 | * | * | * | *
2431 * 6 110 | * | * | PEXCH | PCPYH
2432 * 7 111 | * | * | PEXCW | *
 */

#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))

enum {
    MMI_OPC_3_PMADDUW = (0x00 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PSRAVW  = (0x03 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMTHI   = (0x08 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMTLO   = (0x09 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PINTEH  = (0x0A << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMULTUW = (0x0C << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PDIVUW  = (0x0D << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PCPYUD  = (0x0E << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_POR     = (0x12 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PNOR    = (0x13 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PEXCH   = (0x1A << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PCPYH   = (0x1B << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PEXCW   = (0x1E << 6) | MMI_OPC_CLASS_MMI3,
};
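/*
 * Added note with a small sketch (not in the original): for all four MMI
 * sub-classes the distinguishing field is bits 10..6 of the instruction
 * word, which is why MASK_MMI0()..MASK_MMI3() keep 0x7FF (function bits
 * 5..0 plus the sub-opcode bits 10..6).  A hypothetical helper, assuming
 * extract32() is available:
 */
static inline uint32_t mmi_sub_opc_sketch(uint32_t insn)
{
    return extract32(insn, 6, 5);   /* bits 10..6, the MMI0..MMI3 selector */
}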
/* global register indices */
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv cpu_lladdr, cpu_llval;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#if defined(TARGET_MIPS64)
/* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
static TCGv_i64 cpu_mmr[32];
#endif

#if !defined(TARGET_MIPS64)
/* MXU registers */
static TCGv mxu_gpr[NUMBER_OF_MXU_REGISTERS - 1];
static TCGv mxu_CR;
#endif
2473 #include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
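/*
 * Usage note (added for illustration, not part of the original file):
 * these wrappers box an immediate into a temporary TCGv_i32 so it can be
 * passed to a helper that also takes cpu_env.  For example,
 *
 *     gen_helper_1e1i(ll, t0, addr, mem_idx);
 *
 * expands to roughly:
 *
 *     TCGv_i32 helper_tmp = tcg_const_i32(mem_idx);
 *     gen_helper_ll(t0, cpu_env, addr, helper_tmp);
 *     tcg_temp_free_i32(helper_tmp);
 */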
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong saved_pc;
    target_ulong page_start;
    uint64_t insn_flags;
    int32_t CP0_Config1;
    int32_t CP0_Config2;
    int32_t CP0_Config3;
    int32_t CP0_Config5;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};

#if !defined(TARGET_MIPS64)
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr(TCGv t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_tl(t, 0);
    } else {
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
    }
}

static inline void gen_store_gpr(TCGv t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_tl(cpu_gpr[reg], t);
    }
}
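/*
 * Illustrative usage sketch (added; not an emitter from the original
 * file): copying GPR rs into GPR rd while preserving the "r0 reads as
 * zero, writes to r0 are discarded" convention enforced above.
 */
static void gen_mov_gpr_sketch(int rd, int rs)
{
    TCGv t0 = tcg_temp_new();

    gen_load_gpr(t0, rs);       /* t0 becomes 0 when rs is r0 */
    gen_store_gpr(t0, rd);      /* silently dropped when rd is r0 */
    tcg_temp_free(t0);
}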
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr(int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0) {
        tcg_gen_movi_tl(t0, 0);
    } else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr(int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
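/*
 * Added note (illustrative, not in the original): the pointer arithmetic
 * emitted above computes
 *
 *     cpu_env + (SRSCtl.PSS & 0xf) * 32 * sizeof(target_ulong)
 *             + reg * sizeof(target_ulong)
 *
 * i.e. the shadow register set selected by CP0 SRSCtl.PSS is addressed as
 * an array of 32 GPR-sized slots relative to the CPU state pointer, and
 * the individual GPR is then picked by its register number.
 */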
#if !defined(TARGET_MIPS64)
/* MXU General purpose registers moves. */
static inline void gen_load_mxu_gpr(TCGv t, unsigned int reg)
{
    if (reg == 0) {
        tcg_gen_movi_tl(t, 0);
    } else if (reg <= 15) {
        tcg_gen_mov_tl(t, mxu_gpr[reg - 1]);
    }
}

static inline void gen_store_mxu_gpr(TCGv t, unsigned int reg)
{
    if (reg > 0 && reg <= 15) {
        tcg_gen_mov_tl(mxu_gpr[reg - 1], t);
    }
}

/* MXU control register moves. */
static inline void gen_load_mxu_cr(TCGv t)
{
    tcg_gen_mov_tl(t, mxu_CR);
}

static inline void gen_store_mxu_cr(TCGv t)
{
    /* TODO: Add handling of RW rules for MXU_CR. */
    tcg_gen_mov_tl(mxu_CR, t);
}
#endif
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
        gen_save_pc(ctx->base.pc_next);
        ctx->saved_pc = ctx->base.pc_next;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}

static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
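/*
 * Added note (illustrative, not in the original): with a 32-bit FPU
 * register file (MIPS_HFLAG_F64 clear) a 64-bit value is split across an
 * even/odd register pair, e.g. gen_load_fpr64(ctx, t, 2) concatenates f2
 * (low word) and f3 (high word).  This is the FR=0 register model that
 * check_cp1_registers() below guards against for odd register numbers.
 */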
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}

/* Addresses computation */
static inline void gen_op_addr_add(DisasContext *ctx, TCGv ret, TCGv arg0,
                                   TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

static inline void gen_op_addr_addi(DisasContext *ctx, TCGv ret, TCGv base,
                                    target_long ofs)
{
    tcg_gen_addi_tl(ret, base, ofs);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_err(ctx, EXCP_CpU, 0);
    }
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU))) {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}

/*
 * Verify that the processor is running with COP1X instructions enabled.
 * This is associated with the nabla symbol in the MIPS32 and MIPS64
 * opcode tables.
 */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * Verify that the processor is running with 64-bit floating-point
 * operations enabled.
 */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * Verify that the processor is running with DSP instructions enabled.
 * This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dsp_r2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dsp_r3(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R3))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/*
 * This code generates a "reserved instruction" exception if the
 * CPU does not support the instruction set corresponding to flags.
 */
static inline void check_insn(DisasContext *ctx, uint64_t flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
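/*
 * Usage note (added for illustration): the decode helpers further down
 * guard optional ISA features with these checks before emitting any code,
 * for example
 *
 *     check_insn(ctx, ISA_MIPS32R2);
 *     check_cp1_enabled(ctx);
 *
 * so an unsupported encoding raises EXCP_RI (or EXCP_CpU) instead of
 * being translated.
 */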
/*
 * This code generates a "reserved instruction" exception if the
 * CPU has corresponding flag set which indicates that the instruction
 * has been removed.
 */
static inline void check_insn_opc_removed(DisasContext *ctx, uint64_t flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * The Linux kernel traps certain reserved instruction exceptions to
 * emulate the corresponding instructions. QEMU is the kernel in user
 * mode, so those traps are emulated by accepting the instructions.
 *
 * A reserved instruction exception is generated for flagged CPUs if
 * QEMU runs in system mode.
 */
static inline void check_insn_opc_user_only(DisasContext *ctx, uint64_t flags)
{
#ifndef CONFIG_USER_ONLY
    check_insn_opc_removed(ctx, flags);
#endif
}

/*
 * This code generates a "reserved instruction" exception if the
 * CPU does not support 64-bit paired-single (PS) floating point data type.
 */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/*
 * This code generates a "reserved instruction" exception if 64-bit
 * instructions are not enabled.
 */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 XNP bit is set.
 */
static inline void check_xnp(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

#ifndef CONFIG_USER_ONLY
/*
 * This code generates a "reserved instruction" exception if the
 * Config3 PW bit is NOT set.
 */
static inline void check_pw(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_PW)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif

/*
 * This code generates a "reserved instruction" exception if the
 * Config3 MT bit is NOT set.
 */
static inline void check_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

#ifndef CONFIG_USER_ONLY
/*
 * This code generates a "coprocessor unusable" exception if CP0 is not
 * available, and, if that is not the case, generates a "reserved instruction"
 * exception if the Config5 MT bit is NOT set. This is needed for availability
 * control of some of MT ASE instructions.
 */
static inline void check_cp0_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_err(ctx, EXCP_CpU, 0);
    } else {
        if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
            generate_exception_err(ctx, EXCP_RI, 0);
        }
    }
}
#endif

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set.
 */
static inline void check_nms(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
 * Config2 TL, and Config5 L2C are unset.
 */
static inline void check_nms_dl_il_sl_tl_l2c(DisasContext *ctx)
{
    if (unlikely((ctx->CP0_Config5 & (1 << CP0C5_NMS)) &&
                 !(ctx->CP0_Config1 & (1 << CP0C1_DL)) &&
                 !(ctx->CP0_Config1 & (1 << CP0C1_IL)) &&
                 !(ctx->CP0_Config2 & (1 << CP0C2_SL)) &&
                 !(ctx->CP0_Config2 & (1 << CP0C2_TL)) &&
                 !(ctx->CP0_Config5 & (1 << CP0C5_L2C)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 EVA bit is NOT set.
 */
static inline void check_eva(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config5 & (1 << CP0C5_EVA)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
3173 * Define small wrappers for gen_load_fpr* so that we have a uniform
3174 * calling interface for 32 and 64-bit FPRs. No sense in changing
3175 * all callers for gen_load_fpr32 when we need the CTX parameter for
3178 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3179 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3180 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3181 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3182 int ft, int fs, int cc) \
3184 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
3185 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
3194 check_cp1_registers(ctx, fs | ft); \
3202 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
3203 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
3206 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
3209 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
3212 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
3215 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
3218 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
3221 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
3224 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
3227 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
3230 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
3233 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
3236 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
3239 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
3242 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
3245 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
3248 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
3251 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
3256 tcg_temp_free_i##bits(fp0); \
3257 tcg_temp_free_i##bits(fp1); \
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
3268 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3269 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
3270 int ft, int fs, int fd) \
3272 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3273 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3274 if (ifmt == FMT_D) { \
3275 check_cp1_registers(ctx, fs | ft | fd); \
3277 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3278 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3281 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3284 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3287 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3290 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3293 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3296 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3299 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3302 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3305 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3308 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3311 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3314 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3317 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3320 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3323 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3326 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3329 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3332 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3335 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3338 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3341 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3344 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3350 tcg_temp_free_i ## bits(fp0); \
3351 tcg_temp_free_i ## bits(fp1); \
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
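/*
 * Added note (illustrative): each FOP_CONDS(type, abs, fmt, ifmt, bits)
 * expansion above defines one comparison emitter, e.g.
 * FOP_CONDS(, 0, d, FMT_D, 64) produces gen_cmp_d(ctx, n, ft, fs, cc) and
 * the "abs" variant produces gen_cmpabs_d().  The emitter dispatches on
 * the comparison selector n (0..15) to the matching cmp*_<fmt>_<cond>
 * helper, with cc selecting which FP condition-code bit is written.
 * FOP_CONDNS() plays the same role for the R6 CMP.cond.fmt encodings,
 * storing the all-ones/all-zeroes result back into fd.
 */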
3357 #undef gen_ldcmp_fpr32
3358 #undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn, fname)                                          \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn, fname)                                          \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll, ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld, ld64);
#endif
#undef OP_LD_ATOMIC
3387 static void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
,
3388 int base
, int offset
)
3391 tcg_gen_movi_tl(addr
, offset
);
3392 } else if (offset
== 0) {
3393 gen_load_gpr(addr
, base
);
3395 tcg_gen_movi_tl(addr
, offset
);
3396 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
static target_ulong pc_relative_pc(DisasContext *ctx)
{
    target_ulong pc = ctx->base.pc_next;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
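/*
 * Load from memory into a GPR.  The opcode selects the access size and
 * signedness, the endian-dependent unaligned variants (LWL/LWR and
 * LDL/LDR), the PC-relative loads and the load-linked forms.
 */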
static void gen_ld(DisasContext *ctx, uint32_t opc,
                   int rt, int base, int offset)
{
    TCGv t0, t1, t2;
    int mem_idx = ctx->mem_idx;

    if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /*
         * Loongson CPU uses a load to zero register for prefetch.
         * We emulate it as a NOP. On other CPU we must perform the
         * actual memory access.
         */
        return;
    }

    t0 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
3434 #if defined(TARGET_MIPS64)
3436 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3437 ctx
->default_tcg_memop_mask
);
3438 gen_store_gpr(t0
, rt
);
3441 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3442 ctx
->default_tcg_memop_mask
);
3443 gen_store_gpr(t0
, rt
);
3447 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3448 gen_store_gpr(t0
, rt
);
3451 t1
= tcg_temp_new();
3453 * Do a byte access to possibly trigger a page
3454 * fault with the unaligned address.
3456 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3457 tcg_gen_andi_tl(t1
, t0
, 7);
3458 #ifndef TARGET_WORDS_BIGENDIAN
3459 tcg_gen_xori_tl(t1
, t1
, 7);
3461 tcg_gen_shli_tl(t1
, t1
, 3);
3462 tcg_gen_andi_tl(t0
, t0
, ~7);
3463 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3464 tcg_gen_shl_tl(t0
, t0
, t1
);
3465 t2
= tcg_const_tl(-1);
3466 tcg_gen_shl_tl(t2
, t2
, t1
);
3467 gen_load_gpr(t1
, rt
);
3468 tcg_gen_andc_tl(t1
, t1
, t2
);
3470 tcg_gen_or_tl(t0
, t0
, t1
);
3472 gen_store_gpr(t0
, rt
);
3475 t1
= tcg_temp_new();
3477 * Do a byte access to possibly trigger a page
3478 * fault with the unaligned address.
3480 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3481 tcg_gen_andi_tl(t1
, t0
, 7);
3482 #ifdef TARGET_WORDS_BIGENDIAN
3483 tcg_gen_xori_tl(t1
, t1
, 7);
3485 tcg_gen_shli_tl(t1
, t1
, 3);
3486 tcg_gen_andi_tl(t0
, t0
, ~7);
3487 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3488 tcg_gen_shr_tl(t0
, t0
, t1
);
3489 tcg_gen_xori_tl(t1
, t1
, 63);
3490 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3491 tcg_gen_shl_tl(t2
, t2
, t1
);
3492 gen_load_gpr(t1
, rt
);
3493 tcg_gen_and_tl(t1
, t1
, t2
);
3495 tcg_gen_or_tl(t0
, t0
, t1
);
3497 gen_store_gpr(t0
, rt
);
3500 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3501 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3503 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3504 gen_store_gpr(t0
, rt
);
3508 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3509 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3511 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3512 gen_store_gpr(t0
, rt
);
3515 mem_idx
= MIPS_HFLAG_UM
;
3518 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3519 ctx
->default_tcg_memop_mask
);
3520 gen_store_gpr(t0
, rt
);
3523 mem_idx
= MIPS_HFLAG_UM
;
3526 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3527 ctx
->default_tcg_memop_mask
);
3528 gen_store_gpr(t0
, rt
);
3531 mem_idx
= MIPS_HFLAG_UM
;
3534 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3535 ctx
->default_tcg_memop_mask
);
3536 gen_store_gpr(t0
, rt
);
3539 mem_idx
= MIPS_HFLAG_UM
;
3542 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3543 gen_store_gpr(t0
, rt
);
3546 mem_idx
= MIPS_HFLAG_UM
;
3549 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3550 gen_store_gpr(t0
, rt
);
3553 mem_idx
= MIPS_HFLAG_UM
;
3556 t1
= tcg_temp_new();
3558 * Do a byte access to possibly trigger a page
3559 * fault with the unaligned address.
3561 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3562 tcg_gen_andi_tl(t1
, t0
, 3);
3563 #ifndef TARGET_WORDS_BIGENDIAN
3564 tcg_gen_xori_tl(t1
, t1
, 3);
3566 tcg_gen_shli_tl(t1
, t1
, 3);
3567 tcg_gen_andi_tl(t0
, t0
, ~3);
3568 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3569 tcg_gen_shl_tl(t0
, t0
, t1
);
3570 t2
= tcg_const_tl(-1);
3571 tcg_gen_shl_tl(t2
, t2
, t1
);
3572 gen_load_gpr(t1
, rt
);
3573 tcg_gen_andc_tl(t1
, t1
, t2
);
3575 tcg_gen_or_tl(t0
, t0
, t1
);
3577 tcg_gen_ext32s_tl(t0
, t0
);
3578 gen_store_gpr(t0
, rt
);
3581 mem_idx
= MIPS_HFLAG_UM
;
3584 t1
= tcg_temp_new();
3586 * Do a byte access to possibly trigger a page
3587 * fault with the unaligned address.
3589 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3590 tcg_gen_andi_tl(t1
, t0
, 3);
3591 #ifdef TARGET_WORDS_BIGENDIAN
3592 tcg_gen_xori_tl(t1
, t1
, 3);
3594 tcg_gen_shli_tl(t1
, t1
, 3);
3595 tcg_gen_andi_tl(t0
, t0
, ~3);
3596 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3597 tcg_gen_shr_tl(t0
, t0
, t1
);
3598 tcg_gen_xori_tl(t1
, t1
, 31);
3599 t2
= tcg_const_tl(0xfffffffeull
);
3600 tcg_gen_shl_tl(t2
, t2
, t1
);
3601 gen_load_gpr(t1
, rt
);
3602 tcg_gen_and_tl(t1
, t1
, t2
);
3604 tcg_gen_or_tl(t0
, t0
, t1
);
3606 tcg_gen_ext32s_tl(t0
, t0
);
3607 gen_store_gpr(t0
, rt
);
3610 mem_idx
= MIPS_HFLAG_UM
;
3614 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3615 gen_store_gpr(t0
, rt
);
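/*
 * LLWP: load a 64-bit pair with load-linked semantics and split it into
 * two GPRs; the loaded value and address are saved in llval_wp/lladdr so
 * that a matching SCWP can detect intervening stores.
 */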
static void gen_llwp(DisasContext *ctx, uint32_t base, int16_t offset,
                    uint32_t reg1, uint32_t reg2)
{
    TCGv taddr = tcg_temp_new();
    TCGv_i64 tval = tcg_temp_new_i64();
    TCGv tmp1 = tcg_temp_new();
    TCGv tmp2 = tcg_temp_new();

    gen_base_offset_addr(ctx, taddr, base, offset);
    tcg_gen_qemu_ld64(tval, taddr, ctx->mem_idx);
#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_extr_i64_tl(tmp2, tmp1, tval);
#else
    tcg_gen_extr_i64_tl(tmp1, tmp2, tval);
#endif
    gen_store_gpr(tmp1, reg1);
    tcg_temp_free(tmp1);
    gen_store_gpr(tmp2, reg2);
    tcg_temp_free(tmp2);
    tcg_gen_st_i64(tval, cpu_env, offsetof(CPUMIPSState, llval_wp));
    tcg_temp_free_i64(tval);
    tcg_gen_st_tl(taddr, cpu_env, offsetof(CPUMIPSState, lladdr));
    tcg_temp_free(taddr);
}
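/* Store a GPR to memory; the store-opcode counterpart of gen_ld(). */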
static void gen_st(DisasContext *ctx, uint32_t opc, int rt,
                   int base, int offset)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int mem_idx = ctx->mem_idx;

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
3657 #if defined(TARGET_MIPS64)
3659 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3660 ctx
->default_tcg_memop_mask
);
3663 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3666 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3670 mem_idx
= MIPS_HFLAG_UM
;
3673 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3674 ctx
->default_tcg_memop_mask
);
3677 mem_idx
= MIPS_HFLAG_UM
;
3680 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3681 ctx
->default_tcg_memop_mask
);
3684 mem_idx
= MIPS_HFLAG_UM
;
3687 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3690 mem_idx
= MIPS_HFLAG_UM
;
3693 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3696 mem_idx
= MIPS_HFLAG_UM
;
3699 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
/* Store conditional */
static void gen_st_cond(DisasContext *ctx, int rt, int base, int offset,
                        TCGMemOp tcg_mo, bool eva)
{
    TCGv addr, t0, val;
    TCGLabel *l1 = gen_new_label();
    TCGLabel *done = gen_new_label();

    t0 = tcg_temp_new();
    addr = tcg_temp_new();
    /* compare the address against that of the preceding LL */
    gen_base_offset_addr(ctx, addr, base, offset);
    tcg_gen_brcond_tl(TCG_COND_EQ, addr, cpu_lladdr, l1);
    tcg_temp_free(addr);
    tcg_gen_movi_tl(t0, 0);
    gen_store_gpr(t0, rt);
    tcg_gen_br(done);

    gen_set_label(l1);
    /* generate cmpxchg */
    val = tcg_temp_new();
    gen_load_gpr(val, rt);
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_lladdr, cpu_llval, val,
                              eva ? MIPS_HFLAG_UM : ctx->mem_idx, tcg_mo);
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_llval);
    gen_store_gpr(t0, rt);
    tcg_temp_free(val);

    gen_set_label(done);
    tcg_temp_free(t0);
}
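/*
 * SCWP: paired store-conditional.  The two source GPRs are concatenated
 * and compare-and-swapped against the value recorded by LLWP; reg1 is
 * set to 1 on success and 0 on failure.
 */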
static void gen_scwp(DisasContext *ctx, uint32_t base, int16_t offset,
                    uint32_t reg1, uint32_t reg2, bool eva)
{
    TCGv taddr = tcg_temp_local_new();
    TCGv lladdr = tcg_temp_local_new();
    TCGv_i64 tval = tcg_temp_new_i64();
    TCGv_i64 llval = tcg_temp_new_i64();
    TCGv_i64 val = tcg_temp_new_i64();
    TCGv tmp1 = tcg_temp_new();
    TCGv tmp2 = tcg_temp_new();
    TCGLabel *lab_fail = gen_new_label();
    TCGLabel *lab_done = gen_new_label();

    gen_base_offset_addr(ctx, taddr, base, offset);

    tcg_gen_ld_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
    tcg_gen_brcond_tl(TCG_COND_NE, taddr, lladdr, lab_fail);

    gen_load_gpr(tmp1, reg1);
    gen_load_gpr(tmp2, reg2);

#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_concat_tl_i64(tval, tmp2, tmp1);
#else
    tcg_gen_concat_tl_i64(tval, tmp1, tmp2);
#endif

    tcg_gen_ld_i64(llval, cpu_env, offsetof(CPUMIPSState, llval_wp));
    tcg_gen_atomic_cmpxchg_i64(val, taddr, llval, tval,
                               eva ? MIPS_HFLAG_UM : ctx->mem_idx, MO_64);
    if (reg1 != 0) {
        tcg_gen_movi_tl(cpu_gpr[reg1], 1);
    }
    tcg_gen_brcond_i64(TCG_COND_EQ, val, llval, lab_done);

    gen_set_label(lab_fail);

    if (reg1 != 0) {
        tcg_gen_movi_tl(cpu_gpr[reg1], 0);
    }
    gen_set_label(lab_done);
    tcg_gen_movi_tl(lladdr, -1);
    tcg_gen_st_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
}
/* Load and store */
static void gen_flt_ldst(DisasContext *ctx, uint32_t opc, int ft,
                         TCGv t0)
{
    /*
     * Don't do NOP if destination is zero: we must perform the actual
     * memory access.
     */
    switch (opc) {
    case OPC_LWC1:
        {
3796 TCGv_i32 fp0
= tcg_temp_new_i32();
3797 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3798 ctx
->default_tcg_memop_mask
);
3799 gen_store_fpr32(ctx
, fp0
, ft
);
3800 tcg_temp_free_i32(fp0
);
3805 TCGv_i32 fp0
= tcg_temp_new_i32();
3806 gen_load_fpr32(ctx
, fp0
, ft
);
3807 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3808 ctx
->default_tcg_memop_mask
);
3809 tcg_temp_free_i32(fp0
);
3814 TCGv_i64 fp0
= tcg_temp_new_i64();
3815 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3816 ctx
->default_tcg_memop_mask
);
3817 gen_store_fpr64(ctx
, fp0
, ft
);
3818 tcg_temp_free_i64(fp0
);
3823 TCGv_i64 fp0
= tcg_temp_new_i64();
3824 gen_load_fpr64(ctx
, fp0
, ft
);
3825 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3826 ctx
->default_tcg_memop_mask
);
3827 tcg_temp_free_i64(fp0
);
3831 MIPS_INVAL("flt_ldst");
3832 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    TCGv t0 = tcg_temp_new();

    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        switch (op) {
        case OPC_LDC1:
        case OPC_SDC1:
            check_insn(ctx, ISA_MIPS2);
            /* Fallthrough */
        default:
            gen_base_offset_addr(ctx, t0, rs, imm);
            gen_flt_ldst(ctx, op, rt, t0);
        }
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
    tcg_temp_free(t0);
}
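/*
 * The trapping ADDI/DADDI (and later ADD/SUB) cases use the standard
 * sign-bit test for signed overflow: for r = a + imm, overflow occurred
 * iff (a ^ ~imm) & (r ^ imm) has its sign bit set, i.e. both operands
 * had the same sign and the result's sign differs.
 */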
/* Arithmetic with immediate operand */
static void gen_arith_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /*
         * If no destination, treat it as a NOP.
         * For addi, we must generate the overflow exception when needed.
         */
        return;
    }
    switch (opc) {
    case OPC_ADDI:
        {
3875 TCGv t0
= tcg_temp_local_new();
3876 TCGv t1
= tcg_temp_new();
3877 TCGv t2
= tcg_temp_new();
3878 TCGLabel
*l1
= gen_new_label();
3880 gen_load_gpr(t1
, rs
);
3881 tcg_gen_addi_tl(t0
, t1
, uimm
);
3882 tcg_gen_ext32s_tl(t0
, t0
);
3884 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3885 tcg_gen_xori_tl(t2
, t0
, uimm
);
3886 tcg_gen_and_tl(t1
, t1
, t2
);
3888 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3890 /* operands of same sign, result different sign */
3891 generate_exception(ctx
, EXCP_OVERFLOW
);
3893 tcg_gen_ext32s_tl(t0
, t0
);
3894 gen_store_gpr(t0
, rt
);
3900 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3901 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3903 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3906 #if defined(TARGET_MIPS64)
3909 TCGv t0
= tcg_temp_local_new();
3910 TCGv t1
= tcg_temp_new();
3911 TCGv t2
= tcg_temp_new();
3912 TCGLabel
*l1
= gen_new_label();
3914 gen_load_gpr(t1
, rs
);
3915 tcg_gen_addi_tl(t0
, t1
, uimm
);
3917 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3918 tcg_gen_xori_tl(t2
, t0
, uimm
);
3919 tcg_gen_and_tl(t1
, t1
, t2
);
3921 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3923 /* operands of same sign, result different sign */
3924 generate_exception(ctx
, EXCP_OVERFLOW
);
3926 gen_store_gpr(t0
, rt
);
3932 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3934 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
/* Logic with immediate operand */
static void gen_logic_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    uimm = (uint16_t)imm;
    switch (opc) {
    case OPC_ANDI:
3954 if (likely(rs
!= 0)) {
3955 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3957 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3962 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3964 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3968 if (likely(rs
!= 0)) {
3969 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3971 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3975 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3977 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3978 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3980 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    tcg_temp_free(t0);
}
/* Shifts with immediate operand */
static void gen_shift_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = ((uint16_t)imm) & 0x1f;
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLL:
4029 tcg_gen_shli_tl(t0
, t0
, uimm
);
4030 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4033 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4037 tcg_gen_ext32u_tl(t0
, t0
);
4038 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4040 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4045 TCGv_i32 t1
= tcg_temp_new_i32();
4047 tcg_gen_trunc_tl_i32(t1
, t0
);
4048 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4049 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4050 tcg_temp_free_i32(t1
);
4052 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4055 #if defined(TARGET_MIPS64)
4057 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4060 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4063 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4067 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4069 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4073 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4076 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4079 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4082 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
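/*
 * Three-register arithmetic.  ADD/SUB (and DADD/DSUB) raise EXCP_OVERFLOW
 * on signed overflow, using the same sign-bit test as the immediate forms
 * above; the unsigned ADDU/SUBU variants never trap.
 */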
static void gen_arith(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
        && opc != OPC_DADD && opc != OPC_DSUB) {
        /*
         * If no destination, treat it as a NOP.
         * For add & sub, we must generate the overflow exception when needed.
         */
        return;
    }
    switch (opc) {
    case OPC_ADD:
        {
4105 TCGv t0
= tcg_temp_local_new();
4106 TCGv t1
= tcg_temp_new();
4107 TCGv t2
= tcg_temp_new();
4108 TCGLabel
*l1
= gen_new_label();
4110 gen_load_gpr(t1
, rs
);
4111 gen_load_gpr(t2
, rt
);
4112 tcg_gen_add_tl(t0
, t1
, t2
);
4113 tcg_gen_ext32s_tl(t0
, t0
);
4114 tcg_gen_xor_tl(t1
, t1
, t2
);
4115 tcg_gen_xor_tl(t2
, t0
, t2
);
4116 tcg_gen_andc_tl(t1
, t2
, t1
);
4118 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4120 /* operands of same sign, result different sign */
4121 generate_exception(ctx
, EXCP_OVERFLOW
);
4123 gen_store_gpr(t0
, rd
);
4128 if (rs
!= 0 && rt
!= 0) {
4129 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4130 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4131 } else if (rs
== 0 && rt
!= 0) {
4132 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4133 } else if (rs
!= 0 && rt
== 0) {
4134 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4136 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4141 TCGv t0
= tcg_temp_local_new();
4142 TCGv t1
= tcg_temp_new();
4143 TCGv t2
= tcg_temp_new();
4144 TCGLabel
*l1
= gen_new_label();
4146 gen_load_gpr(t1
, rs
);
4147 gen_load_gpr(t2
, rt
);
4148 tcg_gen_sub_tl(t0
, t1
, t2
);
4149 tcg_gen_ext32s_tl(t0
, t0
);
4150 tcg_gen_xor_tl(t2
, t1
, t2
);
4151 tcg_gen_xor_tl(t1
, t0
, t1
);
4152 tcg_gen_and_tl(t1
, t1
, t2
);
4154 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4157 * operands of different sign, first operand and the result
4160 generate_exception(ctx
, EXCP_OVERFLOW
);
4162 gen_store_gpr(t0
, rd
);
4167 if (rs
!= 0 && rt
!= 0) {
4168 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4169 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4170 } else if (rs
== 0 && rt
!= 0) {
4171 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4172 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4173 } else if (rs
!= 0 && rt
== 0) {
4174 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4176 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4179 #if defined(TARGET_MIPS64)
4182 TCGv t0
= tcg_temp_local_new();
4183 TCGv t1
= tcg_temp_new();
4184 TCGv t2
= tcg_temp_new();
4185 TCGLabel
*l1
= gen_new_label();
4187 gen_load_gpr(t1
, rs
);
4188 gen_load_gpr(t2
, rt
);
4189 tcg_gen_add_tl(t0
, t1
, t2
);
4190 tcg_gen_xor_tl(t1
, t1
, t2
);
4191 tcg_gen_xor_tl(t2
, t0
, t2
);
4192 tcg_gen_andc_tl(t1
, t2
, t1
);
4194 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4196 /* operands of same sign, result different sign */
4197 generate_exception(ctx
, EXCP_OVERFLOW
);
4199 gen_store_gpr(t0
, rd
);
4204 if (rs
!= 0 && rt
!= 0) {
4205 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4206 } else if (rs
== 0 && rt
!= 0) {
4207 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4208 } else if (rs
!= 0 && rt
== 0) {
4209 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4211 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4216 TCGv t0
= tcg_temp_local_new();
4217 TCGv t1
= tcg_temp_new();
4218 TCGv t2
= tcg_temp_new();
4219 TCGLabel
*l1
= gen_new_label();
4221 gen_load_gpr(t1
, rs
);
4222 gen_load_gpr(t2
, rt
);
4223 tcg_gen_sub_tl(t0
, t1
, t2
);
4224 tcg_gen_xor_tl(t2
, t1
, t2
);
4225 tcg_gen_xor_tl(t1
, t0
, t1
);
4226 tcg_gen_and_tl(t1
, t1
, t2
);
4228 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4231 * Operands of different sign, first operand and result different
4234 generate_exception(ctx
, EXCP_OVERFLOW
);
4236 gen_store_gpr(t0
, rd
);
4241 if (rs
!= 0 && rt
!= 0) {
4242 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4243 } else if (rs
== 0 && rt
!= 0) {
4244 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4245 } else if (rs
!= 0 && rt
== 0) {
4246 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4248 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4253 if (likely(rs
!= 0 && rt
!= 0)) {
4254 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4255 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4257 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Conditional move */
static void gen_cond_move(DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
{
    TCGv t0, t1, t2;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    t1 = tcg_const_tl(0);
    t2 = tcg_temp_new();
    gen_load_gpr(t2, rs);
    switch (opc) {
    case OPC_MOVN:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_MOVZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_SELNEZ:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    case OPC_SELEQZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    }
    tcg_temp_free(t2);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
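/* Three-register logic ops (AND, NOR, OR, XOR) with $zero special cases. */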
static void gen_logic(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    switch (opc) {
    case OPC_AND:
4309 if (likely(rs
!= 0 && rt
!= 0)) {
4310 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4312 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4316 if (rs
!= 0 && rt
!= 0) {
4317 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4318 } else if (rs
== 0 && rt
!= 0) {
4319 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4320 } else if (rs
!= 0 && rt
== 0) {
4321 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4323 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4327 if (likely(rs
!= 0 && rt
!= 0)) {
4328 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4329 } else if (rs
== 0 && rt
!= 0) {
4330 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4331 } else if (rs
!= 0 && rt
== 0) {
4332 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4334 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4338 if (likely(rs
!= 0 && rt
!= 0)) {
4339 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4340 } else if (rs
== 0 && rt
!= 0) {
4341 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4342 } else if (rs
!= 0 && rt
== 0) {
4343 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4345 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_shift(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /*
         * If no destination, treat it as a NOP.
         * For add & sub, we must generate the overflow exception when needed.
         */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLLV:
4398 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4399 tcg_gen_shl_tl(t0
, t1
, t0
);
4400 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4403 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4404 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4407 tcg_gen_ext32u_tl(t1
, t1
);
4408 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4409 tcg_gen_shr_tl(t0
, t1
, t0
);
4410 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4414 TCGv_i32 t2
= tcg_temp_new_i32();
4415 TCGv_i32 t3
= tcg_temp_new_i32();
4417 tcg_gen_trunc_tl_i32(t2
, t0
);
4418 tcg_gen_trunc_tl_i32(t3
, t1
);
4419 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4420 tcg_gen_rotr_i32(t2
, t3
, t2
);
4421 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4422 tcg_temp_free_i32(t2
);
4423 tcg_temp_free_i32(t3
);
4426 #if defined(TARGET_MIPS64)
4428 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4429 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4432 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4433 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4436 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4437 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4440 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4441 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
#if defined(TARGET_MIPS64)
/* Copy GPR to and from TX79 HI1/LO1 register. */
static void gen_HILO1_tx79(DisasContext *ctx, uint32_t opc, int reg)
{
    if (reg == 0 && (opc == MMI_OPC_MFHI1 || opc == MMI_OPC_MFLO1)) {
        /* Treat as NOP. */
        return;
    }

    switch (opc) {
    case MMI_OPC_MFHI1:
4460 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4463 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4467 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4469 tcg_gen_movi_tl(cpu_HI
[1], 0);
4474 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4476 tcg_gen_movi_tl(cpu_LO
[1], 0);
4480 MIPS_INVAL("mfthilo1 TX79");
4481 generate_exception_end(ctx
, EXCP_RI
);
/* Arithmetic on HI/LO registers */
static void gen_HILO(DisasContext *ctx, uint32_t opc, int acc, int reg)
{
    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        /* Treat as NOP. */
        return;
    }

    if (acc != 0) {
        check_dsp(ctx);
    }

    switch (opc) {
    case OPC_MFHI:
4501 #if defined(TARGET_MIPS64)
4503 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4507 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4511 #if defined(TARGET_MIPS64)
4513 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4517 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4522 #if defined(TARGET_MIPS64)
4524 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4528 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4531 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4536 #if defined(TARGET_MIPS64)
4538 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4542 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4545 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
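/* Helper for the R6 PC-relative loads: load from a constant address. */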
static inline void gen_r6_ld(target_long addr, int reg, int memidx,
                             TCGMemOp memop)
{
    TCGv t0 = tcg_const_tl(addr);
    tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
    gen_store_gpr(t0, reg);
    tcg_temp_free(t0);
}
static inline void gen_pcrel(DisasContext *ctx, int opc, target_ulong pc,
                             int rs)
{
    target_long offset;
    target_long addr;

    switch (MASK_OPC_PCREL_TOP2BITS(opc)) {
    case OPC_ADDIUPC:
        if (rs != 0) {
)) {
4569 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4570 addr
= addr_add(ctx
, pc
, offset
);
4571 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4575 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4576 addr
= addr_add(ctx
, pc
, offset
);
4577 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4579 #if defined(TARGET_MIPS64)
4582 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4583 addr
= addr_add(ctx
, pc
, offset
);
4584 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4588 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4591 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4592 addr
= addr_add(ctx
, pc
, offset
);
4593 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4598 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4599 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4600 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4603 #if defined(TARGET_MIPS64)
4604 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4605 case R6_OPC_LDPC
+ (1 << 16):
4606 case R6_OPC_LDPC
+ (2 << 16):
4607 case R6_OPC_LDPC
+ (3 << 16):
4609 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4610 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4611 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4615 MIPS_INVAL("OPC_PCREL");
4616 generate_exception_end(ctx
, EXCP_RI
);
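/*
 * R6 MUL/MUH/DIV/MOD family: the result goes straight to a GPR instead
 * of HI/LO.  The divide cases first force the divisor to a safe value
 * with movcond so that division by zero (and INT_MIN / -1) cannot fault
 * at TCG level; the architecture leaves the result for those inputs
 * unpredictable.
 */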
static void gen_r6_muldiv(DisasContext *ctx, int opc, int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case R6_OPC_DIV:
        {
4641 TCGv t2
= tcg_temp_new();
4642 TCGv t3
= tcg_temp_new();
4643 tcg_gen_ext32s_tl(t0
, t0
);
4644 tcg_gen_ext32s_tl(t1
, t1
);
4645 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4646 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4647 tcg_gen_and_tl(t2
, t2
, t3
);
4648 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4649 tcg_gen_or_tl(t2
, t2
, t3
);
4650 tcg_gen_movi_tl(t3
, 0);
4651 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4652 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4653 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4660 TCGv t2
= tcg_temp_new();
4661 TCGv t3
= tcg_temp_new();
4662 tcg_gen_ext32s_tl(t0
, t0
);
4663 tcg_gen_ext32s_tl(t1
, t1
);
4664 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4665 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4666 tcg_gen_and_tl(t2
, t2
, t3
);
4667 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4668 tcg_gen_or_tl(t2
, t2
, t3
);
4669 tcg_gen_movi_tl(t3
, 0);
4670 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4671 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4672 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4679 TCGv t2
= tcg_const_tl(0);
4680 TCGv t3
= tcg_const_tl(1);
4681 tcg_gen_ext32u_tl(t0
, t0
);
4682 tcg_gen_ext32u_tl(t1
, t1
);
4683 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4684 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4685 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4692 TCGv t2
= tcg_const_tl(0);
4693 TCGv t3
= tcg_const_tl(1);
4694 tcg_gen_ext32u_tl(t0
, t0
);
4695 tcg_gen_ext32u_tl(t1
, t1
);
4696 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4697 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4698 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4705 TCGv_i32 t2
= tcg_temp_new_i32();
4706 TCGv_i32 t3
= tcg_temp_new_i32();
4707 tcg_gen_trunc_tl_i32(t2
, t0
);
4708 tcg_gen_trunc_tl_i32(t3
, t1
);
4709 tcg_gen_mul_i32(t2
, t2
, t3
);
4710 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4711 tcg_temp_free_i32(t2
);
4712 tcg_temp_free_i32(t3
);
4717 TCGv_i32 t2
= tcg_temp_new_i32();
4718 TCGv_i32 t3
= tcg_temp_new_i32();
4719 tcg_gen_trunc_tl_i32(t2
, t0
);
4720 tcg_gen_trunc_tl_i32(t3
, t1
);
4721 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4722 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4723 tcg_temp_free_i32(t2
);
4724 tcg_temp_free_i32(t3
);
4729 TCGv_i32 t2
= tcg_temp_new_i32();
4730 TCGv_i32 t3
= tcg_temp_new_i32();
4731 tcg_gen_trunc_tl_i32(t2
, t0
);
4732 tcg_gen_trunc_tl_i32(t3
, t1
);
4733 tcg_gen_mul_i32(t2
, t2
, t3
);
4734 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4735 tcg_temp_free_i32(t2
);
4736 tcg_temp_free_i32(t3
);
4741 TCGv_i32 t2
= tcg_temp_new_i32();
4742 TCGv_i32 t3
= tcg_temp_new_i32();
4743 tcg_gen_trunc_tl_i32(t2
, t0
);
4744 tcg_gen_trunc_tl_i32(t3
, t1
);
4745 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4746 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4747 tcg_temp_free_i32(t2
);
4748 tcg_temp_free_i32(t3
);
4751 #if defined(TARGET_MIPS64)
4754 TCGv t2
= tcg_temp_new();
4755 TCGv t3
= tcg_temp_new();
4756 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4757 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4758 tcg_gen_and_tl(t2
, t2
, t3
);
4759 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4760 tcg_gen_or_tl(t2
, t2
, t3
);
4761 tcg_gen_movi_tl(t3
, 0);
4762 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4763 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4770 TCGv t2
= tcg_temp_new();
4771 TCGv t3
= tcg_temp_new();
4772 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4773 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4774 tcg_gen_and_tl(t2
, t2
, t3
);
4775 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4776 tcg_gen_or_tl(t2
, t2
, t3
);
4777 tcg_gen_movi_tl(t3
, 0);
4778 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4779 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4786 TCGv t2
= tcg_const_tl(0);
4787 TCGv t3
= tcg_const_tl(1);
4788 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4789 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4796 TCGv t2
= tcg_const_tl(0);
4797 TCGv t3
= tcg_const_tl(1);
4798 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4799 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4805 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4809 TCGv t2
= tcg_temp_new();
4810 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4815 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4819 TCGv t2
= tcg_temp_new();
4820 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4826 MIPS_INVAL("r6 mul/div");
4827 generate_exception_end(ctx
, EXCP_RI
);
#if defined(TARGET_MIPS64)
static void gen_div1_tx79(DisasContext *ctx, uint32_t opc, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case MMI_OPC_DIV1:
        {
4849 TCGv t2
= tcg_temp_new();
4850 TCGv t3
= tcg_temp_new();
4851 tcg_gen_ext32s_tl(t0
, t0
);
4852 tcg_gen_ext32s_tl(t1
, t1
);
4853 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4854 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4855 tcg_gen_and_tl(t2
, t2
, t3
);
4856 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4857 tcg_gen_or_tl(t2
, t2
, t3
);
4858 tcg_gen_movi_tl(t3
, 0);
4859 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4860 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4861 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4862 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4863 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4870 TCGv t2
= tcg_const_tl(0);
4871 TCGv t3
= tcg_const_tl(1);
4872 tcg_gen_ext32u_tl(t0
, t0
);
4873 tcg_gen_ext32u_tl(t1
, t1
);
4874 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4875 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4876 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4877 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4878 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4884 MIPS_INVAL("div1 TX79");
4885 generate_exception_end(ctx
, EXCP_RI
);
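/*
 * Legacy MULT/MULTU/DIV/DIVU and the MADD/MSUB accumulating forms:
 * results are written to the HI/LO pair of the selected accumulator
 * (acc is non-zero only for the DSP ASE extra accumulators).
 */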
static void gen_muldiv(DisasContext *ctx, uint32_t opc,
                       int acc, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    if (acc != 0) {
        check_dsp(ctx);
    }

    switch (opc) {
    case OPC_DIV:
        {
4912 TCGv t2
= tcg_temp_new();
4913 TCGv t3
= tcg_temp_new();
4914 tcg_gen_ext32s_tl(t0
, t0
);
4915 tcg_gen_ext32s_tl(t1
, t1
);
4916 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4917 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4918 tcg_gen_and_tl(t2
, t2
, t3
);
4919 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4920 tcg_gen_or_tl(t2
, t2
, t3
);
4921 tcg_gen_movi_tl(t3
, 0);
4922 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4923 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4924 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4925 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4926 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4933 TCGv t2
= tcg_const_tl(0);
4934 TCGv t3
= tcg_const_tl(1);
4935 tcg_gen_ext32u_tl(t0
, t0
);
4936 tcg_gen_ext32u_tl(t1
, t1
);
4937 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4938 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4939 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4940 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4941 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4948 TCGv_i32 t2
= tcg_temp_new_i32();
4949 TCGv_i32 t3
= tcg_temp_new_i32();
4950 tcg_gen_trunc_tl_i32(t2
, t0
);
4951 tcg_gen_trunc_tl_i32(t3
, t1
);
4952 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4953 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4954 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4955 tcg_temp_free_i32(t2
);
4956 tcg_temp_free_i32(t3
);
4961 TCGv_i32 t2
= tcg_temp_new_i32();
4962 TCGv_i32 t3
= tcg_temp_new_i32();
4963 tcg_gen_trunc_tl_i32(t2
, t0
);
4964 tcg_gen_trunc_tl_i32(t3
, t1
);
4965 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4966 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4967 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4968 tcg_temp_free_i32(t2
);
4969 tcg_temp_free_i32(t3
);
4972 #if defined(TARGET_MIPS64)
4975 TCGv t2
= tcg_temp_new();
4976 TCGv t3
= tcg_temp_new();
4977 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4978 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4979 tcg_gen_and_tl(t2
, t2
, t3
);
4980 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4981 tcg_gen_or_tl(t2
, t2
, t3
);
4982 tcg_gen_movi_tl(t3
, 0);
4983 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4984 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4985 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4992 TCGv t2
= tcg_const_tl(0);
4993 TCGv t3
= tcg_const_tl(1);
4994 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4995 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4996 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
5002 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5005 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5010 TCGv_i64 t2
= tcg_temp_new_i64();
5011 TCGv_i64 t3
= tcg_temp_new_i64();
5013 tcg_gen_ext_tl_i64(t2
, t0
);
5014 tcg_gen_ext_tl_i64(t3
, t1
);
5015 tcg_gen_mul_i64(t2
, t2
, t3
);
5016 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5017 tcg_gen_add_i64(t2
, t2
, t3
);
5018 tcg_temp_free_i64(t3
);
5019 gen_move_low32(cpu_LO
[acc
], t2
);
5020 gen_move_high32(cpu_HI
[acc
], t2
);
5021 tcg_temp_free_i64(t2
);
5026 TCGv_i64 t2
= tcg_temp_new_i64();
5027 TCGv_i64 t3
= tcg_temp_new_i64();
5029 tcg_gen_ext32u_tl(t0
, t0
);
5030 tcg_gen_ext32u_tl(t1
, t1
);
5031 tcg_gen_extu_tl_i64(t2
, t0
);
5032 tcg_gen_extu_tl_i64(t3
, t1
);
5033 tcg_gen_mul_i64(t2
, t2
, t3
);
5034 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5035 tcg_gen_add_i64(t2
, t2
, t3
);
5036 tcg_temp_free_i64(t3
);
5037 gen_move_low32(cpu_LO
[acc
], t2
);
5038 gen_move_high32(cpu_HI
[acc
], t2
);
5039 tcg_temp_free_i64(t2
);
5044 TCGv_i64 t2
= tcg_temp_new_i64();
5045 TCGv_i64 t3
= tcg_temp_new_i64();
5047 tcg_gen_ext_tl_i64(t2
, t0
);
5048 tcg_gen_ext_tl_i64(t3
, t1
);
5049 tcg_gen_mul_i64(t2
, t2
, t3
);
5050 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5051 tcg_gen_sub_i64(t2
, t3
, t2
);
5052 tcg_temp_free_i64(t3
);
5053 gen_move_low32(cpu_LO
[acc
], t2
);
5054 gen_move_high32(cpu_HI
[acc
], t2
);
5055 tcg_temp_free_i64(t2
);
5060 TCGv_i64 t2
= tcg_temp_new_i64();
5061 TCGv_i64 t3
= tcg_temp_new_i64();
5063 tcg_gen_ext32u_tl(t0
, t0
);
5064 tcg_gen_ext32u_tl(t1
, t1
);
5065 tcg_gen_extu_tl_i64(t2
, t0
);
5066 tcg_gen_extu_tl_i64(t3
, t1
);
5067 tcg_gen_mul_i64(t2
, t2
, t3
);
5068 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5069 tcg_gen_sub_i64(t2
, t3
, t2
);
5070 tcg_temp_free_i64(t3
);
5071 gen_move_low32(cpu_LO
[acc
], t2
);
5072 gen_move_high32(cpu_HI
[acc
], t2
);
5073 tcg_temp_free_i64(t2
);
5077 MIPS_INVAL("mul/div");
5078 generate_exception_end(ctx
, EXCP_RI
);
/*
 * These MULT[U] and MADD[U] instructions, implemented in, for example,
 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
 * architectures, are special three-operand variants with the syntax
 *
 *     MULT[U][1] rd, rs, rt
 *
 * such that
 *
 *     (rd, LO, HI) <- rs * rt
 *
 * and
 *
 *     MADD[U][1] rd, rs, rt
 *
 * such that
 *
 *     (rd, LO, HI) <- (LO, HI) + rs * rt
 *
 * where the low-order 32 bits of the result are placed into both the
 * GPR rd and the special register LO.  The high-order 32 bits of the
 * result are placed into the special register HI.
 *
 * If the GPR rd is omitted in assembly language, it is taken to be 0,
 * which is the zero register that always reads as 0.
 */
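/*
 * For example, with the three-operand TX79/R5900 form (register numbers
 * chosen arbitrarily):
 *
 *     mult1 $3, $4, $5   # $3 = LO1 = low32($4 * $5), HI1 = high32($4 * $5)
 */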
static void gen_mul_txx9(DisasContext *ctx, uint32_t opc,
                         int rd, int rs, int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int acc = 0;

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case MMI_OPC_MULT1:
        acc = 1;
        /* Fall through */
    case MMI_OPC_MULT:
        {
5128 TCGv_i32 t2
= tcg_temp_new_i32();
5129 TCGv_i32 t3
= tcg_temp_new_i32();
5130 tcg_gen_trunc_tl_i32(t2
, t0
);
5131 tcg_gen_trunc_tl_i32(t3
, t1
);
5132 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5134 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5136 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5137 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5138 tcg_temp_free_i32(t2
);
5139 tcg_temp_free_i32(t3
);
5142 case MMI_OPC_MULTU1
:
5147 TCGv_i32 t2
= tcg_temp_new_i32();
5148 TCGv_i32 t3
= tcg_temp_new_i32();
5149 tcg_gen_trunc_tl_i32(t2
, t0
);
5150 tcg_gen_trunc_tl_i32(t3
, t1
);
5151 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5153 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5155 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5156 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5157 tcg_temp_free_i32(t2
);
5158 tcg_temp_free_i32(t3
);
5166 TCGv_i64 t2
= tcg_temp_new_i64();
5167 TCGv_i64 t3
= tcg_temp_new_i64();
5169 tcg_gen_ext_tl_i64(t2
, t0
);
5170 tcg_gen_ext_tl_i64(t3
, t1
);
5171 tcg_gen_mul_i64(t2
, t2
, t3
);
5172 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5173 tcg_gen_add_i64(t2
, t2
, t3
);
5174 tcg_temp_free_i64(t3
);
5175 gen_move_low32(cpu_LO
[acc
], t2
);
5176 gen_move_high32(cpu_HI
[acc
], t2
);
5178 gen_move_low32(cpu_gpr
[rd
], t2
);
5180 tcg_temp_free_i64(t2
);
5183 case MMI_OPC_MADDU1
:
5188 TCGv_i64 t2
= tcg_temp_new_i64();
5189 TCGv_i64 t3
= tcg_temp_new_i64();
5191 tcg_gen_ext32u_tl(t0
, t0
);
5192 tcg_gen_ext32u_tl(t1
, t1
);
5193 tcg_gen_extu_tl_i64(t2
, t0
);
5194 tcg_gen_extu_tl_i64(t3
, t1
);
5195 tcg_gen_mul_i64(t2
, t2
, t3
);
5196 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5197 tcg_gen_add_i64(t2
, t2
, t3
);
5198 tcg_temp_free_i64(t3
);
5199 gen_move_low32(cpu_LO
[acc
], t2
);
5200 gen_move_high32(cpu_HI
[acc
], t2
);
5202 gen_move_low32(cpu_gpr
[rd
], t2
);
5204 tcg_temp_free_i64(t2
);
5208 MIPS_INVAL("mul/madd TXx9");
5209 generate_exception_end(ctx
, EXCP_RI
);
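/*
 * VR54xx multiply/accumulate variants: each opcode maps onto a dedicated
 * helper, and the helper's result is written back to rd.
 */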
static void gen_mul_vr54xx(DisasContext *ctx, uint32_t opc,
                           int rd, int rs, int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
5228 case OPC_VR54XX_MULS
:
5229 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5231 case OPC_VR54XX_MULSU
:
5232 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5234 case OPC_VR54XX_MACC
:
5235 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5237 case OPC_VR54XX_MACCU
:
5238 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5240 case OPC_VR54XX_MSAC
:
5241 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5243 case OPC_VR54XX_MSACU
:
5244 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5246 case OPC_VR54XX_MULHI
:
5247 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5249 case OPC_VR54XX_MULHIU
:
5250 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5252 case OPC_VR54XX_MULSHI
:
5253 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5255 case OPC_VR54XX_MULSHIU
:
5256 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5258 case OPC_VR54XX_MACCHI
:
5259 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5261 case OPC_VR54XX_MACCHIU
:
5262 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5264 case OPC_VR54XX_MSACHI
:
5265 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5267 case OPC_VR54XX_MSACHIU
:
5268 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5271 MIPS_INVAL("mul vr54xx");
5272 generate_exception_end(ctx
, EXCP_RI
);
5275 gen_store_gpr(t0
, rd
);
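/* Count leading zeros/ones (CLZ/CLO and the 64-bit DCLZ/DCLO). */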
static void gen_cl(DisasContext *ctx, uint32_t opc,
                   int rd, int rs)
{
    TCGv t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = cpu_gpr[rd];
    gen_load_gpr(t0, rs);

    switch (opc) {
    case OPC_CLO:
    case R6_OPC_CLO:
5297 #if defined(TARGET_MIPS64)
5301 tcg_gen_not_tl(t0
, t0
);
5310 tcg_gen_ext32u_tl(t0
, t0
);
5311 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5312 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5314 #if defined(TARGET_MIPS64)
5319 tcg_gen_clzi_i64(t0
, t0
, 64);
5325 /* Godson integer instructions */
5326 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5327 int rd
, int rs
, int rt
)
5339 case OPC_MULTU_G_2E
:
5340 case OPC_MULTU_G_2F
:
5341 #if defined(TARGET_MIPS64)
5342 case OPC_DMULT_G_2E
:
5343 case OPC_DMULT_G_2F
:
5344 case OPC_DMULTU_G_2E
:
5345 case OPC_DMULTU_G_2F
:
5347 t0
= tcg_temp_new();
5348 t1
= tcg_temp_new();
5351 t0
= tcg_temp_local_new();
5352 t1
= tcg_temp_local_new();
5356 gen_load_gpr(t0
, rs
);
5357 gen_load_gpr(t1
, rt
);
5362 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5363 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5365 case OPC_MULTU_G_2E
:
5366 case OPC_MULTU_G_2F
:
5367 tcg_gen_ext32u_tl(t0
, t0
);
5368 tcg_gen_ext32u_tl(t1
, t1
);
5369 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5370 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5375 TCGLabel
*l1
= gen_new_label();
5376 TCGLabel
*l2
= gen_new_label();
5377 TCGLabel
*l3
= gen_new_label();
5378 tcg_gen_ext32s_tl(t0
, t0
);
5379 tcg_gen_ext32s_tl(t1
, t1
);
5380 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5381 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5384 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5385 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5386 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5389 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5390 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5397 TCGLabel
*l1
= gen_new_label();
5398 TCGLabel
*l2
= gen_new_label();
5399 tcg_gen_ext32u_tl(t0
, t0
);
5400 tcg_gen_ext32u_tl(t1
, t1
);
5401 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5402 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5405 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5406 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5413 TCGLabel
*l1
= gen_new_label();
5414 TCGLabel
*l2
= gen_new_label();
5415 TCGLabel
*l3
= gen_new_label();
5416 tcg_gen_ext32u_tl(t0
, t0
);
5417 tcg_gen_ext32u_tl(t1
, t1
);
5418 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5419 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5420 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5422 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5425 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5426 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5433 TCGLabel
*l1
= gen_new_label();
5434 TCGLabel
*l2
= gen_new_label();
5435 tcg_gen_ext32u_tl(t0
, t0
);
5436 tcg_gen_ext32u_tl(t1
, t1
);
5437 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5438 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5441 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5442 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5446 #if defined(TARGET_MIPS64)
5447 case OPC_DMULT_G_2E
:
5448 case OPC_DMULT_G_2F
:
5449 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5451 case OPC_DMULTU_G_2E
:
5452 case OPC_DMULTU_G_2F
:
5453 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5458 TCGLabel
*l1
= gen_new_label();
5459 TCGLabel
*l2
= gen_new_label();
5460 TCGLabel
*l3
= gen_new_label();
5461 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5462 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5465 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5466 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5467 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5470 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5474 case OPC_DDIVU_G_2E
:
5475 case OPC_DDIVU_G_2F
:
5477 TCGLabel
*l1
= gen_new_label();
5478 TCGLabel
*l2
= gen_new_label();
5479 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5480 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5483 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5490 TCGLabel
*l1
= gen_new_label();
5491 TCGLabel
*l2
= gen_new_label();
5492 TCGLabel
*l3
= gen_new_label();
5493 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5494 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5495 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5497 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5500 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5504 case OPC_DMODU_G_2E
:
5505 case OPC_DMODU_G_2F
:
5507 TCGLabel
*l1
= gen_new_label();
5508 TCGLabel
*l2
= gen_new_label();
5509 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5510 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5513 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5524 /* Loongson multimedia instructions */
5525 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5527 uint32_t opc
, shift_max
;
5530 opc
= MASK_LMI(ctx
->opcode
);
5536 t0
= tcg_temp_local_new_i64();
5537 t1
= tcg_temp_local_new_i64();
5540 t0
= tcg_temp_new_i64();
5541 t1
= tcg_temp_new_i64();
5545 check_cp1_enabled(ctx
);
5546 gen_load_fpr64(ctx
, t0
, rs
);
5547 gen_load_fpr64(ctx
, t1
, rt
);
5549 #define LMI_HELPER(UP, LO) \
5550 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5551 #define LMI_HELPER_1(UP, LO) \
5552 case OPC_##UP: gen_helper_##LO(t0, t0); break
5553 #define LMI_DIRECT(UP, LO, OP) \
5554 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5557 LMI_HELPER(PADDSH
, paddsh
);
5558 LMI_HELPER(PADDUSH
, paddush
);
5559 LMI_HELPER(PADDH
, paddh
);
5560 LMI_HELPER(PADDW
, paddw
);
5561 LMI_HELPER(PADDSB
, paddsb
);
5562 LMI_HELPER(PADDUSB
, paddusb
);
5563 LMI_HELPER(PADDB
, paddb
);
5565 LMI_HELPER(PSUBSH
, psubsh
);
5566 LMI_HELPER(PSUBUSH
, psubush
);
5567 LMI_HELPER(PSUBH
, psubh
);
5568 LMI_HELPER(PSUBW
, psubw
);
5569 LMI_HELPER(PSUBSB
, psubsb
);
5570 LMI_HELPER(PSUBUSB
, psubusb
);
5571 LMI_HELPER(PSUBB
, psubb
);
5573 LMI_HELPER(PSHUFH
, pshufh
);
5574 LMI_HELPER(PACKSSWH
, packsswh
);
5575 LMI_HELPER(PACKSSHB
, packsshb
);
5576 LMI_HELPER(PACKUSHB
, packushb
);
5578 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5579 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5580 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5581 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5582 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5583 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5585 LMI_HELPER(PAVGH
, pavgh
);
5586 LMI_HELPER(PAVGB
, pavgb
);
5587 LMI_HELPER(PMAXSH
, pmaxsh
);
5588 LMI_HELPER(PMINSH
, pminsh
);
5589 LMI_HELPER(PMAXUB
, pmaxub
);
5590 LMI_HELPER(PMINUB
, pminub
);
5592 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5593 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5594 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5595 LMI_HELPER(PCMPGTH
, pcmpgth
);
5596 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5597 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5599 LMI_HELPER(PSLLW
, psllw
);
5600 LMI_HELPER(PSLLH
, psllh
);
5601 LMI_HELPER(PSRLW
, psrlw
);
5602 LMI_HELPER(PSRLH
, psrlh
);
5603 LMI_HELPER(PSRAW
, psraw
);
5604 LMI_HELPER(PSRAH
, psrah
);
5606 LMI_HELPER(PMULLH
, pmullh
);
5607 LMI_HELPER(PMULHH
, pmulhh
);
5608 LMI_HELPER(PMULHUH
, pmulhuh
);
5609 LMI_HELPER(PMADDHW
, pmaddhw
);
5611 LMI_HELPER(PASUBUB
, pasubub
);
5612 LMI_HELPER_1(BIADD
, biadd
);
5613 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5615 LMI_DIRECT(PADDD
, paddd
, add
);
5616 LMI_DIRECT(PSUBD
, psubd
, sub
);
5617 LMI_DIRECT(XOR_CP2
, xor, xor);
5618 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5619 LMI_DIRECT(AND_CP2
, and, and);
5620 LMI_DIRECT(OR_CP2
, or, or);
5623 tcg_gen_andc_i64(t0
, t1
, t0
);
5627 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5630 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5633 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5636 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5640 tcg_gen_andi_i64(t1
, t1
, 3);
5641 tcg_gen_shli_i64(t1
, t1
, 4);
5642 tcg_gen_shr_i64(t0
, t0
, t1
);
5643 tcg_gen_ext16u_i64(t0
, t0
);
5647 tcg_gen_add_i64(t0
, t0
, t1
);
5648 tcg_gen_ext32s_i64(t0
, t0
);
5651 tcg_gen_sub_i64(t0
, t0
, t1
);
5652 tcg_gen_ext32s_i64(t0
, t0
);
5674 /* Make sure shift count isn't TCG undefined behaviour. */
5675 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5680 tcg_gen_shl_i64(t0
, t0
, t1
);
5685 * Since SRA is UndefinedResult without sign-extended inputs,
5686 * we can treat SRA and DSRA the same.
5688 tcg_gen_sar_i64(t0
, t0
, t1
);
5691 /* We want to shift in zeros for SRL; zero-extend first. */
5692 tcg_gen_ext32u_i64(t0
, t0
);
5695 tcg_gen_shr_i64(t0
, t0
, t1
);
5699 if (shift_max
== 32) {
5700 tcg_gen_ext32s_i64(t0
, t0
);
5703 /* Shifts larger than MAX produce zero. */
5704 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5705 tcg_gen_neg_i64(t1
, t1
);
5706 tcg_gen_and_i64(t0
, t0
, t1
);
5712 TCGv_i64 t2
= tcg_temp_new_i64();
5713 TCGLabel
*lab
= gen_new_label();
5715 tcg_gen_mov_i64(t2
, t0
);
5716 tcg_gen_add_i64(t0
, t1
, t2
);
5717 if (opc
== OPC_ADD_CP2
) {
5718 tcg_gen_ext32s_i64(t0
, t0
);
5720 tcg_gen_xor_i64(t1
, t1
, t2
);
5721 tcg_gen_xor_i64(t2
, t2
, t0
);
5722 tcg_gen_andc_i64(t1
, t2
, t1
);
5723 tcg_temp_free_i64(t2
);
5724 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5725 generate_exception(ctx
, EXCP_OVERFLOW
);
5733 TCGv_i64 t2
= tcg_temp_new_i64();
5734 TCGLabel
*lab
= gen_new_label();
5736 tcg_gen_mov_i64(t2
, t0
);
5737 tcg_gen_sub_i64(t0
, t1
, t2
);
5738 if (opc
== OPC_SUB_CP2
) {
5739 tcg_gen_ext32s_i64(t0
, t0
);
5741 tcg_gen_xor_i64(t1
, t1
, t2
);
5742 tcg_gen_xor_i64(t2
, t2
, t0
);
5743 tcg_gen_and_i64(t1
, t1
, t2
);
5744 tcg_temp_free_i64(t2
);
5745 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5746 generate_exception(ctx
, EXCP_OVERFLOW
);
5752 tcg_gen_ext32u_i64(t0
, t0
);
5753 tcg_gen_ext32u_i64(t1
, t1
);
5754 tcg_gen_mul_i64(t0
, t0
, t1
);
5764 * ??? Document is unclear: Set FCC[CC]. Does that mean the
5765 * FD field is the CC field?
5768 MIPS_INVAL("loongson_cp2");
5769 generate_exception_end(ctx
, EXCP_RI
);
5776 gen_store_fpr64(ctx
, t0
, rd
);
5778 tcg_temp_free_i64(t0
);
5779 tcg_temp_free_i64(t1
);
static void gen_trap(DisasContext *ctx, uint32_t opc,
                     int rs, int rt, int16_t imm)
{
    int cond;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    cond = 0;
    /* Load needed operands */
    switch (opc) {
    case OPC_TEQ:
    case OPC_TGE:
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TNE:
5799 /* Compare two registers */
5801 gen_load_gpr(t0
, rs
);
5802 gen_load_gpr(t1
, rt
);
5812 /* Compare register to immediate */
5813 if (rs
!= 0 || imm
!= 0) {
5814 gen_load_gpr(t0
, rs
);
5815 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5822 case OPC_TEQ
: /* rs == rs */
5823 case OPC_TEQI
: /* r0 == 0 */
5824 case OPC_TGE
: /* rs >= rs */
5825 case OPC_TGEI
: /* r0 >= 0 */
5826 case OPC_TGEU
: /* rs >= rs unsigned */
5827 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5829 generate_exception_end(ctx
, EXCP_TRAP
);
5831 case OPC_TLT
: /* rs < rs */
5832 case OPC_TLTI
: /* r0 < 0 */
5833 case OPC_TLTU
: /* rs < rs unsigned */
5834 case OPC_TLTIU
: /* r0 < 0 unsigned */
5835 case OPC_TNE
: /* rs != rs */
5836 case OPC_TNEI
: /* r0 != 0 */
5837 /* Never trap: treat as NOP. */
5841 TCGLabel
*l1
= gen_new_label();
5846 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5850 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5854 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5858 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5862 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5866 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5869 generate_exception(ctx
, EXCP_TRAP
);
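/*
 * Direct TB chaining is only safe when the destination stays on the same
 * guest page as the current TB and single-stepping is disabled; otherwise
 * gen_goto_tb() falls back to a TB lookup or a debug exception.
 */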
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->base.singlestep_enabled)) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        gen_save_pc(dest);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
    }
}
/* Branches (before delay slot) */
static void gen_compute_branch(DisasContext *ctx, uint32_t opc,
                               int insn_bytes,
                               int rs, int rt, int32_t offset,
                               int delayslot_size)
{
    target_ulong btgt = -1;
    int blink = 0;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->base.pc_next);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands */
    switch (opc) {
    case OPC_BEQ:
    case OPC_BEQL:
    case OPC_BNE:
    case OPC_BNEL:
5932 /* Compare two registers */
5934 gen_load_gpr(t0
, rs
);
5935 gen_load_gpr(t1
, rt
);
5938 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5952 /* Compare to zero */
5954 gen_load_gpr(t0
, rs
);
5957 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5960 #if defined(TARGET_MIPS64)
5962 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5964 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5967 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5972 /* Jump to immediate */
5973 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5978 /* Jump to register */
5979 if (offset
!= 0 && offset
!= 16) {
5981 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5982 * others are reserved.
5984 MIPS_INVAL("jump hint");
5985 generate_exception_end(ctx
, EXCP_RI
);
5988 gen_load_gpr(btarget
, rs
);
5991 MIPS_INVAL("branch/jump");
5992 generate_exception_end(ctx
, EXCP_RI
);
5995 if (bcond_compute
== 0) {
5996 /* No condition to be computed */
5998 case OPC_BEQ
: /* rx == rx */
5999 case OPC_BEQL
: /* rx == rx likely */
6000 case OPC_BGEZ
: /* 0 >= 0 */
6001 case OPC_BGEZL
: /* 0 >= 0 likely */
6002 case OPC_BLEZ
: /* 0 <= 0 */
6003 case OPC_BLEZL
: /* 0 <= 0 likely */
6005 ctx
->hflags
|= MIPS_HFLAG_B
;
6007 case OPC_BGEZAL
: /* 0 >= 0 */
6008 case OPC_BGEZALL
: /* 0 >= 0 likely */
6009 /* Always take and link */
6011 ctx
->hflags
|= MIPS_HFLAG_B
;
6013 case OPC_BNE
: /* rx != rx */
6014 case OPC_BGTZ
: /* 0 > 0 */
6015 case OPC_BLTZ
: /* 0 < 0 */
6018 case OPC_BLTZAL
: /* 0 < 0 */
6020 * Handle as an unconditional branch to get correct delay
6024 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
6025 ctx
->hflags
|= MIPS_HFLAG_B
;
6027 case OPC_BLTZALL
: /* 0 < 0 likely */
6028 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6029 /* Skip the instruction in the delay slot */
6030 ctx
->base
.pc_next
+= 4;
6032 case OPC_BNEL
: /* rx != rx likely */
6033 case OPC_BGTZL
: /* 0 > 0 likely */
6034 case OPC_BLTZL
: /* 0 < 0 likely */
6035 /* Skip the instruction in the delay slot */
6036 ctx
->base
.pc_next
+= 4;
6039 ctx
->hflags
|= MIPS_HFLAG_B
;
6042 ctx
->hflags
|= MIPS_HFLAG_BX
;
6046 ctx
->hflags
|= MIPS_HFLAG_B
;
6049 ctx
->hflags
|= MIPS_HFLAG_BR
;
6053 ctx
->hflags
|= MIPS_HFLAG_BR
;
6056 MIPS_INVAL("branch/jump");
6057 generate_exception_end(ctx
, EXCP_RI
);
6063 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6066 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6069 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6072 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6075 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6078 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6081 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6085 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6089 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6092 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6095 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6098 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6101 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6104 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6107 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6109 #if defined(TARGET_MIPS64)
6111 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6115 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6118 ctx
->hflags
|= MIPS_HFLAG_BC
;
6121 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6124 ctx
->hflags
|= MIPS_HFLAG_BL
;
6127 MIPS_INVAL("conditional branch/jump");
6128 generate_exception_end(ctx
, EXCP_RI
);
6133 ctx
->btarget
= btgt
;
6135 switch (delayslot_size
) {
6137 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6140 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6145 int post_delay
= insn_bytes
+ delayslot_size
;
6146 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6148 tcg_gen_movi_tl(cpu_gpr
[blink
],
6149 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6153 if (insn_bytes
== 2) {
6154 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* nanoMIPS Branches */
static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
                                  int insn_bytes,
                                  int rs, int rt, int32_t offset)
{
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
    switch (opc) {
    case OPC_BEQ:
    case OPC_BNE:
6175 /* Compare two registers */
6177 gen_load_gpr(t0
, rs
);
6178 gen_load_gpr(t1
, rt
);
6181 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6184 /* Compare to zero */
6186 gen_load_gpr(t0
, rs
);
6189 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6192 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
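        /*
         * The andi above extracts the low 6 bits, i.e. the DSPControl "pos"
         * field; the conditional code further down compares it against 32,
         * which is the BPOSGE32-style branch condition.
         */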
6194 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6198 /* Jump to register */
6199 if (offset
!= 0 && offset
!= 16) {
6201 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6202 * others are reserved.
6204 MIPS_INVAL("jump hint");
6205 generate_exception_end(ctx
, EXCP_RI
);
6208 gen_load_gpr(btarget
, rs
);
6211 MIPS_INVAL("branch/jump");
6212 generate_exception_end(ctx
, EXCP_RI
);
6215 if (bcond_compute
== 0) {
6216 /* No condition to be computed */
6218 case OPC_BEQ
: /* rx == rx */
6220 ctx
->hflags
|= MIPS_HFLAG_B
;
6222 case OPC_BGEZAL
: /* 0 >= 0 */
6223 /* Always take and link */
6224 tcg_gen_movi_tl(cpu_gpr
[31],
6225 ctx
->base
.pc_next
+ insn_bytes
);
6226 ctx
->hflags
|= MIPS_HFLAG_B
;
6228 case OPC_BNE
: /* rx != rx */
6229 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6230 /* Skip the instruction in the delay slot */
6231 ctx
->base
.pc_next
+= 4;
6234 ctx
->hflags
|= MIPS_HFLAG_BR
;
6238 tcg_gen_movi_tl(cpu_gpr
[rt
],
6239 ctx
->base
.pc_next
+ insn_bytes
);
6241 ctx
->hflags
|= MIPS_HFLAG_BR
;
6244 MIPS_INVAL("branch/jump");
6245 generate_exception_end(ctx
, EXCP_RI
);
6251 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6254 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6257 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6258 tcg_gen_movi_tl(cpu_gpr
[31],
6259 ctx
->base
.pc_next
+ insn_bytes
);
6262 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6264 ctx
->hflags
|= MIPS_HFLAG_BC
;
6267 MIPS_INVAL("conditional branch/jump");
6268 generate_exception_end(ctx
, EXCP_RI
);
6273 ctx
->btarget
= btgt
;
6276 if (insn_bytes
== 2) {
6277 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* special3 bitfield operations */
static void gen_bitops(DisasContext *ctx, uint32_t opc, int rt,
                       int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
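    /*
     * Note on the bitfield encodings handled below: for the EXT family the
     * "msb" operand encodes size - 1 (hence the extract width of msb + 1),
     * while for the INS family it encodes the position of the field's most
     * significant bit (hence the deposit width of msb - lsb + 1).
     */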
6291 gen_load_gpr(t1
, rs
);
6294 if (lsb
+ msb
> 31) {
6298 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6301 * The two checks together imply that lsb == 0,
6302 * so this is a simple sign-extension.
6304 tcg_gen_ext32s_tl(t0
, t1
);
6307 #if defined(TARGET_MIPS64)
6316 if (lsb
+ msb
> 63) {
6319 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6326 gen_load_gpr(t0
, rt
);
6327 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6328 tcg_gen_ext32s_tl(t0
, t0
);
6330 #if defined(TARGET_MIPS64)
6341 gen_load_gpr(t0
, rt
);
6342 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6347 MIPS_INVAL("bitops");
6348 generate_exception_end(ctx
, EXCP_RI
);
6353 gen_store_gpr(t0
, rt
);
static void gen_bshfl(DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    switch (op2) {
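    /*
     * The first arm below corresponds to WSBH: the 0x00FF00FF mask isolates
     * the odd and even byte lanes, which are shifted past each other and
     * recombined, swapping the bytes within each halfword; the result is
     * then sign-extended to the target register width.
     */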
6372 TCGv t1
= tcg_temp_new();
6373 TCGv t2
= tcg_const_tl(0x00FF00FF);
6375 tcg_gen_shri_tl(t1
, t0
, 8);
6376 tcg_gen_and_tl(t1
, t1
, t2
);
6377 tcg_gen_and_tl(t0
, t0
, t2
);
6378 tcg_gen_shli_tl(t0
, t0
, 8);
6379 tcg_gen_or_tl(t0
, t0
, t1
);
6382 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6386 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6389 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6391 #if defined(TARGET_MIPS64)
6394 TCGv t1
= tcg_temp_new();
6395 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6397 tcg_gen_shri_tl(t1
, t0
, 8);
6398 tcg_gen_and_tl(t1
, t1
, t2
);
6399 tcg_gen_and_tl(t0
, t0
, t2
);
6400 tcg_gen_shli_tl(t0
, t0
, 8);
6401 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6408 TCGv t1
= tcg_temp_new();
6409 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
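            /*
             * This arm corresponds to DSHD: halfwords are first swapped
             * within each 32-bit word using the 0x0000FFFF0000FFFF mask,
             * then the two words are exchanged with a pair of 32-bit
             * shifts, reversing the halfword order of the doubleword.
             */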
6411 tcg_gen_shri_tl(t1
, t0
, 16);
6412 tcg_gen_and_tl(t1
, t1
, t2
);
6413 tcg_gen_and_tl(t0
, t0
, t2
);
6414 tcg_gen_shli_tl(t0
, t0
, 16);
6415 tcg_gen_or_tl(t0
, t0
, t1
);
6416 tcg_gen_shri_tl(t1
, t0
, 32);
6417 tcg_gen_shli_tl(t0
, t0
, 32);
6418 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
        MIPS_INVAL("bshfl");
        generate_exception_end(ctx, EXCP_RI);
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    TCGv t0;
    TCGv t1;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
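/*
 * gen_align_bits() computes (rt << bits) | (rs >> (wordsz - bits)), i.e. it
 * extracts a word-sized field from the rt:rs concatenation; bits == 0
 * degenerates to a copy of rt and bits == wordsz to a copy of rs.
 */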
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        if (bits == 0) {
            gen_load_gpr(t0, rt);
        } else {
            gen_load_gpr(t0, rs);
        }
        switch (wordsz) {
        case 32:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        switch (wordsz) {
        case 32:
            {
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }

    tcg_temp_free(t0);
}
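/*
 * ALIGN/DALIGN specify a byte offset, hence bp * 8 below; EXTW/EXTD specify
 * a right-shift amount of the register pair, which gen_align_bits()
 * expresses as the complementary count wordsz - shift.
 */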
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}

static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
}
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfc0_load32(TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64(TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32(TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}

#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
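/*
 * CP0_CHECK() guards accesses to optional CP0 state: if the required
 * feature flag is clear in the translation context, control transfers to
 * the enclosing accessor's cp0_unimplemented label.
 */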
6632 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6634 const char *register_name
= "invalid";
6637 case CP0_REGISTER_02
:
6640 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6641 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6642 register_name
= "EntryLo0";
6645 goto cp0_unimplemented
;
6648 case CP0_REGISTER_03
:
6650 case CP0_REG03__ENTRYLO1
:
6651 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6652 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6653 register_name
= "EntryLo1";
6656 goto cp0_unimplemented
;
6659 case CP0_REGISTER_09
:
6661 case CP0_REG09__SAAR
:
6662 CP0_CHECK(ctx
->saar
);
6663 gen_helper_mfhc0_saar(arg
, cpu_env
);
6664 register_name
= "SAAR";
6667 goto cp0_unimplemented
;
6670 case CP0_REGISTER_17
:
6673 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
6674 ctx
->CP0_LLAddr_shift
);
6675 register_name
= "LLAddr";
6678 CP0_CHECK(ctx
->mrp
);
6679 gen_helper_mfhc0_maar(arg
, cpu_env
);
6680 register_name
= "MAAR";
6683 goto cp0_unimplemented
;
6686 case CP0_REGISTER_28
:
6692 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6693 register_name
= "TagLo";
6696 goto cp0_unimplemented
;
6700 goto cp0_unimplemented
;
    trace_mips_translate_c0("mfhc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    tcg_gen_movi_tl(arg, 0);
}
6711 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6713 const char *register_name
= "invalid";
6714 uint64_t mask
= ctx
->PAMask
>> 36;
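    /*
     * MTHC0 writes the upper word of a 64-bit EntryLo, which holds the
     * extended physical-address bits; PAMask >> 36 masks the written value
     * down to the PA bits the CPU actually implements above bit 35.
     */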
6717 case CP0_REGISTER_02
:
6720 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6721 tcg_gen_andi_tl(arg
, arg
, mask
);
6722 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6723 register_name
= "EntryLo0";
6726 goto cp0_unimplemented
;
6729 case CP0_REGISTER_03
:
6731 case CP0_REG03__ENTRYLO1
:
6732 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6733 tcg_gen_andi_tl(arg
, arg
, mask
);
6734 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6735 register_name
= "EntryLo1";
6738 goto cp0_unimplemented
;
6741 case CP0_REGISTER_09
:
6743 case CP0_REG09__SAAR
:
6744 CP0_CHECK(ctx
->saar
);
6745 gen_helper_mthc0_saar(cpu_env
, arg
);
6746 register_name
= "SAAR";
6749 goto cp0_unimplemented
;
6752 case CP0_REGISTER_17
:
6756 * LLAddr is read-only (the only exception is bit 0 if LLB is
6757 * supported); the CP0_LLAddr_rw_bitmask does not seem to be
6758 * relevant for modern MIPS cores supporting MTHC0, therefore
6759 * treating MTHC0 to LLAddr as NOP.
6761 register_name
= "LLAddr";
6764 CP0_CHECK(ctx
->mrp
);
6765 gen_helper_mthc0_maar(cpu_env
, arg
);
6766 register_name
= "MAAR";
6769 goto cp0_unimplemented
;
6772 case CP0_REGISTER_28
:
6778 tcg_gen_andi_tl(arg
, arg
, mask
);
6779 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6780 register_name
= "TagLo";
6783 goto cp0_unimplemented
;
6787 goto cp0_unimplemented
;
6789 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
6792 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6793 register_name
, reg
, sel
);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    if (sel != 0) {
        check_insn(ctx, ISA_MIPS32);
    }

    switch (reg) {
6814 case CP0_REGISTER_00
:
6816 case CP0_REG00__INDEX
:
6817 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6818 register_name
= "Index";
6820 case CP0_REG00__MVPCONTROL
:
6821 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6822 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6823 register_name
= "MVPControl";
6825 case CP0_REG00__MVPCONF0
:
6826 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6827 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6828 register_name
= "MVPConf0";
6830 case CP0_REG00__MVPCONF1
:
6831 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6832 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6833 register_name
= "MVPConf1";
6835 case CP0_REG00__VPCONTROL
:
6837 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6838 register_name
= "VPControl";
6841 goto cp0_unimplemented
;
6844 case CP0_REGISTER_01
:
6846 case CP0_REG01__RANDOM
:
6847 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6848 gen_helper_mfc0_random(arg
, cpu_env
);
6849 register_name
= "Random";
6851 case CP0_REG01__VPECONTROL
:
6852 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6853 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6854 register_name
= "VPEControl";
6856 case CP0_REG01__VPECONF0
:
6857 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6858 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6859 register_name
= "VPEConf0";
6861 case CP0_REG01__VPECONF1
:
6862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6863 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6864 register_name
= "VPEConf1";
6866 case CP0_REG01__YQMASK
:
6867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6868 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6869 register_name
= "YQMask";
6871 case CP0_REG01__VPESCHEDULE
:
6872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6873 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6874 register_name
= "VPESchedule";
6876 case CP0_REG01__VPESCHEFBACK
:
6877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6878 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6879 register_name
= "VPEScheFBack";
6881 case CP0_REG01__VPEOPT
:
6882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6883 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6884 register_name
= "VPEOpt";
6887 goto cp0_unimplemented
;
6890 case CP0_REGISTER_02
:
6892 case CP0_REG02__ENTRYLO0
:
6894 TCGv_i64 tmp
= tcg_temp_new_i64();
6895 tcg_gen_ld_i64(tmp
, cpu_env
,
6896 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6897 #if defined(TARGET_MIPS64)
6899 /* Move RI/XI fields to bits 31:30 */
6900 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6901 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6904 gen_move_low32(arg
, tmp
);
6905 tcg_temp_free_i64(tmp
);
6907 register_name
= "EntryLo0";
6909 case CP0_REG02__TCSTATUS
:
6910 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6911 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6912 register_name
= "TCStatus";
6914 case CP0_REG02__TCBIND
:
6915 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6916 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6917 register_name
= "TCBind";
6919 case CP0_REG02__TCRESTART
:
6920 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6921 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6922 register_name
= "TCRestart";
6924 case CP0_REG02__TCHALT
:
6925 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6926 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6927 register_name
= "TCHalt";
6929 case CP0_REG02__TCCONTEXT
:
6930 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6931 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6932 register_name
= "TCContext";
6934 case CP0_REG02__TCSCHEDULE
:
6935 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6936 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6937 register_name
= "TCSchedule";
6939 case CP0_REG02__TCSCHEFBACK
:
6940 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6941 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6942 register_name
= "TCScheFBack";
6945 goto cp0_unimplemented
;
6948 case CP0_REGISTER_03
:
6950 case CP0_REG03__ENTRYLO1
:
6952 TCGv_i64 tmp
= tcg_temp_new_i64();
6953 tcg_gen_ld_i64(tmp
, cpu_env
,
6954 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6955 #if defined(TARGET_MIPS64)
6957 /* Move RI/XI fields to bits 31:30 */
6958 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6959 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6962 gen_move_low32(arg
, tmp
);
6963 tcg_temp_free_i64(tmp
);
6965 register_name
= "EntryLo1";
6967 case CP0_REG03__GLOBALNUM
:
6969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6970 register_name
= "GlobalNumber";
6973 goto cp0_unimplemented
;
6976 case CP0_REGISTER_04
:
6978 case CP0_REG04__CONTEXT
:
6979 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6980 tcg_gen_ext32s_tl(arg
, arg
);
6981 register_name
= "Context";
6983 case CP0_REG04__CONTEXTCONFIG
:
6985 /* gen_helper_mfc0_contextconfig(arg); */
6986 register_name
= "ContextConfig";
6987 goto cp0_unimplemented
;
6988 case CP0_REG04__USERLOCAL
:
6989 CP0_CHECK(ctx
->ulri
);
6990 tcg_gen_ld_tl(arg
, cpu_env
,
6991 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6992 tcg_gen_ext32s_tl(arg
, arg
);
6993 register_name
= "UserLocal";
6996 goto cp0_unimplemented
;
6999 case CP0_REGISTER_05
:
7001 case CP0_REG05__PAGEMASK
:
7002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7003 register_name
= "PageMask";
7005 case CP0_REG05__PAGEGRAIN
:
7006 check_insn(ctx
, ISA_MIPS32R2
);
7007 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7008 register_name
= "PageGrain";
7010 case CP0_REG05__SEGCTL0
:
7012 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7013 tcg_gen_ext32s_tl(arg
, arg
);
7014 register_name
= "SegCtl0";
7016 case CP0_REG05__SEGCTL1
:
7018 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7019 tcg_gen_ext32s_tl(arg
, arg
);
7020 register_name
= "SegCtl1";
7022 case CP0_REG05__SEGCTL2
:
7024 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7025 tcg_gen_ext32s_tl(arg
, arg
);
7026 register_name
= "SegCtl2";
7028 case CP0_REG05__PWBASE
:
7030 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7031 register_name
= "PWBase";
7033 case CP0_REG05__PWFIELD
:
7035 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
7036 register_name
= "PWField";
7038 case CP0_REG05__PWSIZE
:
7040 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
7041 register_name
= "PWSize";
7044 goto cp0_unimplemented
;
7047 case CP0_REGISTER_06
:
7049 case CP0_REG06__WIRED
:
7050 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7051 register_name
= "Wired";
7053 case CP0_REG06__SRSCONF0
:
7054 check_insn(ctx
, ISA_MIPS32R2
);
7055 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7056 register_name
= "SRSConf0";
7058 case CP0_REG06__SRSCONF1
:
7059 check_insn(ctx
, ISA_MIPS32R2
);
7060 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7061 register_name
= "SRSConf1";
7063 case CP0_REG06__SRSCONF2
:
7064 check_insn(ctx
, ISA_MIPS32R2
);
7065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7066 register_name
= "SRSConf2";
7068 case CP0_REG06__SRSCONF3
:
7069 check_insn(ctx
, ISA_MIPS32R2
);
7070 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7071 register_name
= "SRSConf3";
7073 case CP0_REG06__SRSCONF4
:
7074 check_insn(ctx
, ISA_MIPS32R2
);
7075 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7076 register_name
= "SRSConf4";
7078 case CP0_REG06__PWCTL
:
7080 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7081 register_name
= "PWCtl";
7084 goto cp0_unimplemented
;
7087 case CP0_REGISTER_07
:
7089 case CP0_REG07__HWRENA
:
7090 check_insn(ctx
, ISA_MIPS32R2
);
7091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7092 register_name
= "HWREna";
7095 goto cp0_unimplemented
;
7098 case CP0_REGISTER_08
:
7100 case CP0_REG08__BADVADDR
:
7101 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7102 tcg_gen_ext32s_tl(arg
, arg
);
7103 register_name
= "BadVAddr";
7105 case CP0_REG08__BADINSTR
:
7107 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7108 register_name
= "BadInstr";
7110 case CP0_REG08__BADINSTRP
:
7112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7113 register_name
= "BadInstrP";
7115 case CP0_REG08__BADINSTRX
:
7117 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7118 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7119 register_name
= "BadInstrX";
7122 goto cp0_unimplemented
;
7125 case CP0_REGISTER_09
:
7127 case CP0_REG09__COUNT
:
7128 /* Mark as an IO operation because we read the time. */
7129 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7132 gen_helper_mfc0_count(arg
, cpu_env
);
7134 * Break the TB to be able to take timer interrupts immediately
7135 * after reading count. DISAS_STOP isn't sufficient, we need to
7136 * ensure we break completely out of translated code.
7138 gen_save_pc(ctx
->base
.pc_next
+ 4);
7139 ctx
->base
.is_jmp
= DISAS_EXIT
;
7140 register_name
= "Count";
7142 case CP0_REG09__SAARI
:
7143 CP0_CHECK(ctx
->saar
);
7144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7145 register_name
= "SAARI";
7147 case CP0_REG09__SAAR
:
7148 CP0_CHECK(ctx
->saar
);
7149 gen_helper_mfc0_saar(arg
, cpu_env
);
7150 register_name
= "SAAR";
7153 goto cp0_unimplemented
;
7156 case CP0_REGISTER_10
:
7158 case CP0_REG10__ENTRYHI
:
7159 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7160 tcg_gen_ext32s_tl(arg
, arg
);
7161 register_name
= "EntryHi";
7164 goto cp0_unimplemented
;
7167 case CP0_REGISTER_11
:
7169 case CP0_REG11__COMPARE
:
7170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7171 register_name
= "Compare";
7173 /* 6,7 are implementation dependent */
7175 goto cp0_unimplemented
;
7178 case CP0_REGISTER_12
:
7181 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7182 register_name
= "Status";
7185 check_insn(ctx
, ISA_MIPS32R2
);
7186 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7187 register_name
= "IntCtl";
7190 check_insn(ctx
, ISA_MIPS32R2
);
7191 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7192 register_name
= "SRSCtl";
7195 check_insn(ctx
, ISA_MIPS32R2
);
7196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7197 register_name
= "SRSMap";
7200 goto cp0_unimplemented
;
7203 case CP0_REGISTER_13
:
7206 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7207 register_name
= "Cause";
7210 goto cp0_unimplemented
;
7213 case CP0_REGISTER_14
:
7216 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7217 tcg_gen_ext32s_tl(arg
, arg
);
7218 register_name
= "EPC";
7221 goto cp0_unimplemented
;
7224 case CP0_REGISTER_15
:
7227 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7228 register_name
= "PRid";
7231 check_insn(ctx
, ISA_MIPS32R2
);
7232 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7233 tcg_gen_ext32s_tl(arg
, arg
);
7234 register_name
= "EBase";
7237 check_insn(ctx
, ISA_MIPS32R2
);
7238 CP0_CHECK(ctx
->cmgcr
);
7239 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7240 tcg_gen_ext32s_tl(arg
, arg
);
7241 register_name
= "CMGCRBase";
7244 goto cp0_unimplemented
;
7247 case CP0_REGISTER_16
:
7250 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7251 register_name
= "Config";
7254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7255 register_name
= "Config1";
7258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7259 register_name
= "Config2";
7262 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7263 register_name
= "Config3";
7266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7267 register_name
= "Config4";
7270 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7271 register_name
= "Config5";
7273 /* 6,7 are implementation dependent */
7275 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7276 register_name
= "Config6";
7279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7280 register_name
= "Config7";
7283 goto cp0_unimplemented
;
7286 case CP0_REGISTER_17
:
7289 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7290 register_name
= "LLAddr";
7293 CP0_CHECK(ctx
->mrp
);
7294 gen_helper_mfc0_maar(arg
, cpu_env
);
7295 register_name
= "MAAR";
7298 CP0_CHECK(ctx
->mrp
);
7299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7300 register_name
= "MAARI";
7303 goto cp0_unimplemented
;
7306 case CP0_REGISTER_18
:
7316 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7317 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7318 register_name
= "WatchLo";
7321 goto cp0_unimplemented
;
7324 case CP0_REGISTER_19
:
7334 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7335 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7336 register_name
= "WatchHi";
7339 goto cp0_unimplemented
;
7342 case CP0_REGISTER_20
:
7345 #if defined(TARGET_MIPS64)
7346 check_insn(ctx
, ISA_MIPS3
);
7347 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7348 tcg_gen_ext32s_tl(arg
, arg
);
7349 register_name
= "XContext";
7353 goto cp0_unimplemented
;
7356 case CP0_REGISTER_21
:
7357 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7358 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7361 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7362 register_name
= "Framemask";
7365 goto cp0_unimplemented
;
7368 case CP0_REGISTER_22
:
7369 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
        register_name = "Diagnostic"; /* implementation dependent */
7372 case CP0_REGISTER_23
:
7375 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7376 register_name
= "Debug";
7379 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7380 register_name
= "TraceControl";
7381 goto cp0_unimplemented
;
7383 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7384 register_name
= "TraceControl2";
7385 goto cp0_unimplemented
;
7387 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7388 register_name
= "UserTraceData";
7389 goto cp0_unimplemented
;
7391 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7392 register_name
= "TraceBPC";
7393 goto cp0_unimplemented
;
7395 goto cp0_unimplemented
;
7398 case CP0_REGISTER_24
:
7402 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7403 tcg_gen_ext32s_tl(arg
, arg
);
7404 register_name
= "DEPC";
7407 goto cp0_unimplemented
;
7410 case CP0_REGISTER_25
:
7413 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7414 register_name
= "Performance0";
7417 /* gen_helper_mfc0_performance1(arg); */
7418 register_name
= "Performance1";
7419 goto cp0_unimplemented
;
7421 /* gen_helper_mfc0_performance2(arg); */
7422 register_name
= "Performance2";
7423 goto cp0_unimplemented
;
7425 /* gen_helper_mfc0_performance3(arg); */
7426 register_name
= "Performance3";
7427 goto cp0_unimplemented
;
7429 /* gen_helper_mfc0_performance4(arg); */
7430 register_name
= "Performance4";
7431 goto cp0_unimplemented
;
7433 /* gen_helper_mfc0_performance5(arg); */
7434 register_name
= "Performance5";
7435 goto cp0_unimplemented
;
7437 /* gen_helper_mfc0_performance6(arg); */
7438 register_name
= "Performance6";
7439 goto cp0_unimplemented
;
7441 /* gen_helper_mfc0_performance7(arg); */
7442 register_name
= "Performance7";
7443 goto cp0_unimplemented
;
7445 goto cp0_unimplemented
;
7448 case CP0_REGISTER_26
:
7451 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7452 register_name
= "ErrCtl";
7455 goto cp0_unimplemented
;
7458 case CP0_REGISTER_27
:
7464 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7465 register_name
= "CacheErr";
7468 goto cp0_unimplemented
;
7471 case CP0_REGISTER_28
:
7478 TCGv_i64 tmp
= tcg_temp_new_i64();
7479 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7480 gen_move_low32(arg
, tmp
);
7481 tcg_temp_free_i64(tmp
);
7483 register_name
= "TagLo";
7489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7490 register_name
= "DataLo";
7493 goto cp0_unimplemented
;
7496 case CP0_REGISTER_29
:
7502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7503 register_name
= "TagHi";
7509 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7510 register_name
= "DataHi";
7513 goto cp0_unimplemented
;
7516 case CP0_REGISTER_30
:
7519 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7520 tcg_gen_ext32s_tl(arg
, arg
);
7521 register_name
= "ErrorEPC";
7524 goto cp0_unimplemented
;
7527 case CP0_REGISTER_31
:
7531 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7532 register_name
= "DESAVE";
7540 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7541 tcg_gen_ld_tl(arg
, cpu_env
,
7542 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7543 tcg_gen_ext32s_tl(arg
, arg
);
7544 register_name
= "KScratch";
7547 goto cp0_unimplemented
;
7551 goto cp0_unimplemented
;
    trace_mips_translate_c0("mfc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mfc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
}
7562 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7564 const char *register_name
= "invalid";
7567 check_insn(ctx
, ISA_MIPS32
);
7570 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7575 case CP0_REGISTER_00
:
7577 case CP0_REG00__INDEX
:
7578 gen_helper_mtc0_index(cpu_env
, arg
);
7579 register_name
= "Index";
7581 case CP0_REG00__MVPCONTROL
:
7582 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7583 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7584 register_name
= "MVPControl";
7586 case CP0_REG00__MVPCONF0
:
7587 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7589 register_name
= "MVPConf0";
7591 case CP0_REG00__MVPCONF1
:
7592 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7594 register_name
= "MVPConf1";
7596 case CP0_REG00__VPCONTROL
:
7599 register_name
= "VPControl";
7602 goto cp0_unimplemented
;
7605 case CP0_REGISTER_01
:
7607 case CP0_REG01__RANDOM
:
7609 register_name
= "Random";
7611 case CP0_REG01__VPECONTROL
:
7612 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7613 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7614 register_name
= "VPEControl";
7616 case CP0_REG01__VPECONF0
:
7617 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7618 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7619 register_name
= "VPEConf0";
7621 case CP0_REG01__VPECONF1
:
7622 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7623 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7624 register_name
= "VPEConf1";
7626 case CP0_REG01__YQMASK
:
7627 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7628 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7629 register_name
= "YQMask";
7631 case CP0_REG01__VPESCHEDULE
:
7632 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7633 tcg_gen_st_tl(arg
, cpu_env
,
7634 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7635 register_name
= "VPESchedule";
7637 case CP0_REG01__VPESCHEFBACK
:
7638 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7639 tcg_gen_st_tl(arg
, cpu_env
,
7640 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7641 register_name
= "VPEScheFBack";
7643 case CP0_REG01__VPEOPT
:
7644 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7645 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7646 register_name
= "VPEOpt";
7649 goto cp0_unimplemented
;
7652 case CP0_REGISTER_02
:
7654 case CP0_REG02__ENTRYLO0
:
7655 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7656 register_name
= "EntryLo0";
7658 case CP0_REG02__TCSTATUS
:
7659 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7660 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7661 register_name
= "TCStatus";
7663 case CP0_REG02__TCBIND
:
7664 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7665 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7666 register_name
= "TCBind";
7668 case CP0_REG02__TCRESTART
:
7669 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7670 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7671 register_name
= "TCRestart";
7673 case CP0_REG02__TCHALT
:
7674 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7675 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7676 register_name
= "TCHalt";
7678 case CP0_REG02__TCCONTEXT
:
7679 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7680 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7681 register_name
= "TCContext";
7683 case CP0_REG02__TCSCHEDULE
:
7684 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7685 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7686 register_name
= "TCSchedule";
7688 case CP0_REG02__TCSCHEFBACK
:
7689 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7690 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7691 register_name
= "TCScheFBack";
7694 goto cp0_unimplemented
;
7697 case CP0_REGISTER_03
:
7699 case CP0_REG03__ENTRYLO1
:
7700 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7701 register_name
= "EntryLo1";
7703 case CP0_REG03__GLOBALNUM
:
7706 register_name
= "GlobalNumber";
7709 goto cp0_unimplemented
;
7712 case CP0_REGISTER_04
:
7714 case CP0_REG04__CONTEXT
:
7715 gen_helper_mtc0_context(cpu_env
, arg
);
7716 register_name
= "Context";
7718 case CP0_REG04__CONTEXTCONFIG
:
7720 /* gen_helper_mtc0_contextconfig(arg); */
7721 register_name
= "ContextConfig";
7722 goto cp0_unimplemented
;
7723 case CP0_REG04__USERLOCAL
:
7724 CP0_CHECK(ctx
->ulri
);
7725 tcg_gen_st_tl(arg
, cpu_env
,
7726 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7727 register_name
= "UserLocal";
7730 goto cp0_unimplemented
;
7733 case CP0_REGISTER_05
:
7735 case CP0_REG05__PAGEMASK
:
7736 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7737 register_name
= "PageMask";
7739 case CP0_REG05__PAGEGRAIN
:
7740 check_insn(ctx
, ISA_MIPS32R2
);
7741 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7742 register_name
= "PageGrain";
7743 ctx
->base
.is_jmp
= DISAS_STOP
;
7745 case CP0_REG05__SEGCTL0
:
7747 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7748 register_name
= "SegCtl0";
7750 case CP0_REG05__SEGCTL1
:
7752 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7753 register_name
= "SegCtl1";
7755 case CP0_REG05__SEGCTL2
:
7757 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7758 register_name
= "SegCtl2";
7760 case CP0_REG05__PWBASE
:
7762 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7763 register_name
= "PWBase";
7765 case CP0_REG05__PWFIELD
:
7767 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7768 register_name
= "PWField";
7770 case CP0_REG05__PWSIZE
:
7772 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7773 register_name
= "PWSize";
7776 goto cp0_unimplemented
;
7779 case CP0_REGISTER_06
:
7781 case CP0_REG06__WIRED
:
7782 gen_helper_mtc0_wired(cpu_env
, arg
);
7783 register_name
= "Wired";
7785 case CP0_REG06__SRSCONF0
:
7786 check_insn(ctx
, ISA_MIPS32R2
);
7787 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7788 register_name
= "SRSConf0";
7790 case CP0_REG06__SRSCONF1
:
7791 check_insn(ctx
, ISA_MIPS32R2
);
7792 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7793 register_name
= "SRSConf1";
7795 case CP0_REG06__SRSCONF2
:
7796 check_insn(ctx
, ISA_MIPS32R2
);
7797 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7798 register_name
= "SRSConf2";
7800 case CP0_REG06__SRSCONF3
:
7801 check_insn(ctx
, ISA_MIPS32R2
);
7802 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7803 register_name
= "SRSConf3";
7805 case CP0_REG06__SRSCONF4
:
7806 check_insn(ctx
, ISA_MIPS32R2
);
7807 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7808 register_name
= "SRSConf4";
7810 case CP0_REG06__PWCTL
:
7812 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7813 register_name
= "PWCtl";
7816 goto cp0_unimplemented
;
7819 case CP0_REGISTER_07
:
7821 case CP0_REG07__HWRENA
:
7822 check_insn(ctx
, ISA_MIPS32R2
);
7823 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7824 ctx
->base
.is_jmp
= DISAS_STOP
;
7825 register_name
= "HWREna";
7828 goto cp0_unimplemented
;
7831 case CP0_REGISTER_08
:
7833 case CP0_REG08__BADVADDR
:
7835 register_name
= "BadVAddr";
7837 case CP0_REG08__BADINSTR
:
7839 register_name
= "BadInstr";
7841 case CP0_REG08__BADINSTRP
:
7843 register_name
= "BadInstrP";
7845 case CP0_REG08__BADINSTRX
:
7847 register_name
= "BadInstrX";
7850 goto cp0_unimplemented
;
7853 case CP0_REGISTER_09
:
7855 case CP0_REG09__COUNT
:
7856 gen_helper_mtc0_count(cpu_env
, arg
);
7857 register_name
= "Count";
7859 case CP0_REG09__SAARI
:
7860 CP0_CHECK(ctx
->saar
);
7861 gen_helper_mtc0_saari(cpu_env
, arg
);
7862 register_name
= "SAARI";
7864 case CP0_REG09__SAAR
:
7865 CP0_CHECK(ctx
->saar
);
7866 gen_helper_mtc0_saar(cpu_env
, arg
);
7867 register_name
= "SAAR";
7870 goto cp0_unimplemented
;
7873 case CP0_REGISTER_10
:
7875 case CP0_REG10__ENTRYHI
:
7876 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7877 register_name
= "EntryHi";
7880 goto cp0_unimplemented
;
7883 case CP0_REGISTER_11
:
7885 case CP0_REG11__COMPARE
:
7886 gen_helper_mtc0_compare(cpu_env
, arg
);
7887 register_name
= "Compare";
7889 /* 6,7 are implementation dependent */
7891 goto cp0_unimplemented
;
7894 case CP0_REGISTER_12
:
7897 save_cpu_state(ctx
, 1);
7898 gen_helper_mtc0_status(cpu_env
, arg
);
7899 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7900 gen_save_pc(ctx
->base
.pc_next
+ 4);
7901 ctx
->base
.is_jmp
= DISAS_EXIT
;
7902 register_name
= "Status";
7905 check_insn(ctx
, ISA_MIPS32R2
);
7906 gen_helper_mtc0_intctl(cpu_env
, arg
);
7907 /* Stop translation as we may have switched the execution mode */
7908 ctx
->base
.is_jmp
= DISAS_STOP
;
7909 register_name
= "IntCtl";
7912 check_insn(ctx
, ISA_MIPS32R2
);
7913 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7914 /* Stop translation as we may have switched the execution mode */
7915 ctx
->base
.is_jmp
= DISAS_STOP
;
7916 register_name
= "SRSCtl";
7919 check_insn(ctx
, ISA_MIPS32R2
);
7920 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7921 /* Stop translation as we may have switched the execution mode */
7922 ctx
->base
.is_jmp
= DISAS_STOP
;
7923 register_name
= "SRSMap";
7926 goto cp0_unimplemented
;
7929 case CP0_REGISTER_13
:
7932 save_cpu_state(ctx
, 1);
7933 gen_helper_mtc0_cause(cpu_env
, arg
);
7935 * Stop translation as we may have triggered an interrupt.
7936 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7937 * translated code to check for pending interrupts.
7939 gen_save_pc(ctx
->base
.pc_next
+ 4);
7940 ctx
->base
.is_jmp
= DISAS_EXIT
;
7941 register_name
= "Cause";
7944 goto cp0_unimplemented
;
7947 case CP0_REGISTER_14
:
7950 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7951 register_name
= "EPC";
7954 goto cp0_unimplemented
;
7957 case CP0_REGISTER_15
:
7961 register_name
= "PRid";
7964 check_insn(ctx
, ISA_MIPS32R2
);
7965 gen_helper_mtc0_ebase(cpu_env
, arg
);
7966 register_name
= "EBase";
7969 goto cp0_unimplemented
;
7972 case CP0_REGISTER_16
:
7975 gen_helper_mtc0_config0(cpu_env
, arg
);
7976 register_name
= "Config";
7977 /* Stop translation as we may have switched the execution mode */
7978 ctx
->base
.is_jmp
= DISAS_STOP
;
7981 /* ignored, read only */
7982 register_name
= "Config1";
7985 gen_helper_mtc0_config2(cpu_env
, arg
);
7986 register_name
= "Config2";
7987 /* Stop translation as we may have switched the execution mode */
7988 ctx
->base
.is_jmp
= DISAS_STOP
;
7991 gen_helper_mtc0_config3(cpu_env
, arg
);
7992 register_name
= "Config3";
7993 /* Stop translation as we may have switched the execution mode */
7994 ctx
->base
.is_jmp
= DISAS_STOP
;
7997 gen_helper_mtc0_config4(cpu_env
, arg
);
7998 register_name
= "Config4";
7999 ctx
->base
.is_jmp
= DISAS_STOP
;
8002 gen_helper_mtc0_config5(cpu_env
, arg
);
8003 register_name
= "Config5";
8004 /* Stop translation as we may have switched the execution mode */
8005 ctx
->base
.is_jmp
= DISAS_STOP
;
8007 /* 6,7 are implementation dependent */
8010 register_name
= "Config6";
8014 register_name
= "Config7";
8017 register_name
= "Invalid config selector";
8018 goto cp0_unimplemented
;
8021 case CP0_REGISTER_17
:
8024 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8025 register_name
= "LLAddr";
8028 CP0_CHECK(ctx
->mrp
);
8029 gen_helper_mtc0_maar(cpu_env
, arg
);
8030 register_name
= "MAAR";
8033 CP0_CHECK(ctx
->mrp
);
8034 gen_helper_mtc0_maari(cpu_env
, arg
);
8035 register_name
= "MAARI";
8038 goto cp0_unimplemented
;
8041 case CP0_REGISTER_18
:
8051 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8052 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8053 register_name
= "WatchLo";
8056 goto cp0_unimplemented
;
8059 case CP0_REGISTER_19
:
8069 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8070 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8071 register_name
= "WatchHi";
8074 goto cp0_unimplemented
;
8077 case CP0_REGISTER_20
:
8080 #if defined(TARGET_MIPS64)
8081 check_insn(ctx
, ISA_MIPS3
);
8082 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8083 register_name
= "XContext";
8087 goto cp0_unimplemented
;
8090 case CP0_REGISTER_21
:
8091 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8092 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8095 gen_helper_mtc0_framemask(cpu_env
, arg
);
8096 register_name
= "Framemask";
8099 goto cp0_unimplemented
;
8102 case CP0_REGISTER_22
:
8104 register_name
= "Diagnostic"; /* implementation dependent */
8106 case CP0_REGISTER_23
:
8109 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8110 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8111 gen_save_pc(ctx
->base
.pc_next
+ 4);
8112 ctx
->base
.is_jmp
= DISAS_EXIT
;
8113 register_name
= "Debug";
8116 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8117 register_name
= "TraceControl";
8118 /* Stop translation as we may have switched the execution mode */
8119 ctx
->base
.is_jmp
= DISAS_STOP
;
8120 goto cp0_unimplemented
;
8122 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8123 register_name
= "TraceControl2";
8124 /* Stop translation as we may have switched the execution mode */
8125 ctx
->base
.is_jmp
= DISAS_STOP
;
8126 goto cp0_unimplemented
;
8128 /* Stop translation as we may have switched the execution mode */
8129 ctx
->base
.is_jmp
= DISAS_STOP
;
8130 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8131 register_name
= "UserTraceData";
8132 /* Stop translation as we may have switched the execution mode */
8133 ctx
->base
.is_jmp
= DISAS_STOP
;
8134 goto cp0_unimplemented
;
8136 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8137 /* Stop translation as we may have switched the execution mode */
8138 ctx
->base
.is_jmp
= DISAS_STOP
;
8139 register_name
= "TraceBPC";
8140 goto cp0_unimplemented
;
8142 goto cp0_unimplemented
;
8145 case CP0_REGISTER_24
:
8149 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8150 register_name
= "DEPC";
8153 goto cp0_unimplemented
;
8156 case CP0_REGISTER_25
:
8159 gen_helper_mtc0_performance0(cpu_env
, arg
);
8160 register_name
= "Performance0";
8163 /* gen_helper_mtc0_performance1(arg); */
8164 register_name
= "Performance1";
8165 goto cp0_unimplemented
;
8167 /* gen_helper_mtc0_performance2(arg); */
8168 register_name
= "Performance2";
8169 goto cp0_unimplemented
;
8171 /* gen_helper_mtc0_performance3(arg); */
8172 register_name
= "Performance3";
8173 goto cp0_unimplemented
;
8175 /* gen_helper_mtc0_performance4(arg); */
8176 register_name
= "Performance4";
8177 goto cp0_unimplemented
;
8179 /* gen_helper_mtc0_performance5(arg); */
8180 register_name
= "Performance5";
8181 goto cp0_unimplemented
;
8183 /* gen_helper_mtc0_performance6(arg); */
8184 register_name
= "Performance6";
8185 goto cp0_unimplemented
;
8187 /* gen_helper_mtc0_performance7(arg); */
8188 register_name
= "Performance7";
8189 goto cp0_unimplemented
;
8191 goto cp0_unimplemented
;
8194 case CP0_REGISTER_26
:
8197 gen_helper_mtc0_errctl(cpu_env
, arg
);
8198 ctx
->base
.is_jmp
= DISAS_STOP
;
8199 register_name
= "ErrCtl";
8202 goto cp0_unimplemented
;
8205 case CP0_REGISTER_27
:
8212 register_name
= "CacheErr";
8215 goto cp0_unimplemented
;
8218 case CP0_REGISTER_28
:
8224 gen_helper_mtc0_taglo(cpu_env
, arg
);
8225 register_name
= "TagLo";
8231 gen_helper_mtc0_datalo(cpu_env
, arg
);
8232 register_name
= "DataLo";
8235 goto cp0_unimplemented
;
8238 case CP0_REGISTER_29
:
8244 gen_helper_mtc0_taghi(cpu_env
, arg
);
8245 register_name
= "TagHi";
8251 gen_helper_mtc0_datahi(cpu_env
, arg
);
8252 register_name
= "DataHi";
8255 register_name
= "invalid sel";
8256 goto cp0_unimplemented
;
8259 case CP0_REGISTER_30
:
8262 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8263 register_name
= "ErrorEPC";
8266 goto cp0_unimplemented
;
8269 case CP0_REGISTER_31
:
8273 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8274 register_name
= "DESAVE";
8282 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8283 tcg_gen_st_tl(arg
, cpu_env
,
8284 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8285 register_name
= "KScratch";
8288 goto cp0_unimplemented
;
8292 goto cp0_unimplemented
;
8294 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8296 /* For simplicity assume that all writes can cause interrupts. */
8297 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8299 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8300 * translated code to check for pending interrupts.
8302 gen_save_pc(ctx
->base
.pc_next
+ 4);
8303 ctx
->base
.is_jmp
= DISAS_EXIT
;
8308 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8309 register_name
, reg
, sel
);
8312 #if defined(TARGET_MIPS64)
8313 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8315 const char *register_name
= "invalid";
8318 check_insn(ctx
, ISA_MIPS64
);
8322 case CP0_REGISTER_00
:
8324 case CP0_REG00__INDEX
:
8325 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8326 register_name
= "Index";
8328 case CP0_REG00__MVPCONTROL
:
8329 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8330 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8331 register_name
= "MVPControl";
8333 case CP0_REG00__MVPCONF0
:
8334 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8335 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8336 register_name
= "MVPConf0";
8338 case CP0_REG00__MVPCONF1
:
8339 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8340 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8341 register_name
= "MVPConf1";
8343 case CP0_REG00__VPCONTROL
:
8345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8346 register_name
= "VPControl";
8349 goto cp0_unimplemented
;
8352 case CP0_REGISTER_01
:
8354 case CP0_REG01__RANDOM
:
8355 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8356 gen_helper_mfc0_random(arg
, cpu_env
);
8357 register_name
= "Random";
8359 case CP0_REG01__VPECONTROL
:
8360 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8361 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8362 register_name
= "VPEControl";
8364 case CP0_REG01__VPECONF0
:
8365 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8366 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8367 register_name
= "VPEConf0";
8369 case CP0_REG01__VPECONF1
:
8370 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8372 register_name
= "VPEConf1";
8374 case CP0_REG01__YQMASK
:
8375 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8376 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8377 register_name
= "YQMask";
8379 case CP0_REG01__VPESCHEDULE
:
8380 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8381 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8382 register_name
= "VPESchedule";
8384 case CP0_REG01__VPESCHEFBACK
:
8385 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8386 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8387 register_name
= "VPEScheFBack";
8389 case CP0_REG01__VPEOPT
:
8390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8392 register_name
= "VPEOpt";
8395 goto cp0_unimplemented
;
8398 case CP0_REGISTER_02
:
8400 case CP0_REG02__ENTRYLO0
:
8401 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8402 register_name
= "EntryLo0";
8404 case CP0_REG02__TCSTATUS
:
8405 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8406 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8407 register_name
= "TCStatus";
8409 case CP0_REG02__TCBIND
:
8410 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8411 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8412 register_name
= "TCBind";
8414 case CP0_REG02__TCRESTART
:
8415 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8416 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8417 register_name
= "TCRestart";
8419 case CP0_REG02__TCHALT
:
8420 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8421 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8422 register_name
= "TCHalt";
8424 case CP0_REG02__TCCONTEXT
:
8425 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8426 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8427 register_name
= "TCContext";
8429 case CP0_REG02__TCSCHEDULE
:
8430 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8431 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8432 register_name
= "TCSchedule";
8434 case CP0_REG02__TCSCHEFBACK
:
8435 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8436 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8437 register_name
= "TCScheFBack";
8440 goto cp0_unimplemented
;
8443 case CP0_REGISTER_03
:
8445 case CP0_REG03__ENTRYLO1
:
8446 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8447 register_name
= "EntryLo1";
8449 case CP0_REG03__GLOBALNUM
:
8451 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8452 register_name
= "GlobalNumber";
8455 goto cp0_unimplemented
;
8458 case CP0_REGISTER_04
:
8460 case CP0_REG04__CONTEXT
:
8461 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8462 register_name
= "Context";
8464 case CP0_REG04__CONTEXTCONFIG
:
8466 /* gen_helper_dmfc0_contextconfig(arg); */
8467 register_name
= "ContextConfig";
8468 goto cp0_unimplemented
;
8469 case CP0_REG04__USERLOCAL
:
8470 CP0_CHECK(ctx
->ulri
);
8471 tcg_gen_ld_tl(arg
, cpu_env
,
8472 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8473 register_name
= "UserLocal";
8476 goto cp0_unimplemented
;
8479 case CP0_REGISTER_05
:
8481 case CP0_REG05__PAGEMASK
:
8482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8483 register_name
= "PageMask";
8485 case CP0_REG05__PAGEGRAIN
:
8486 check_insn(ctx
, ISA_MIPS32R2
);
8487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8488 register_name
= "PageGrain";
8490 case CP0_REG05__SEGCTL0
:
8492 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8493 register_name
= "SegCtl0";
8495 case CP0_REG05__SEGCTL1
:
8497 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8498 register_name
= "SegCtl1";
8500 case CP0_REG05__SEGCTL2
:
8502 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8503 register_name
= "SegCtl2";
8505 case CP0_REG05__PWBASE
:
8507 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8508 register_name
= "PWBase";
8510 case CP0_REG05__PWFIELD
:
8512 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8513 register_name
= "PWField";
8515 case CP0_REG05__PWSIZE
:
8517 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8518 register_name
= "PWSize";
8521 goto cp0_unimplemented
;
8524 case CP0_REGISTER_06
:
8526 case CP0_REG06__WIRED
:
8527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8528 register_name
= "Wired";
8530 case CP0_REG06__SRSCONF0
:
8531 check_insn(ctx
, ISA_MIPS32R2
);
8532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8533 register_name
= "SRSConf0";
8535 case CP0_REG06__SRSCONF1
:
8536 check_insn(ctx
, ISA_MIPS32R2
);
8537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8538 register_name
= "SRSConf1";
8540 case CP0_REG06__SRSCONF2
:
8541 check_insn(ctx
, ISA_MIPS32R2
);
8542 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8543 register_name
= "SRSConf2";
8545 case CP0_REG06__SRSCONF3
:
8546 check_insn(ctx
, ISA_MIPS32R2
);
8547 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8548 register_name
= "SRSConf3";
8550 case CP0_REG06__SRSCONF4
:
8551 check_insn(ctx
, ISA_MIPS32R2
);
8552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8553 register_name
= "SRSConf4";
8555 case CP0_REG06__PWCTL
:
8557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8558 register_name
= "PWCtl";
8561 goto cp0_unimplemented
;
8564 case CP0_REGISTER_07
:
8566 case CP0_REG07__HWRENA
:
8567 check_insn(ctx
, ISA_MIPS32R2
);
8568 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8569 register_name
= "HWREna";
8572 goto cp0_unimplemented
;
8575 case CP0_REGISTER_08
:
8577 case CP0_REG08__BADVADDR
:
8578 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8579 register_name
= "BadVAddr";
8581 case CP0_REG08__BADINSTR
:
8583 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8584 register_name
= "BadInstr";
8586 case CP0_REG08__BADINSTRP
:
8588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8589 register_name
= "BadInstrP";
8591 case CP0_REG08__BADINSTRX
:
8593 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8594 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8595 register_name
= "BadInstrX";
8598 goto cp0_unimplemented
;
8601 case CP0_REGISTER_09
:
8603 case CP0_REG09__COUNT
:
8604 /* Mark as an IO operation because we read the time. */
8605 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8608 gen_helper_mfc0_count(arg
, cpu_env
);
8610 * Break the TB to be able to take timer interrupts immediately
8611 * after reading count. DISAS_STOP isn't sufficient, we need to
8612 * ensure we break completely out of translated code.
8614 gen_save_pc(ctx
->base
.pc_next
+ 4);
8615 ctx
->base
.is_jmp
= DISAS_EXIT
;
8616 register_name
= "Count";
8618 case CP0_REG09__SAARI
:
8619 CP0_CHECK(ctx
->saar
);
8620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8621 register_name
= "SAARI";
8623 case CP0_REG09__SAAR
:
8624 CP0_CHECK(ctx
->saar
);
8625 gen_helper_dmfc0_saar(arg
, cpu_env
);
8626 register_name
= "SAAR";
8629 goto cp0_unimplemented
;
8632 case CP0_REGISTER_10
:
8634 case CP0_REG10__ENTRYHI
:
8635 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8636 register_name
= "EntryHi";
8639 goto cp0_unimplemented
;
8642 case CP0_REGISTER_11
:
8644 case CP0_REG11__COMPARE
:
8645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8646 register_name
= "Compare";
8648 /* 6,7 are implementation dependent */
8650 goto cp0_unimplemented
;
8653 case CP0_REGISTER_12
:
8656 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8657 register_name
= "Status";
8660 check_insn(ctx
, ISA_MIPS32R2
);
8661 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8662 register_name
= "IntCtl";
8665 check_insn(ctx
, ISA_MIPS32R2
);
8666 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8667 register_name
= "SRSCtl";
8670 check_insn(ctx
, ISA_MIPS32R2
);
8671 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8672 register_name
= "SRSMap";
8675 goto cp0_unimplemented
;
8678 case CP0_REGISTER_13
:
8681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8682 register_name
= "Cause";
8685 goto cp0_unimplemented
;
8688 case CP0_REGISTER_14
:
8691 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8692 register_name
= "EPC";
8695 goto cp0_unimplemented
;
8698 case CP0_REGISTER_15
:
8701 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8702 register_name
= "PRid";
8705 check_insn(ctx
, ISA_MIPS32R2
);
8706 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8707 register_name
= "EBase";
8710 check_insn(ctx
, ISA_MIPS32R2
);
8711 CP0_CHECK(ctx
->cmgcr
);
8712 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8713 register_name
= "CMGCRBase";
8716 goto cp0_unimplemented
;
8719 case CP0_REGISTER_16
:
8722 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8723 register_name
= "Config";
8726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8727 register_name
= "Config1";
8730 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8731 register_name
= "Config2";
8734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8735 register_name
= "Config3";
8738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8739 register_name
= "Config4";
8742 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8743 register_name
= "Config5";
8745 /* 6,7 are implementation dependent */
8747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8748 register_name
= "Config6";
8751 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8752 register_name
= "Config7";
8755 goto cp0_unimplemented
;
8758 case CP0_REGISTER_17
:
8761 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8762 register_name
= "LLAddr";
8765 CP0_CHECK(ctx
->mrp
);
8766 gen_helper_dmfc0_maar(arg
, cpu_env
);
8767 register_name
= "MAAR";
8770 CP0_CHECK(ctx
->mrp
);
8771 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8772 register_name
= "MAARI";
8775 goto cp0_unimplemented
;
8778 case CP0_REGISTER_18
:
8788 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8789 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8790 register_name
= "WatchLo";
8793 goto cp0_unimplemented
;
8796 case CP0_REGISTER_19
:
8806 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8807 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8808 register_name
= "WatchHi";
8811 goto cp0_unimplemented
;
8814 case CP0_REGISTER_20
:
8817 check_insn(ctx
, ISA_MIPS3
);
8818 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8819 register_name
= "XContext";
8822 goto cp0_unimplemented
;
8825 case CP0_REGISTER_21
:
8826 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8827 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8830 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8831 register_name
= "Framemask";
8834 goto cp0_unimplemented
;
8837 case CP0_REGISTER_22
:
8838 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8839 register_name
= "'Diagnostic"; /* implementation dependent */
8841 case CP0_REGISTER_23
:
8844 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8845 register_name
= "Debug";
8848 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8849 register_name
= "TraceControl";
8850 goto cp0_unimplemented
;
8852 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8853 register_name
= "TraceControl2";
8854 goto cp0_unimplemented
;
8856 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8857 register_name
= "UserTraceData";
8858 goto cp0_unimplemented
;
8860 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8861 register_name
= "TraceBPC";
8862 goto cp0_unimplemented
;
8864 goto cp0_unimplemented
;
8867 case CP0_REGISTER_24
:
8871 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8872 register_name
= "DEPC";
8875 goto cp0_unimplemented
;
8878 case CP0_REGISTER_25
:
8881 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8882 register_name
= "Performance0";
8885 /* gen_helper_dmfc0_performance1(arg); */
8886 register_name
= "Performance1";
8887 goto cp0_unimplemented
;
8889 /* gen_helper_dmfc0_performance2(arg); */
8890 register_name
= "Performance2";
8891 goto cp0_unimplemented
;
8893 /* gen_helper_dmfc0_performance3(arg); */
8894 register_name
= "Performance3";
8895 goto cp0_unimplemented
;
8897 /* gen_helper_dmfc0_performance4(arg); */
8898 register_name
= "Performance4";
8899 goto cp0_unimplemented
;
8901 /* gen_helper_dmfc0_performance5(arg); */
8902 register_name
= "Performance5";
8903 goto cp0_unimplemented
;
8905 /* gen_helper_dmfc0_performance6(arg); */
8906 register_name
= "Performance6";
8907 goto cp0_unimplemented
;
8909 /* gen_helper_dmfc0_performance7(arg); */
8910 register_name
= "Performance7";
8911 goto cp0_unimplemented
;
8913 goto cp0_unimplemented
;
8916 case CP0_REGISTER_26
:
8919 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8920 register_name
= "ErrCtl";
8923 goto cp0_unimplemented
;
8926 case CP0_REGISTER_27
:
8933 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8934 register_name
= "CacheErr";
8937 goto cp0_unimplemented
;
8940 case CP0_REGISTER_28
:
8946 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8947 register_name
= "TagLo";
8953 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8954 register_name
= "DataLo";
8957 goto cp0_unimplemented
;
8960 case CP0_REGISTER_29
:
8966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8967 register_name
= "TagHi";
8973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8974 register_name
= "DataHi";
8977 goto cp0_unimplemented
;
8980 case CP0_REGISTER_30
:
8983 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8984 register_name
= "ErrorEPC";
8987 goto cp0_unimplemented
;
8990 case CP0_REGISTER_31
:
8994 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8995 register_name
= "DESAVE";
9003 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9004 tcg_gen_ld_tl(arg
, cpu_env
,
9005 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9006 register_name
= "KScratch";
9009 goto cp0_unimplemented
;
9013 goto cp0_unimplemented
;
    trace_mips_translate_c0("dmfc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
}
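/*
 * gen_dmtc0: generate code for a doubleword move to a CP0 register
 * (dmtc0 rt, rd, sel).  The (reg, sel) pair selects the destination
 * register; most writes go through per-register helpers, and writes
 * that may change hflags or unmask an interrupt end the current TB
 * with DISAS_STOP or DISAS_EXIT.
 */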
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    check_insn(ctx, ISA_MIPS64);

    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
9037 case CP0_REGISTER_00
:
9039 case CP0_REG00__INDEX
:
9040 gen_helper_mtc0_index(cpu_env
, arg
);
9041 register_name
= "Index";
9043 case CP0_REG00__MVPCONTROL
:
9044 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9045 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
9046 register_name
= "MVPControl";
9048 case CP0_REG00__MVPCONF0
:
9049 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9051 register_name
= "MVPConf0";
9053 case CP0_REG00__MVPCONF1
:
9054 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9056 register_name
= "MVPConf1";
9058 case CP0_REG00__VPCONTROL
:
9061 register_name
= "VPControl";
9064 goto cp0_unimplemented
;
9067 case CP0_REGISTER_01
:
9069 case CP0_REG01__RANDOM
:
9071 register_name
= "Random";
9073 case CP0_REG01__VPECONTROL
:
9074 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9075 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9076 register_name
= "VPEControl";
9078 case CP0_REG01__VPECONF0
:
9079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9080 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9081 register_name
= "VPEConf0";
9083 case CP0_REG01__VPECONF1
:
9084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9085 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9086 register_name
= "VPEConf1";
9088 case CP0_REG01__YQMASK
:
9089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9090 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9091 register_name
= "YQMask";
9093 case CP0_REG01__VPESCHEDULE
:
9094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9095 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
9096 register_name
= "VPESchedule";
9098 case CP0_REG01__VPESCHEFBACK
:
9099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9100 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9101 register_name
= "VPEScheFBack";
9103 case CP0_REG01__VPEOPT
:
9104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9105 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9106 register_name
= "VPEOpt";
9109 goto cp0_unimplemented
;
9112 case CP0_REGISTER_02
:
9114 case CP0_REG02__ENTRYLO0
:
9115 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9116 register_name
= "EntryLo0";
9118 case CP0_REG02__TCSTATUS
:
9119 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9120 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9121 register_name
= "TCStatus";
9123 case CP0_REG02__TCBIND
:
9124 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9125 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9126 register_name
= "TCBind";
9128 case CP0_REG02__TCRESTART
:
9129 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9130 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9131 register_name
= "TCRestart";
9133 case CP0_REG02__TCHALT
:
9134 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9135 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9136 register_name
= "TCHalt";
9138 case CP0_REG02__TCCONTEXT
:
9139 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9140 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9141 register_name
= "TCContext";
9143 case CP0_REG02__TCSCHEDULE
:
9144 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9145 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9146 register_name
= "TCSchedule";
9148 case CP0_REG02__TCSCHEFBACK
:
9149 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9150 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9151 register_name
= "TCScheFBack";
9154 goto cp0_unimplemented
;
9157 case CP0_REGISTER_03
:
9159 case CP0_REG03__ENTRYLO1
:
9160 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9161 register_name
= "EntryLo1";
9163 case CP0_REG03__GLOBALNUM
:
9166 register_name
= "GlobalNumber";
9169 goto cp0_unimplemented
;
9172 case CP0_REGISTER_04
:
9174 case CP0_REG04__CONTEXT
:
9175 gen_helper_mtc0_context(cpu_env
, arg
);
9176 register_name
= "Context";
9178 case CP0_REG04__CONTEXTCONFIG
:
9180 /* gen_helper_dmtc0_contextconfig(arg); */
9181 register_name
= "ContextConfig";
9182 goto cp0_unimplemented
;
9183 case CP0_REG04__USERLOCAL
:
9184 CP0_CHECK(ctx
->ulri
);
9185 tcg_gen_st_tl(arg
, cpu_env
,
9186 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9187 register_name
= "UserLocal";
9190 goto cp0_unimplemented
;
9193 case CP0_REGISTER_05
:
9195 case CP0_REG05__PAGEMASK
:
9196 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9197 register_name
= "PageMask";
9199 case CP0_REG05__PAGEGRAIN
:
9200 check_insn(ctx
, ISA_MIPS32R2
);
9201 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9202 register_name
= "PageGrain";
9204 case CP0_REG05__SEGCTL0
:
9206 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9207 register_name
= "SegCtl0";
9209 case CP0_REG05__SEGCTL1
:
9211 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9212 register_name
= "SegCtl1";
9214 case CP0_REG05__SEGCTL2
:
9216 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9217 register_name
= "SegCtl2";
9219 case CP0_REG05__PWBASE
:
9221 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9222 register_name
= "PWBase";
9224 case CP0_REG05__PWFIELD
:
9226 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9227 register_name
= "PWField";
9229 case CP0_REG05__PWSIZE
:
9231 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9232 register_name
= "PWSize";
9235 goto cp0_unimplemented
;
9238 case CP0_REGISTER_06
:
9240 case CP0_REG06__WIRED
:
9241 gen_helper_mtc0_wired(cpu_env
, arg
);
9242 register_name
= "Wired";
9244 case CP0_REG06__SRSCONF0
:
9245 check_insn(ctx
, ISA_MIPS32R2
);
9246 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9247 register_name
= "SRSConf0";
9249 case CP0_REG06__SRSCONF1
:
9250 check_insn(ctx
, ISA_MIPS32R2
);
9251 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9252 register_name
= "SRSConf1";
9254 case CP0_REG06__SRSCONF2
:
9255 check_insn(ctx
, ISA_MIPS32R2
);
9256 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9257 register_name
= "SRSConf2";
9259 case CP0_REG06__SRSCONF3
:
9260 check_insn(ctx
, ISA_MIPS32R2
);
9261 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9262 register_name
= "SRSConf3";
9264 case CP0_REG06__SRSCONF4
:
9265 check_insn(ctx
, ISA_MIPS32R2
);
9266 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9267 register_name
= "SRSConf4";
9269 case CP0_REG06__PWCTL
:
9271 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9272 register_name
= "PWCtl";
9275 goto cp0_unimplemented
;
9278 case CP0_REGISTER_07
:
9280 case CP0_REG07__HWRENA
:
9281 check_insn(ctx
, ISA_MIPS32R2
);
9282 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9283 ctx
->base
.is_jmp
= DISAS_STOP
;
9284 register_name
= "HWREna";
9287 goto cp0_unimplemented
;
9290 case CP0_REGISTER_08
:
9292 case CP0_REG08__BADVADDR
:
9294 register_name
= "BadVAddr";
9296 case CP0_REG08__BADINSTR
:
9298 register_name
= "BadInstr";
9300 case CP0_REG08__BADINSTRP
:
9302 register_name
= "BadInstrP";
9304 case CP0_REG08__BADINSTRX
:
9306 register_name
= "BadInstrX";
9309 goto cp0_unimplemented
;
9312 case CP0_REGISTER_09
:
9314 case CP0_REG09__COUNT
:
9315 gen_helper_mtc0_count(cpu_env
, arg
);
9316 register_name
= "Count";
9318 case CP0_REG09__SAARI
:
9319 CP0_CHECK(ctx
->saar
);
9320 gen_helper_mtc0_saari(cpu_env
, arg
);
9321 register_name
= "SAARI";
9323 case CP0_REG09__SAAR
:
9324 CP0_CHECK(ctx
->saar
);
9325 gen_helper_mtc0_saar(cpu_env
, arg
);
9326 register_name
= "SAAR";
9329 goto cp0_unimplemented
;
9331 /* Stop translation as we may have switched the execution mode */
9332 ctx
->base
.is_jmp
= DISAS_STOP
;
9334 case CP0_REGISTER_10
:
9336 case CP0_REG10__ENTRYHI
:
9337 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9338 register_name
= "EntryHi";
9341 goto cp0_unimplemented
;
9344 case CP0_REGISTER_11
:
9346 case CP0_REG11__COMPARE
:
9347 gen_helper_mtc0_compare(cpu_env
, arg
);
9348 register_name
= "Compare";
9350 /* 6,7 are implementation dependent */
9352 goto cp0_unimplemented
;
9354 /* Stop translation as we may have switched the execution mode */
9355 ctx
->base
.is_jmp
= DISAS_STOP
;
9357 case CP0_REGISTER_12
:
9360 save_cpu_state(ctx
, 1);
9361 gen_helper_mtc0_status(cpu_env
, arg
);
9362 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9363 gen_save_pc(ctx
->base
.pc_next
+ 4);
9364 ctx
->base
.is_jmp
= DISAS_EXIT
;
9365 register_name
= "Status";
9368 check_insn(ctx
, ISA_MIPS32R2
);
9369 gen_helper_mtc0_intctl(cpu_env
, arg
);
9370 /* Stop translation as we may have switched the execution mode */
9371 ctx
->base
.is_jmp
= DISAS_STOP
;
9372 register_name
= "IntCtl";
9375 check_insn(ctx
, ISA_MIPS32R2
);
9376 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9377 /* Stop translation as we may have switched the execution mode */
9378 ctx
->base
.is_jmp
= DISAS_STOP
;
9379 register_name
= "SRSCtl";
9382 check_insn(ctx
, ISA_MIPS32R2
);
9383 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9384 /* Stop translation as we may have switched the execution mode */
9385 ctx
->base
.is_jmp
= DISAS_STOP
;
9386 register_name
= "SRSMap";
9389 goto cp0_unimplemented
;
9392 case CP0_REGISTER_13
:
9395 save_cpu_state(ctx
, 1);
9396 gen_helper_mtc0_cause(cpu_env
, arg
);
9398 * Stop translation as we may have triggered an interrupt.
9399 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9400 * translated code to check for pending interrupts.
9402 gen_save_pc(ctx
->base
.pc_next
+ 4);
9403 ctx
->base
.is_jmp
= DISAS_EXIT
;
9404 register_name
= "Cause";
9407 goto cp0_unimplemented
;
9410 case CP0_REGISTER_14
:
9413 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9414 register_name
= "EPC";
9417 goto cp0_unimplemented
;
9420 case CP0_REGISTER_15
:
9424 register_name
= "PRid";
9427 check_insn(ctx
, ISA_MIPS32R2
);
9428 gen_helper_mtc0_ebase(cpu_env
, arg
);
9429 register_name
= "EBase";
9432 goto cp0_unimplemented
;
9435 case CP0_REGISTER_16
:
9438 gen_helper_mtc0_config0(cpu_env
, arg
);
9439 register_name
= "Config";
9440 /* Stop translation as we may have switched the execution mode */
9441 ctx
->base
.is_jmp
= DISAS_STOP
;
9444 /* ignored, read only */
9445 register_name
= "Config1";
9448 gen_helper_mtc0_config2(cpu_env
, arg
);
9449 register_name
= "Config2";
9450 /* Stop translation as we may have switched the execution mode */
9451 ctx
->base
.is_jmp
= DISAS_STOP
;
9454 gen_helper_mtc0_config3(cpu_env
, arg
);
9455 register_name
= "Config3";
9456 /* Stop translation as we may have switched the execution mode */
9457 ctx
->base
.is_jmp
= DISAS_STOP
;
9460 /* currently ignored */
9461 register_name
= "Config4";
9464 gen_helper_mtc0_config5(cpu_env
, arg
);
9465 register_name
= "Config5";
9466 /* Stop translation as we may have switched the execution mode */
9467 ctx
->base
.is_jmp
= DISAS_STOP
;
9469 /* 6,7 are implementation dependent */
9471 register_name
= "Invalid config selector";
9472 goto cp0_unimplemented
;
9475 case CP0_REGISTER_17
:
9478 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9479 register_name
= "LLAddr";
9482 CP0_CHECK(ctx
->mrp
);
9483 gen_helper_mtc0_maar(cpu_env
, arg
);
9484 register_name
= "MAAR";
9487 CP0_CHECK(ctx
->mrp
);
9488 gen_helper_mtc0_maari(cpu_env
, arg
);
9489 register_name
= "MAARI";
9492 goto cp0_unimplemented
;
9495 case CP0_REGISTER_18
:
9505 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9506 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9507 register_name
= "WatchLo";
9510 goto cp0_unimplemented
;
9513 case CP0_REGISTER_19
:
9523 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9524 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9525 register_name
= "WatchHi";
9528 goto cp0_unimplemented
;
9531 case CP0_REGISTER_20
:
9534 check_insn(ctx
, ISA_MIPS3
);
9535 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9536 register_name
= "XContext";
9539 goto cp0_unimplemented
;
9542 case CP0_REGISTER_21
:
9543 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9544 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9547 gen_helper_mtc0_framemask(cpu_env
, arg
);
9548 register_name
= "Framemask";
9551 goto cp0_unimplemented
;
9554 case CP0_REGISTER_22
:
9556 register_name
= "Diagnostic"; /* implementation dependent */
9558 case CP0_REGISTER_23
:
9561 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9562 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9563 gen_save_pc(ctx
->base
.pc_next
+ 4);
9564 ctx
->base
.is_jmp
= DISAS_EXIT
;
9565 register_name
= "Debug";
9568 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9569 /* Stop translation as we may have switched the execution mode */
9570 ctx
->base
.is_jmp
= DISAS_STOP
;
9571 register_name
= "TraceControl";
9572 goto cp0_unimplemented
;
9574 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9575 /* Stop translation as we may have switched the execution mode */
9576 ctx
->base
.is_jmp
= DISAS_STOP
;
9577 register_name
= "TraceControl2";
9578 goto cp0_unimplemented
;
9580 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9581 /* Stop translation as we may have switched the execution mode */
9582 ctx
->base
.is_jmp
= DISAS_STOP
;
9583 register_name
= "UserTraceData";
9584 goto cp0_unimplemented
;
9586 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9587 /* Stop translation as we may have switched the execution mode */
9588 ctx
->base
.is_jmp
= DISAS_STOP
;
9589 register_name
= "TraceBPC";
9590 goto cp0_unimplemented
;
9592 goto cp0_unimplemented
;
9595 case CP0_REGISTER_24
:
9599 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9600 register_name
= "DEPC";
9603 goto cp0_unimplemented
;
9606 case CP0_REGISTER_25
:
9609 gen_helper_mtc0_performance0(cpu_env
, arg
);
9610 register_name
= "Performance0";
9613 /* gen_helper_mtc0_performance1(cpu_env, arg); */
9614 register_name
= "Performance1";
9615 goto cp0_unimplemented
;
9617 /* gen_helper_mtc0_performance2(cpu_env, arg); */
9618 register_name
= "Performance2";
9619 goto cp0_unimplemented
;
9621 /* gen_helper_mtc0_performance3(cpu_env, arg); */
9622 register_name
= "Performance3";
9623 goto cp0_unimplemented
;
9625 /* gen_helper_mtc0_performance4(cpu_env, arg); */
9626 register_name
= "Performance4";
9627 goto cp0_unimplemented
;
9629 /* gen_helper_mtc0_performance5(cpu_env, arg); */
9630 register_name
= "Performance5";
9631 goto cp0_unimplemented
;
9633 /* gen_helper_mtc0_performance6(cpu_env, arg); */
9634 register_name
= "Performance6";
9635 goto cp0_unimplemented
;
9637 /* gen_helper_mtc0_performance7(cpu_env, arg); */
9638 register_name
= "Performance7";
9639 goto cp0_unimplemented
;
9641 goto cp0_unimplemented
;
9644 case CP0_REGISTER_26
:
9647 gen_helper_mtc0_errctl(cpu_env
, arg
);
9648 ctx
->base
.is_jmp
= DISAS_STOP
;
9649 register_name
= "ErrCtl";
9652 goto cp0_unimplemented
;
9655 case CP0_REGISTER_27
:
9662 register_name
= "CacheErr";
9665 goto cp0_unimplemented
;
9668 case CP0_REGISTER_28
:
9674 gen_helper_mtc0_taglo(cpu_env
, arg
);
9675 register_name
= "TagLo";
9681 gen_helper_mtc0_datalo(cpu_env
, arg
);
9682 register_name
= "DataLo";
9685 goto cp0_unimplemented
;
9688 case CP0_REGISTER_29
:
9694 gen_helper_mtc0_taghi(cpu_env
, arg
);
9695 register_name
= "TagHi";
9701 gen_helper_mtc0_datahi(cpu_env
, arg
);
9702 register_name
= "DataHi";
9705 register_name
= "invalid sel";
9706 goto cp0_unimplemented
;
9709 case CP0_REGISTER_30
:
9712 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9713 register_name
= "ErrorEPC";
9716 goto cp0_unimplemented
;
9719 case CP0_REGISTER_31
:
9723 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9724 register_name
= "DESAVE";
9732 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9733 tcg_gen_st_tl(arg
, cpu_env
,
9734 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
9735 register_name
= "KScratch";
9738 goto cp0_unimplemented
;
        goto cp0_unimplemented;

    trace_mips_translate_c0("dmtc0", register_name, reg, sel);

    /* For simplicity assume that all writes can cause interrupts. */
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        /*
         * DISAS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts.
         */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
    }
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
}
#endif /* TARGET_MIPS64 */
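/*
 * gen_mftr: MFTR (MIPS MT ASE).  Read a register belonging to the
 * thread context selected by VPEControl.TargTC - a CP0 register, a GPR,
 * LO/HI/ACX, the DSP control register or an FPU register - and store
 * the result into GPR rd.  If the targeted TC is not accessible from
 * the current VPE, -1 is returned instead.
 */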
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE)))) {
        tcg_gen_movi_tl(t0, -1);
    } else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
               (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC))) {
        tcg_gen_movi_tl(t0, -1);
    } else if (u == 0) {
9781 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9784 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9794 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9797 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9800 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9803 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9806 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9809 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9812 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9815 gen_mfc0(ctx
, t0
, rt
, sel
);
9822 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9825 gen_mfc0(ctx
, t0
, rt
, sel
);
9832 gen_helper_mftc0_status(t0
, cpu_env
);
9835 gen_mfc0(ctx
, t0
, rt
, sel
);
9842 gen_helper_mftc0_cause(t0
, cpu_env
);
9852 gen_helper_mftc0_epc(t0
, cpu_env
);
9862 gen_helper_mftc0_ebase(t0
, cpu_env
);
9879 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9889 gen_helper_mftc0_debug(t0
, cpu_env
);
9892 gen_mfc0(ctx
, t0
, rt
, sel
);
9897 gen_mfc0(ctx
, t0
, rt
, sel
);
9901 /* GPR registers. */
9903 gen_helper_1e0i(mftgpr
, t0
, rt
);
9905 /* Auxiliary CPU registers */
9909 gen_helper_1e0i(mftlo
, t0
, 0);
9912 gen_helper_1e0i(mfthi
, t0
, 0);
9915 gen_helper_1e0i(mftacx
, t0
, 0);
9918 gen_helper_1e0i(mftlo
, t0
, 1);
9921 gen_helper_1e0i(mfthi
, t0
, 1);
9924 gen_helper_1e0i(mftacx
, t0
, 1);
9927 gen_helper_1e0i(mftlo
, t0
, 2);
9930 gen_helper_1e0i(mfthi
, t0
, 2);
9933 gen_helper_1e0i(mftacx
, t0
, 2);
9936 gen_helper_1e0i(mftlo
, t0
, 3);
9939 gen_helper_1e0i(mfthi
, t0
, 3);
9942 gen_helper_1e0i(mftacx
, t0
, 3);
9945 gen_helper_mftdsp(t0
, cpu_env
);
9951 /* Floating point (COP1). */
9953 /* XXX: For now we support only a single FPU context. */
9955 TCGv_i32 fp0
= tcg_temp_new_i32();
9957 gen_load_fpr32(ctx
, fp0
, rt
);
9958 tcg_gen_ext_i32_tl(t0
, fp0
);
9959 tcg_temp_free_i32(fp0
);
9961 TCGv_i32 fp0
= tcg_temp_new_i32();
9963 gen_load_fpr32h(ctx
, fp0
, rt
);
9964 tcg_gen_ext_i32_tl(t0
, fp0
);
9965 tcg_temp_free_i32(fp0
);
9969 /* XXX: For now we support only a single FPU context. */
9970 gen_helper_1e0i(cfc1
, t0
, rt
);
9972 /* COP2: Not implemented. */
9980 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9981 gen_store_gpr(t0
, rd
);
9987 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9988 generate_exception_end(ctx
, EXCP_RI
);
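/*
 * gen_mttr: MTTR (MIPS MT ASE), the store counterpart of gen_mftr.
 * The value of GPR rt is written to the selected register of the
 * thread context chosen by VPEControl.TargTC.
 */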
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE)))) {
    } else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
               (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC))) {
    } else if (u == 0) {
10012 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
10015 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
10025 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
10028 gen_helper_mttc0_tcbind(cpu_env
, t0
);
10031 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
10034 gen_helper_mttc0_tchalt(cpu_env
, t0
);
10037 gen_helper_mttc0_tccontext(cpu_env
, t0
);
10040 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
10043 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
10046 gen_mtc0(ctx
, t0
, rd
, sel
);
10053 gen_helper_mttc0_entryhi(cpu_env
, t0
);
10056 gen_mtc0(ctx
, t0
, rd
, sel
);
10063 gen_helper_mttc0_status(cpu_env
, t0
);
10066 gen_mtc0(ctx
, t0
, rd
, sel
);
10073 gen_helper_mttc0_cause(cpu_env
, t0
);
10083 gen_helper_mttc0_ebase(cpu_env
, t0
);
10093 gen_helper_mttc0_debug(cpu_env
, t0
);
10096 gen_mtc0(ctx
, t0
, rd
, sel
);
10101 gen_mtc0(ctx
, t0
, rd
, sel
);
10105 /* GPR registers. */
10107 gen_helper_0e1i(mttgpr
, t0
, rd
);
10109 /* Auxiliary CPU registers */
10113 gen_helper_0e1i(mttlo
, t0
, 0);
10116 gen_helper_0e1i(mtthi
, t0
, 0);
10119 gen_helper_0e1i(mttacx
, t0
, 0);
10122 gen_helper_0e1i(mttlo
, t0
, 1);
10125 gen_helper_0e1i(mtthi
, t0
, 1);
10128 gen_helper_0e1i(mttacx
, t0
, 1);
10131 gen_helper_0e1i(mttlo
, t0
, 2);
10134 gen_helper_0e1i(mtthi
, t0
, 2);
10137 gen_helper_0e1i(mttacx
, t0
, 2);
10140 gen_helper_0e1i(mttlo
, t0
, 3);
10143 gen_helper_0e1i(mtthi
, t0
, 3);
10146 gen_helper_0e1i(mttacx
, t0
, 3);
10149 gen_helper_mttdsp(cpu_env
, t0
);
10155 /* Floating point (COP1). */
10157 /* XXX: For now we support only a single FPU context. */
10159 TCGv_i32 fp0
= tcg_temp_new_i32();
10161 tcg_gen_trunc_tl_i32(fp0
, t0
);
10162 gen_store_fpr32(ctx
, fp0
, rd
);
10163 tcg_temp_free_i32(fp0
);
10165 TCGv_i32 fp0
= tcg_temp_new_i32();
10167 tcg_gen_trunc_tl_i32(fp0
, t0
);
10168 gen_store_fpr32h(ctx
, fp0
, rd
);
10169 tcg_temp_free_i32(fp0
);
10173 /* XXX: For now we support only a single FPU context. */
10175 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10177 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10178 tcg_temp_free_i32(fs_tmp
);
10180 /* Stop translation as we may have changed hflags */
10181 ctx
->base
.is_jmp
= DISAS_STOP
;
10183 /* COP2: Not implemented. */
10191 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10197 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10198 generate_exception_end(ctx
, EXCP_RI
);
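/*
 * gen_cp0: dispatch the privileged coprocessor 0 instructions:
 * MFC0/MTC0 (and DMFC0/DMTC0 on MIPS64), MFTR/MTTR, the TLB
 * maintenance operations (TLBWI, TLBWR, TLBP, TLBR, TLBINV/TLBINVF),
 * ERET/ERETNC, DERET and WAIT.
 */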
static void gen_cp0(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                    int rt, int rd)
{
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        /* Treat as NOP. */
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
10228 check_insn(ctx
, ISA_MIPS3
);
10230 /* Treat as NOP. */
10233 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10237 check_insn(ctx
, ISA_MIPS3
);
10239 TCGv t0
= tcg_temp_new();
10241 gen_load_gpr(t0
, rt
);
10242 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10251 /* Treat as NOP. */
10254 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10260 TCGv t0
= tcg_temp_new();
10261 gen_load_gpr(t0
, rt
);
10262 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10268 check_cp0_enabled(ctx
);
10270 /* Treat as NOP. */
10273 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10274 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10278 check_cp0_enabled(ctx
);
10279 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10280 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10285 if (!env
->tlb
->helper_tlbwi
) {
10288 gen_helper_tlbwi(cpu_env
);
10292 if (ctx
->ie
>= 2) {
10293 if (!env
->tlb
->helper_tlbinv
) {
10296 gen_helper_tlbinv(cpu_env
);
10297 } /* treat as nop if TLBINV not supported */
10301 if (ctx
->ie
>= 2) {
10302 if (!env
->tlb
->helper_tlbinvf
) {
10305 gen_helper_tlbinvf(cpu_env
);
10306 } /* treat as nop if TLBINV not supported */
10310 if (!env
->tlb
->helper_tlbwr
) {
10313 gen_helper_tlbwr(cpu_env
);
10317 if (!env
->tlb
->helper_tlbp
) {
10320 gen_helper_tlbp(cpu_env
);
10324 if (!env
->tlb
->helper_tlbr
) {
10327 gen_helper_tlbr(cpu_env
);
10329 case OPC_ERET
: /* OPC_ERETNC */
10330 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10331 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10334 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10335 if (ctx
->opcode
& (1 << bit_shift
)) {
10338 check_insn(ctx
, ISA_MIPS32R5
);
10339 gen_helper_eretnc(cpu_env
);
10343 check_insn(ctx
, ISA_MIPS2
);
10344 gen_helper_eret(cpu_env
);
10346 ctx
->base
.is_jmp
= DISAS_EXIT
;
10351 check_insn(ctx
, ISA_MIPS32
);
10352 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10353 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10356 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10358 generate_exception_end(ctx
, EXCP_RI
);
10360 gen_helper_deret(cpu_env
);
10361 ctx
->base
.is_jmp
= DISAS_EXIT
;
10366 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10367 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10368 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10371 /* If we get an exception, we want to restart at next instruction */
10372 ctx
->base
.pc_next
+= 4;
10373 save_cpu_state(ctx
, 1);
10374 ctx
->base
.pc_next
-= 4;
10375 gen_helper_wait(cpu_env
);
10376 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10381 generate_exception_end(ctx
, EXCP_RI
);
10384 (void)opn
; /* avoid a compiler warning */
10386 #endif /* !CONFIG_USER_ONLY */
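/*
 * gen_compute_branch1: pre-R6 FPU condition-code branches
 * (BC1F/BC1T/BC1FL/BC1TL and the "any/all" paired variants).  The
 * branch condition is derived from one or more FCSR condition bits
 * (fpu_fcr31 / get_fp_bit(cc)) and left in 'bcond'; the branch target
 * and the delay-slot hflags are recorded in the DisasContext.
 */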
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    }

    check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->base.pc_next + 4 + offset;
10408 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10409 tcg_gen_not_i32(t0
, t0
);
10410 tcg_gen_andi_i32(t0
, t0
, 1);
10411 tcg_gen_extu_i32_tl(bcond
, t0
);
10414 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10415 tcg_gen_not_i32(t0
, t0
);
10416 tcg_gen_andi_i32(t0
, t0
, 1);
10417 tcg_gen_extu_i32_tl(bcond
, t0
);
10420 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10421 tcg_gen_andi_i32(t0
, t0
, 1);
10422 tcg_gen_extu_i32_tl(bcond
, t0
);
10425 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10426 tcg_gen_andi_i32(t0
, t0
, 1);
10427 tcg_gen_extu_i32_tl(bcond
, t0
);
10429 ctx
->hflags
|= MIPS_HFLAG_BL
;
10433 TCGv_i32 t1
= tcg_temp_new_i32();
10434 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10435 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10436 tcg_gen_nand_i32(t0
, t0
, t1
);
10437 tcg_temp_free_i32(t1
);
10438 tcg_gen_andi_i32(t0
, t0
, 1);
10439 tcg_gen_extu_i32_tl(bcond
, t0
);
10444 TCGv_i32 t1
= tcg_temp_new_i32();
10445 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10446 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10447 tcg_gen_or_i32(t0
, t0
, t1
);
10448 tcg_temp_free_i32(t1
);
10449 tcg_gen_andi_i32(t0
, t0
, 1);
10450 tcg_gen_extu_i32_tl(bcond
, t0
);
10455 TCGv_i32 t1
= tcg_temp_new_i32();
10456 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10457 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10458 tcg_gen_and_i32(t0
, t0
, t1
);
10459 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10460 tcg_gen_and_i32(t0
, t0
, t1
);
10461 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10462 tcg_gen_nand_i32(t0
, t0
, t1
);
10463 tcg_temp_free_i32(t1
);
10464 tcg_gen_andi_i32(t0
, t0
, 1);
10465 tcg_gen_extu_i32_tl(bcond
, t0
);
10470 TCGv_i32 t1
= tcg_temp_new_i32();
10471 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10472 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10473 tcg_gen_or_i32(t0
, t0
, t1
);
10474 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10475 tcg_gen_or_i32(t0
, t0
, t1
);
10476 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10477 tcg_gen_or_i32(t0
, t0
, t1
);
10478 tcg_temp_free_i32(t1
);
10479 tcg_gen_andi_i32(t0
, t0
, 1);
10480 tcg_gen_extu_i32_tl(bcond
, t0
);
10483 ctx
->hflags
|= MIPS_HFLAG_BC
;
10486 MIPS_INVAL("cp1 cond branch");
10487 generate_exception_end(ctx
, EXCP_RI
);
10490 ctx
->btarget
= btarget
;
10491 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10493 tcg_temp_free_i32(t0
);
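/*
 * gen_compute_branch1_r6: R6 FPU branches (BC1EQZ/BC1NEZ).  The
 * condition is bit 0 of FPR ft; one variant branches when it is clear,
 * the other when it is set.
 */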
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
#endif
        generate_exception_end(ctx, EXCP_RI);
    }

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

    tcg_temp_free_i64(t0);
}
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))
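/*
 * FOP(func, fmt) builds the key used to dispatch COP1 arithmetic
 * instructions: the 5-bit format field (bits 25:21 of the instruction
 * word) is combined with the 6-bit function field (bits 5:0), so e.g.
 * "function 0, FMT_S" is the single-precision add.
 */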
10555 OPC_ADD_S
= FOP(0, FMT_S
),
10556 OPC_SUB_S
= FOP(1, FMT_S
),
10557 OPC_MUL_S
= FOP(2, FMT_S
),
10558 OPC_DIV_S
= FOP(3, FMT_S
),
10559 OPC_SQRT_S
= FOP(4, FMT_S
),
10560 OPC_ABS_S
= FOP(5, FMT_S
),
10561 OPC_MOV_S
= FOP(6, FMT_S
),
10562 OPC_NEG_S
= FOP(7, FMT_S
),
10563 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10564 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10565 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10566 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10567 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10568 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10569 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10570 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10571 OPC_SEL_S
= FOP(16, FMT_S
),
10572 OPC_MOVCF_S
= FOP(17, FMT_S
),
10573 OPC_MOVZ_S
= FOP(18, FMT_S
),
10574 OPC_MOVN_S
= FOP(19, FMT_S
),
10575 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10576 OPC_RECIP_S
= FOP(21, FMT_S
),
10577 OPC_RSQRT_S
= FOP(22, FMT_S
),
10578 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10579 OPC_MADDF_S
= FOP(24, FMT_S
),
10580 OPC_MSUBF_S
= FOP(25, FMT_S
),
10581 OPC_RINT_S
= FOP(26, FMT_S
),
10582 OPC_CLASS_S
= FOP(27, FMT_S
),
10583 OPC_MIN_S
= FOP(28, FMT_S
),
10584 OPC_RECIP2_S
= FOP(28, FMT_S
),
10585 OPC_MINA_S
= FOP(29, FMT_S
),
10586 OPC_RECIP1_S
= FOP(29, FMT_S
),
10587 OPC_MAX_S
= FOP(30, FMT_S
),
10588 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10589 OPC_MAXA_S
= FOP(31, FMT_S
),
10590 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10591 OPC_CVT_D_S
= FOP(33, FMT_S
),
10592 OPC_CVT_W_S
= FOP(36, FMT_S
),
10593 OPC_CVT_L_S
= FOP(37, FMT_S
),
10594 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10595 OPC_CMP_F_S
= FOP(48, FMT_S
),
10596 OPC_CMP_UN_S
= FOP(49, FMT_S
),
10597 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
10598 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
10599 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
10600 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
10601 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
10602 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
10603 OPC_CMP_SF_S
= FOP(56, FMT_S
),
10604 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
10605 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
10606 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
10607 OPC_CMP_LT_S
= FOP(60, FMT_S
),
10608 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
10609 OPC_CMP_LE_S
= FOP(62, FMT_S
),
10610 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
10612 OPC_ADD_D
= FOP(0, FMT_D
),
10613 OPC_SUB_D
= FOP(1, FMT_D
),
10614 OPC_MUL_D
= FOP(2, FMT_D
),
10615 OPC_DIV_D
= FOP(3, FMT_D
),
10616 OPC_SQRT_D
= FOP(4, FMT_D
),
10617 OPC_ABS_D
= FOP(5, FMT_D
),
10618 OPC_MOV_D
= FOP(6, FMT_D
),
10619 OPC_NEG_D
= FOP(7, FMT_D
),
10620 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10621 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10622 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10623 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10624 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10625 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10626 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10627 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10628 OPC_SEL_D
= FOP(16, FMT_D
),
10629 OPC_MOVCF_D
= FOP(17, FMT_D
),
10630 OPC_MOVZ_D
= FOP(18, FMT_D
),
10631 OPC_MOVN_D
= FOP(19, FMT_D
),
10632 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10633 OPC_RECIP_D
= FOP(21, FMT_D
),
10634 OPC_RSQRT_D
= FOP(22, FMT_D
),
10635 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10636 OPC_MADDF_D
= FOP(24, FMT_D
),
10637 OPC_MSUBF_D
= FOP(25, FMT_D
),
10638 OPC_RINT_D
= FOP(26, FMT_D
),
10639 OPC_CLASS_D
= FOP(27, FMT_D
),
10640 OPC_MIN_D
= FOP(28, FMT_D
),
10641 OPC_RECIP2_D
= FOP(28, FMT_D
),
10642 OPC_MINA_D
= FOP(29, FMT_D
),
10643 OPC_RECIP1_D
= FOP(29, FMT_D
),
10644 OPC_MAX_D
= FOP(30, FMT_D
),
10645 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10646 OPC_MAXA_D
= FOP(31, FMT_D
),
10647 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10648 OPC_CVT_S_D
= FOP(32, FMT_D
),
10649 OPC_CVT_W_D
= FOP(36, FMT_D
),
10650 OPC_CVT_L_D
= FOP(37, FMT_D
),
10651 OPC_CMP_F_D
= FOP(48, FMT_D
),
10652 OPC_CMP_UN_D
= FOP(49, FMT_D
),
10653 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
10654 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
10655 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
10656 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
10657 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
10658 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
10659 OPC_CMP_SF_D
= FOP(56, FMT_D
),
10660 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
10661 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
10662 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
10663 OPC_CMP_LT_D
= FOP(60, FMT_D
),
10664 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
10665 OPC_CMP_LE_D
= FOP(62, FMT_D
),
10666 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
10668 OPC_CVT_S_W
= FOP(32, FMT_W
),
10669 OPC_CVT_D_W
= FOP(33, FMT_W
),
10670 OPC_CVT_S_L
= FOP(32, FMT_L
),
10671 OPC_CVT_D_L
= FOP(33, FMT_L
),
10672 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10674 OPC_ADD_PS
= FOP(0, FMT_PS
),
10675 OPC_SUB_PS
= FOP(1, FMT_PS
),
10676 OPC_MUL_PS
= FOP(2, FMT_PS
),
10677 OPC_DIV_PS
= FOP(3, FMT_PS
),
10678 OPC_ABS_PS
= FOP(5, FMT_PS
),
10679 OPC_MOV_PS
= FOP(6, FMT_PS
),
10680 OPC_NEG_PS
= FOP(7, FMT_PS
),
10681 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10682 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10683 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10684 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10685 OPC_MULR_PS
= FOP(26, FMT_PS
),
10686 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10687 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10688 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10689 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10691 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10692 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10693 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10694 OPC_PLL_PS
= FOP(44, FMT_PS
),
10695 OPC_PLU_PS
= FOP(45, FMT_PS
),
10696 OPC_PUL_PS
= FOP(46, FMT_PS
),
10697 OPC_PUU_PS
= FOP(47, FMT_PS
),
10698 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
10699 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
10700 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
10701 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
10702 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
10703 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
10704 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
10705 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
10706 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
10707 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
10708 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
10709 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
10710 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
10711 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
10712 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
10713 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
10717 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10718 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10719 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10720 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10721 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10722 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10723 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10724 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10725 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10726 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10727 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10728 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10729 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10730 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10731 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10732 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10733 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10734 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10735 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10736 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10737 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10738 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10740 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10741 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10742 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10743 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10744 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10745 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10746 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10747 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10748 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10749 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10750 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10751 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10752 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10753 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10754 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10755 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10756 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10757 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10758 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10759 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10760 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10761 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
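/*
 * gen_cp1: moves between the integer and floating-point register files:
 * MFC1/MTC1, CFC1/CTC1 (FP control registers), and on 64-bit targets
 * DMFC1/DMTC1 as well as the MFHC1/MTHC1 high-half accesses.
 */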
static void gen_cp1(DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();

    switch (opc) {
    case OPC_MFC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTC1:
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_CFC1:
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_CTC1:
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        {
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        }
        /* Stop translation as we may have changed hflags */
        ctx->base.is_jmp = DISAS_STOP;
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMFC1:
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_DMTC1:
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        break;
#endif
    case OPC_MFHC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTHC1:
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    default:
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
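/*
 * gen_movci: MOVF/MOVT - conditionally copy GPR rs into GPR rd
 * depending on FP condition code cc and the tf (true/false) bit.
 */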
static void gen_movci(DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    TCGCond cond;
    TCGv_i32 t0;
    TCGLabel *l1;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    if (rs == 0) {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    }
    gen_set_label(l1);
}
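/*
 * gen_movcf_s/_d/_ps: MOVF.fmt/MOVT.fmt - conditionally copy FPR fs to
 * FPR fd depending on FP condition code cc; the paired-single variant
 * tests cc for the lower half and cc+1 for the upper half.
 */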
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);
    tcg_temp_free_i32(t0);
}
static inline void gen_movcf_d(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i64 fp0;
    TCGLabel *l1 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    gen_set_label(l1);
}
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc + 1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
    gen_set_label(l2);
}
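/*
 * gen_sel_s/gen_sel_d: R6 SEL.fmt, SELEQZ.fmt and SELNEZ.fmt - select
 * one of the inputs based on bit 0 of a register, implemented with
 * tcg_gen_movcond rather than branches.
 */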
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_S:
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
}
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_D:
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
}
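/*
 * gen_farith: main dispatcher for the COP1 arithmetic, conversion and
 * compare operations enumerated above; ft/fs/fd are the FPU operand
 * registers and cc the condition code field.
 */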
static void gen_farith(DisasContext *ctx, enum fopcode op1,
                       int ft, int fs, int fd, int cc)
{
    uint32_t func = ctx->opcode & 0x3f;
11029 TCGv_i32 fp0
= tcg_temp_new_i32();
11030 TCGv_i32 fp1
= tcg_temp_new_i32();
11032 gen_load_fpr32(ctx
, fp0
, fs
);
11033 gen_load_fpr32(ctx
, fp1
, ft
);
11034 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
11035 tcg_temp_free_i32(fp1
);
11036 gen_store_fpr32(ctx
, fp0
, fd
);
11037 tcg_temp_free_i32(fp0
);
11042 TCGv_i32 fp0
= tcg_temp_new_i32();
11043 TCGv_i32 fp1
= tcg_temp_new_i32();
11045 gen_load_fpr32(ctx
, fp0
, fs
);
11046 gen_load_fpr32(ctx
, fp1
, ft
);
11047 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
11048 tcg_temp_free_i32(fp1
);
11049 gen_store_fpr32(ctx
, fp0
, fd
);
11050 tcg_temp_free_i32(fp0
);
11055 TCGv_i32 fp0
= tcg_temp_new_i32();
11056 TCGv_i32 fp1
= tcg_temp_new_i32();
11058 gen_load_fpr32(ctx
, fp0
, fs
);
11059 gen_load_fpr32(ctx
, fp1
, ft
);
11060 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
11061 tcg_temp_free_i32(fp1
);
11062 gen_store_fpr32(ctx
, fp0
, fd
);
11063 tcg_temp_free_i32(fp0
);
11068 TCGv_i32 fp0
= tcg_temp_new_i32();
11069 TCGv_i32 fp1
= tcg_temp_new_i32();
11071 gen_load_fpr32(ctx
, fp0
, fs
);
11072 gen_load_fpr32(ctx
, fp1
, ft
);
11073 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
11074 tcg_temp_free_i32(fp1
);
11075 gen_store_fpr32(ctx
, fp0
, fd
);
11076 tcg_temp_free_i32(fp0
);
11081 TCGv_i32 fp0
= tcg_temp_new_i32();
11083 gen_load_fpr32(ctx
, fp0
, fs
);
11084 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
11085 gen_store_fpr32(ctx
, fp0
, fd
);
11086 tcg_temp_free_i32(fp0
);
11091 TCGv_i32 fp0
= tcg_temp_new_i32();
11093 gen_load_fpr32(ctx
, fp0
, fs
);
11094 if (ctx
->abs2008
) {
11095 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11097 gen_helper_float_abs_s(fp0
, fp0
);
11099 gen_store_fpr32(ctx
, fp0
, fd
);
11100 tcg_temp_free_i32(fp0
);
11105 TCGv_i32 fp0
= tcg_temp_new_i32();
11107 gen_load_fpr32(ctx
, fp0
, fs
);
11108 gen_store_fpr32(ctx
, fp0
, fd
);
11109 tcg_temp_free_i32(fp0
);
11114 TCGv_i32 fp0
= tcg_temp_new_i32();
11116 gen_load_fpr32(ctx
, fp0
, fs
);
11117 if (ctx
->abs2008
) {
11118 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11120 gen_helper_float_chs_s(fp0
, fp0
);
11122 gen_store_fpr32(ctx
, fp0
, fd
);
11123 tcg_temp_free_i32(fp0
);
11126 case OPC_ROUND_L_S
:
11127 check_cp1_64bitmode(ctx
);
11129 TCGv_i32 fp32
= tcg_temp_new_i32();
11130 TCGv_i64 fp64
= tcg_temp_new_i64();
11132 gen_load_fpr32(ctx
, fp32
, fs
);
11133 if (ctx
->nan2008
) {
11134 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11136 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11138 tcg_temp_free_i32(fp32
);
11139 gen_store_fpr64(ctx
, fp64
, fd
);
11140 tcg_temp_free_i64(fp64
);
11143 case OPC_TRUNC_L_S
:
11144 check_cp1_64bitmode(ctx
);
11146 TCGv_i32 fp32
= tcg_temp_new_i32();
11147 TCGv_i64 fp64
= tcg_temp_new_i64();
11149 gen_load_fpr32(ctx
, fp32
, fs
);
11150 if (ctx
->nan2008
) {
11151 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11153 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11155 tcg_temp_free_i32(fp32
);
11156 gen_store_fpr64(ctx
, fp64
, fd
);
11157 tcg_temp_free_i64(fp64
);
11161 check_cp1_64bitmode(ctx
);
11163 TCGv_i32 fp32
= tcg_temp_new_i32();
11164 TCGv_i64 fp64
= tcg_temp_new_i64();
11166 gen_load_fpr32(ctx
, fp32
, fs
);
11167 if (ctx
->nan2008
) {
11168 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11170 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11172 tcg_temp_free_i32(fp32
);
11173 gen_store_fpr64(ctx
, fp64
, fd
);
11174 tcg_temp_free_i64(fp64
);
11177 case OPC_FLOOR_L_S
:
11178 check_cp1_64bitmode(ctx
);
11180 TCGv_i32 fp32
= tcg_temp_new_i32();
11181 TCGv_i64 fp64
= tcg_temp_new_i64();
11183 gen_load_fpr32(ctx
, fp32
, fs
);
11184 if (ctx
->nan2008
) {
11185 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11187 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11189 tcg_temp_free_i32(fp32
);
11190 gen_store_fpr64(ctx
, fp64
, fd
);
11191 tcg_temp_free_i64(fp64
);
11194 case OPC_ROUND_W_S
:
11196 TCGv_i32 fp0
= tcg_temp_new_i32();
11198 gen_load_fpr32(ctx
, fp0
, fs
);
11199 if (ctx
->nan2008
) {
11200 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11202 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11204 gen_store_fpr32(ctx
, fp0
, fd
);
11205 tcg_temp_free_i32(fp0
);
11208 case OPC_TRUNC_W_S
:
11210 TCGv_i32 fp0
= tcg_temp_new_i32();
11212 gen_load_fpr32(ctx
, fp0
, fs
);
11213 if (ctx
->nan2008
) {
11214 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11216 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11218 gen_store_fpr32(ctx
, fp0
, fd
);
11219 tcg_temp_free_i32(fp0
);
11224 TCGv_i32 fp0
= tcg_temp_new_i32();
11226 gen_load_fpr32(ctx
, fp0
, fs
);
11227 if (ctx
->nan2008
) {
11228 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11230 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11232 gen_store_fpr32(ctx
, fp0
, fd
);
11233 tcg_temp_free_i32(fp0
);
11236 case OPC_FLOOR_W_S
:
11238 TCGv_i32 fp0
= tcg_temp_new_i32();
11240 gen_load_fpr32(ctx
, fp0
, fs
);
11241 if (ctx
->nan2008
) {
11242 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11244 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11246 gen_store_fpr32(ctx
, fp0
, fd
);
11247 tcg_temp_free_i32(fp0
);
11251 check_insn(ctx
, ISA_MIPS32R6
);
11252 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11255 check_insn(ctx
, ISA_MIPS32R6
);
11256 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11259 check_insn(ctx
, ISA_MIPS32R6
);
11260 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11263 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11264 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11267 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11269 TCGLabel
*l1
= gen_new_label();
11273 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11275 fp0
= tcg_temp_new_i32();
11276 gen_load_fpr32(ctx
, fp0
, fs
);
11277 gen_store_fpr32(ctx
, fp0
, fd
);
11278 tcg_temp_free_i32(fp0
);
11283 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11285 TCGLabel
*l1
= gen_new_label();
11289 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11290 fp0
= tcg_temp_new_i32();
11291 gen_load_fpr32(ctx
, fp0
, fs
);
11292 gen_store_fpr32(ctx
, fp0
, fd
);
11293 tcg_temp_free_i32(fp0
);
11300 TCGv_i32 fp0
= tcg_temp_new_i32();
11302 gen_load_fpr32(ctx
, fp0
, fs
);
11303 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11304 gen_store_fpr32(ctx
, fp0
, fd
);
11305 tcg_temp_free_i32(fp0
);
11310 TCGv_i32 fp0
= tcg_temp_new_i32();
11312 gen_load_fpr32(ctx
, fp0
, fs
);
11313 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11314 gen_store_fpr32(ctx
, fp0
, fd
);
11315 tcg_temp_free_i32(fp0
);
11319 check_insn(ctx
, ISA_MIPS32R6
);
11321 TCGv_i32 fp0
= tcg_temp_new_i32();
11322 TCGv_i32 fp1
= tcg_temp_new_i32();
11323 TCGv_i32 fp2
= tcg_temp_new_i32();
11324 gen_load_fpr32(ctx
, fp0
, fs
);
11325 gen_load_fpr32(ctx
, fp1
, ft
);
11326 gen_load_fpr32(ctx
, fp2
, fd
);
11327 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11328 gen_store_fpr32(ctx
, fp2
, fd
);
11329 tcg_temp_free_i32(fp2
);
11330 tcg_temp_free_i32(fp1
);
11331 tcg_temp_free_i32(fp0
);
11335 check_insn(ctx
, ISA_MIPS32R6
);
11337 TCGv_i32 fp0
= tcg_temp_new_i32();
11338 TCGv_i32 fp1
= tcg_temp_new_i32();
11339 TCGv_i32 fp2
= tcg_temp_new_i32();
11340 gen_load_fpr32(ctx
, fp0
, fs
);
11341 gen_load_fpr32(ctx
, fp1
, ft
);
11342 gen_load_fpr32(ctx
, fp2
, fd
);
11343 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11344 gen_store_fpr32(ctx
, fp2
, fd
);
11345 tcg_temp_free_i32(fp2
);
11346 tcg_temp_free_i32(fp1
);
11347 tcg_temp_free_i32(fp0
);
11351 check_insn(ctx
, ISA_MIPS32R6
);
11353 TCGv_i32 fp0
= tcg_temp_new_i32();
11354 gen_load_fpr32(ctx
, fp0
, fs
);
11355 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11356 gen_store_fpr32(ctx
, fp0
, fd
);
11357 tcg_temp_free_i32(fp0
);
11361 check_insn(ctx
, ISA_MIPS32R6
);
11363 TCGv_i32 fp0
= tcg_temp_new_i32();
11364 gen_load_fpr32(ctx
, fp0
, fs
);
11365 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11366 gen_store_fpr32(ctx
, fp0
, fd
);
11367 tcg_temp_free_i32(fp0
);
11370 case OPC_MIN_S
: /* OPC_RECIP2_S */
11371 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11373 TCGv_i32 fp0
= tcg_temp_new_i32();
11374 TCGv_i32 fp1
= tcg_temp_new_i32();
11375 TCGv_i32 fp2
= tcg_temp_new_i32();
11376 gen_load_fpr32(ctx
, fp0
, fs
);
11377 gen_load_fpr32(ctx
, fp1
, ft
);
11378 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11379 gen_store_fpr32(ctx
, fp2
, fd
);
11380 tcg_temp_free_i32(fp2
);
11381 tcg_temp_free_i32(fp1
);
11382 tcg_temp_free_i32(fp0
);
11385 check_cp1_64bitmode(ctx
);
11387 TCGv_i32 fp0
= tcg_temp_new_i32();
11388 TCGv_i32 fp1
= tcg_temp_new_i32();
11390 gen_load_fpr32(ctx
, fp0
, fs
);
11391 gen_load_fpr32(ctx
, fp1
, ft
);
11392 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11393 tcg_temp_free_i32(fp1
);
11394 gen_store_fpr32(ctx
, fp0
, fd
);
11395 tcg_temp_free_i32(fp0
);
11399 case OPC_MINA_S
: /* OPC_RECIP1_S */
11400 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11402 TCGv_i32 fp0
= tcg_temp_new_i32();
11403 TCGv_i32 fp1
= tcg_temp_new_i32();
11404 TCGv_i32 fp2
= tcg_temp_new_i32();
11405 gen_load_fpr32(ctx
, fp0
, fs
);
11406 gen_load_fpr32(ctx
, fp1
, ft
);
11407 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11408 gen_store_fpr32(ctx
, fp2
, fd
);
11409 tcg_temp_free_i32(fp2
);
11410 tcg_temp_free_i32(fp1
);
11411 tcg_temp_free_i32(fp0
);
11414 check_cp1_64bitmode(ctx
);
11416 TCGv_i32 fp0
= tcg_temp_new_i32();
11418 gen_load_fpr32(ctx
, fp0
, fs
);
11419 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11420 gen_store_fpr32(ctx
, fp0
, fd
);
11421 tcg_temp_free_i32(fp0
);
11425 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11426 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11428 TCGv_i32 fp0
= tcg_temp_new_i32();
11429 TCGv_i32 fp1
= tcg_temp_new_i32();
11430 gen_load_fpr32(ctx
, fp0
, fs
);
11431 gen_load_fpr32(ctx
, fp1
, ft
);
11432 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11433 gen_store_fpr32(ctx
, fp1
, fd
);
11434 tcg_temp_free_i32(fp1
);
11435 tcg_temp_free_i32(fp0
);
11438 check_cp1_64bitmode(ctx
);
11440 TCGv_i32 fp0
= tcg_temp_new_i32();
11442 gen_load_fpr32(ctx
, fp0
, fs
);
11443 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11444 gen_store_fpr32(ctx
, fp0
, fd
);
11445 tcg_temp_free_i32(fp0
);
11449 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11450 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11452 TCGv_i32 fp0
= tcg_temp_new_i32();
11453 TCGv_i32 fp1
= tcg_temp_new_i32();
11454 gen_load_fpr32(ctx
, fp0
, fs
);
11455 gen_load_fpr32(ctx
, fp1
, ft
);
11456 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11457 gen_store_fpr32(ctx
, fp1
, fd
);
11458 tcg_temp_free_i32(fp1
);
11459 tcg_temp_free_i32(fp0
);
11462 check_cp1_64bitmode(ctx
);
11464 TCGv_i32 fp0
= tcg_temp_new_i32();
11465 TCGv_i32 fp1
= tcg_temp_new_i32();
11467 gen_load_fpr32(ctx
, fp0
, fs
);
11468 gen_load_fpr32(ctx
, fp1
, ft
);
11469 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11470 tcg_temp_free_i32(fp1
);
11471 gen_store_fpr32(ctx
, fp0
, fd
);
11472 tcg_temp_free_i32(fp0
);
11477 check_cp1_registers(ctx
, fd
);
11479 TCGv_i32 fp32
= tcg_temp_new_i32();
11480 TCGv_i64 fp64
= tcg_temp_new_i64();
11482 gen_load_fpr32(ctx
, fp32
, fs
);
11483 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11484 tcg_temp_free_i32(fp32
);
11485 gen_store_fpr64(ctx
, fp64
, fd
);
11486 tcg_temp_free_i64(fp64
);
11491 TCGv_i32 fp0
= tcg_temp_new_i32();
11493 gen_load_fpr32(ctx
, fp0
, fs
);
11494 if (ctx
->nan2008
) {
11495 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11497 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11499 gen_store_fpr32(ctx
, fp0
, fd
);
11500 tcg_temp_free_i32(fp0
);
11504 check_cp1_64bitmode(ctx
);
11506 TCGv_i32 fp32
= tcg_temp_new_i32();
11507 TCGv_i64 fp64
= tcg_temp_new_i64();
11509 gen_load_fpr32(ctx
, fp32
, fs
);
11510 if (ctx
->nan2008
) {
11511 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11513 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11515 tcg_temp_free_i32(fp32
);
11516 gen_store_fpr64(ctx
, fp64
, fd
);
11517 tcg_temp_free_i64(fp64
);
11523 TCGv_i64 fp64
= tcg_temp_new_i64();
11524 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11525 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11527 gen_load_fpr32(ctx
, fp32_0
, fs
);
11528 gen_load_fpr32(ctx
, fp32_1
, ft
);
11529 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11530 tcg_temp_free_i32(fp32_1
);
11531 tcg_temp_free_i32(fp32_0
);
11532 gen_store_fpr64(ctx
, fp64
, fd
);
11533 tcg_temp_free_i64(fp64
);
11539 case OPC_CMP_UEQ_S
:
11540 case OPC_CMP_OLT_S
:
11541 case OPC_CMP_ULT_S
:
11542 case OPC_CMP_OLE_S
:
11543 case OPC_CMP_ULE_S
:
11545 case OPC_CMP_NGLE_S
:
11546 case OPC_CMP_SEQ_S
:
11547 case OPC_CMP_NGL_S
:
11549 case OPC_CMP_NGE_S
:
11551 case OPC_CMP_NGT_S
:
11552 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11553 if (ctx
->opcode
& (1 << 6)) {
11554 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
11556 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
11560 check_cp1_registers(ctx
, fs
| ft
| fd
);
11562 TCGv_i64 fp0
= tcg_temp_new_i64();
11563 TCGv_i64 fp1
= tcg_temp_new_i64();
11565 gen_load_fpr64(ctx
, fp0
, fs
);
11566 gen_load_fpr64(ctx
, fp1
, ft
);
11567 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11568 tcg_temp_free_i64(fp1
);
11569 gen_store_fpr64(ctx
, fp0
, fd
);
11570 tcg_temp_free_i64(fp0
);
11574 check_cp1_registers(ctx
, fs
| ft
| fd
);
11576 TCGv_i64 fp0
= tcg_temp_new_i64();
11577 TCGv_i64 fp1
= tcg_temp_new_i64();
11579 gen_load_fpr64(ctx
, fp0
, fs
);
11580 gen_load_fpr64(ctx
, fp1
, ft
);
11581 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11582 tcg_temp_free_i64(fp1
);
11583 gen_store_fpr64(ctx
, fp0
, fd
);
11584 tcg_temp_free_i64(fp0
);
11588 check_cp1_registers(ctx
, fs
| ft
| fd
);
11590 TCGv_i64 fp0
= tcg_temp_new_i64();
11591 TCGv_i64 fp1
= tcg_temp_new_i64();
11593 gen_load_fpr64(ctx
, fp0
, fs
);
11594 gen_load_fpr64(ctx
, fp1
, ft
);
11595 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11596 tcg_temp_free_i64(fp1
);
11597 gen_store_fpr64(ctx
, fp0
, fd
);
11598 tcg_temp_free_i64(fp0
);
11602 check_cp1_registers(ctx
, fs
| ft
| fd
);
11604 TCGv_i64 fp0
= tcg_temp_new_i64();
11605 TCGv_i64 fp1
= tcg_temp_new_i64();
11607 gen_load_fpr64(ctx
, fp0
, fs
);
11608 gen_load_fpr64(ctx
, fp1
, ft
);
11609 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11610 tcg_temp_free_i64(fp1
);
11611 gen_store_fpr64(ctx
, fp0
, fd
);
11612 tcg_temp_free_i64(fp0
);
11616 check_cp1_registers(ctx
, fs
| fd
);
11618 TCGv_i64 fp0
= tcg_temp_new_i64();
11620 gen_load_fpr64(ctx
, fp0
, fs
);
11621 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11622 gen_store_fpr64(ctx
, fp0
, fd
);
11623 tcg_temp_free_i64(fp0
);
11627 check_cp1_registers(ctx
, fs
| fd
);
11629 TCGv_i64 fp0
= tcg_temp_new_i64();
11631 gen_load_fpr64(ctx
, fp0
, fs
);
11632 if (ctx
->abs2008
) {
11633 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11635 gen_helper_float_abs_d(fp0
, fp0
);
11637 gen_store_fpr64(ctx
, fp0
, fd
);
11638 tcg_temp_free_i64(fp0
);
11642 check_cp1_registers(ctx
, fs
| fd
);
11644 TCGv_i64 fp0
= tcg_temp_new_i64();
11646 gen_load_fpr64(ctx
, fp0
, fs
);
11647 gen_store_fpr64(ctx
, fp0
, fd
);
11648 tcg_temp_free_i64(fp0
);
11652 check_cp1_registers(ctx
, fs
| fd
);
11654 TCGv_i64 fp0
= tcg_temp_new_i64();
11656 gen_load_fpr64(ctx
, fp0
, fs
);
11657 if (ctx
->abs2008
) {
11658 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11660 gen_helper_float_chs_d(fp0
, fp0
);
11662 gen_store_fpr64(ctx
, fp0
, fd
);
11663 tcg_temp_free_i64(fp0
);
11666 case OPC_ROUND_L_D
:
11667 check_cp1_64bitmode(ctx
);
11669 TCGv_i64 fp0
= tcg_temp_new_i64();
11671 gen_load_fpr64(ctx
, fp0
, fs
);
11672 if (ctx
->nan2008
) {
11673 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11675 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11677 gen_store_fpr64(ctx
, fp0
, fd
);
11678 tcg_temp_free_i64(fp0
);
11681 case OPC_TRUNC_L_D
:
11682 check_cp1_64bitmode(ctx
);
11684 TCGv_i64 fp0
= tcg_temp_new_i64();
11686 gen_load_fpr64(ctx
, fp0
, fs
);
11687 if (ctx
->nan2008
) {
11688 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11690 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11692 gen_store_fpr64(ctx
, fp0
, fd
);
11693 tcg_temp_free_i64(fp0
);
11697 check_cp1_64bitmode(ctx
);
11699 TCGv_i64 fp0
= tcg_temp_new_i64();
11701 gen_load_fpr64(ctx
, fp0
, fs
);
11702 if (ctx
->nan2008
) {
11703 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11705 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11707 gen_store_fpr64(ctx
, fp0
, fd
);
11708 tcg_temp_free_i64(fp0
);
11711 case OPC_FLOOR_L_D
:
11712 check_cp1_64bitmode(ctx
);
11714 TCGv_i64 fp0
= tcg_temp_new_i64();
11716 gen_load_fpr64(ctx
, fp0
, fs
);
11717 if (ctx
->nan2008
) {
11718 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11720 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11722 gen_store_fpr64(ctx
, fp0
, fd
);
11723 tcg_temp_free_i64(fp0
);
11726 case OPC_ROUND_W_D
:
11727 check_cp1_registers(ctx
, fs
);
11729 TCGv_i32 fp32
= tcg_temp_new_i32();
11730 TCGv_i64 fp64
= tcg_temp_new_i64();
11732 gen_load_fpr64(ctx
, fp64
, fs
);
11733 if (ctx
->nan2008
) {
11734 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11736 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11738 tcg_temp_free_i64(fp64
);
11739 gen_store_fpr32(ctx
, fp32
, fd
);
11740 tcg_temp_free_i32(fp32
);
11743 case OPC_TRUNC_W_D
:
11744 check_cp1_registers(ctx
, fs
);
11746 TCGv_i32 fp32
= tcg_temp_new_i32();
11747 TCGv_i64 fp64
= tcg_temp_new_i64();
11749 gen_load_fpr64(ctx
, fp64
, fs
);
11750 if (ctx
->nan2008
) {
11751 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11753 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11755 tcg_temp_free_i64(fp64
);
11756 gen_store_fpr32(ctx
, fp32
, fd
);
11757 tcg_temp_free_i32(fp32
);
11761 check_cp1_registers(ctx
, fs
);
11763 TCGv_i32 fp32
= tcg_temp_new_i32();
11764 TCGv_i64 fp64
= tcg_temp_new_i64();
11766 gen_load_fpr64(ctx
, fp64
, fs
);
11767 if (ctx
->nan2008
) {
11768 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11770 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11772 tcg_temp_free_i64(fp64
);
11773 gen_store_fpr32(ctx
, fp32
, fd
);
11774 tcg_temp_free_i32(fp32
);
11777 case OPC_FLOOR_W_D
:
11778 check_cp1_registers(ctx
, fs
);
11780 TCGv_i32 fp32
= tcg_temp_new_i32();
11781 TCGv_i64 fp64
= tcg_temp_new_i64();
11783 gen_load_fpr64(ctx
, fp64
, fs
);
11784 if (ctx
->nan2008
) {
11785 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11787 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11789 tcg_temp_free_i64(fp64
);
11790 gen_store_fpr32(ctx
, fp32
, fd
);
11791 tcg_temp_free_i32(fp32
);
11795 check_insn(ctx
, ISA_MIPS32R6
);
11796 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11799 check_insn(ctx
, ISA_MIPS32R6
);
11800 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11803 check_insn(ctx
, ISA_MIPS32R6
);
11804 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11807 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11808 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11811 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11813 TCGLabel
*l1
= gen_new_label();
11817 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11819 fp0
= tcg_temp_new_i64();
11820 gen_load_fpr64(ctx
, fp0
, fs
);
11821 gen_store_fpr64(ctx
, fp0
, fd
);
11822 tcg_temp_free_i64(fp0
);
11827 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11829 TCGLabel
*l1
= gen_new_label();
11833 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11834 fp0
= tcg_temp_new_i64();
11835 gen_load_fpr64(ctx
, fp0
, fs
);
11836 gen_store_fpr64(ctx
, fp0
, fd
);
11837 tcg_temp_free_i64(fp0
);
11843 check_cp1_registers(ctx
, fs
| fd
);
11845 TCGv_i64 fp0
= tcg_temp_new_i64();
11847 gen_load_fpr64(ctx
, fp0
, fs
);
11848 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11849 gen_store_fpr64(ctx
, fp0
, fd
);
11850 tcg_temp_free_i64(fp0
);
11854 check_cp1_registers(ctx
, fs
| fd
);
11856 TCGv_i64 fp0
= tcg_temp_new_i64();
11858 gen_load_fpr64(ctx
, fp0
, fs
);
11859 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11860 gen_store_fpr64(ctx
, fp0
, fd
);
11861 tcg_temp_free_i64(fp0
);
11865 check_insn(ctx
, ISA_MIPS32R6
);
11867 TCGv_i64 fp0
= tcg_temp_new_i64();
11868 TCGv_i64 fp1
= tcg_temp_new_i64();
11869 TCGv_i64 fp2
= tcg_temp_new_i64();
11870 gen_load_fpr64(ctx
, fp0
, fs
);
11871 gen_load_fpr64(ctx
, fp1
, ft
);
11872 gen_load_fpr64(ctx
, fp2
, fd
);
11873 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11874 gen_store_fpr64(ctx
, fp2
, fd
);
11875 tcg_temp_free_i64(fp2
);
11876 tcg_temp_free_i64(fp1
);
11877 tcg_temp_free_i64(fp0
);
11881 check_insn(ctx
, ISA_MIPS32R6
);
11883 TCGv_i64 fp0
= tcg_temp_new_i64();
11884 TCGv_i64 fp1
= tcg_temp_new_i64();
11885 TCGv_i64 fp2
= tcg_temp_new_i64();
11886 gen_load_fpr64(ctx
, fp0
, fs
);
11887 gen_load_fpr64(ctx
, fp1
, ft
);
11888 gen_load_fpr64(ctx
, fp2
, fd
);
11889 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11890 gen_store_fpr64(ctx
, fp2
, fd
);
11891 tcg_temp_free_i64(fp2
);
11892 tcg_temp_free_i64(fp1
);
11893 tcg_temp_free_i64(fp0
);
11897 check_insn(ctx
, ISA_MIPS32R6
);
11899 TCGv_i64 fp0
= tcg_temp_new_i64();
11900 gen_load_fpr64(ctx
, fp0
, fs
);
11901 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11902 gen_store_fpr64(ctx
, fp0
, fd
);
11903 tcg_temp_free_i64(fp0
);
11907 check_insn(ctx
, ISA_MIPS32R6
);
11909 TCGv_i64 fp0
= tcg_temp_new_i64();
11910 gen_load_fpr64(ctx
, fp0
, fs
);
11911 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11912 gen_store_fpr64(ctx
, fp0
, fd
);
11913 tcg_temp_free_i64(fp0
);
11916 case OPC_MIN_D
: /* OPC_RECIP2_D */
11917 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11919 TCGv_i64 fp0
= tcg_temp_new_i64();
11920 TCGv_i64 fp1
= tcg_temp_new_i64();
11921 gen_load_fpr64(ctx
, fp0
, fs
);
11922 gen_load_fpr64(ctx
, fp1
, ft
);
11923 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11924 gen_store_fpr64(ctx
, fp1
, fd
);
11925 tcg_temp_free_i64(fp1
);
11926 tcg_temp_free_i64(fp0
);
11929 check_cp1_64bitmode(ctx
);
11931 TCGv_i64 fp0
= tcg_temp_new_i64();
11932 TCGv_i64 fp1
= tcg_temp_new_i64();
11934 gen_load_fpr64(ctx
, fp0
, fs
);
11935 gen_load_fpr64(ctx
, fp1
, ft
);
11936 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11937 tcg_temp_free_i64(fp1
);
11938 gen_store_fpr64(ctx
, fp0
, fd
);
11939 tcg_temp_free_i64(fp0
);
11943 case OPC_MINA_D
: /* OPC_RECIP1_D */
11944 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11946 TCGv_i64 fp0
= tcg_temp_new_i64();
11947 TCGv_i64 fp1
= tcg_temp_new_i64();
11948 gen_load_fpr64(ctx
, fp0
, fs
);
11949 gen_load_fpr64(ctx
, fp1
, ft
);
11950 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11951 gen_store_fpr64(ctx
, fp1
, fd
);
11952 tcg_temp_free_i64(fp1
);
11953 tcg_temp_free_i64(fp0
);
11956 check_cp1_64bitmode(ctx
);
11958 TCGv_i64 fp0
= tcg_temp_new_i64();
11960 gen_load_fpr64(ctx
, fp0
, fs
);
11961 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11962 gen_store_fpr64(ctx
, fp0
, fd
);
11963 tcg_temp_free_i64(fp0
);
11967 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11968 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11970 TCGv_i64 fp0
= tcg_temp_new_i64();
11971 TCGv_i64 fp1
= tcg_temp_new_i64();
11972 gen_load_fpr64(ctx
, fp0
, fs
);
11973 gen_load_fpr64(ctx
, fp1
, ft
);
11974 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11975 gen_store_fpr64(ctx
, fp1
, fd
);
11976 tcg_temp_free_i64(fp1
);
11977 tcg_temp_free_i64(fp0
);
11980 check_cp1_64bitmode(ctx
);
11982 TCGv_i64 fp0
= tcg_temp_new_i64();
11984 gen_load_fpr64(ctx
, fp0
, fs
);
11985 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11986 gen_store_fpr64(ctx
, fp0
, fd
);
11987 tcg_temp_free_i64(fp0
);
11991 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11992 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11994 TCGv_i64 fp0
= tcg_temp_new_i64();
11995 TCGv_i64 fp1
= tcg_temp_new_i64();
11996 gen_load_fpr64(ctx
, fp0
, fs
);
11997 gen_load_fpr64(ctx
, fp1
, ft
);
11998 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11999 gen_store_fpr64(ctx
, fp1
, fd
);
12000 tcg_temp_free_i64(fp1
);
12001 tcg_temp_free_i64(fp0
);
12004 check_cp1_64bitmode(ctx
);
12006 TCGv_i64 fp0
= tcg_temp_new_i64();
12007 TCGv_i64 fp1
= tcg_temp_new_i64();
12009 gen_load_fpr64(ctx
, fp0
, fs
);
12010 gen_load_fpr64(ctx
, fp1
, ft
);
12011 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
12012 tcg_temp_free_i64(fp1
);
12013 gen_store_fpr64(ctx
, fp0
, fd
);
12014 tcg_temp_free_i64(fp0
);
12021 case OPC_CMP_UEQ_D
:
12022 case OPC_CMP_OLT_D
:
12023 case OPC_CMP_ULT_D
:
12024 case OPC_CMP_OLE_D
:
12025 case OPC_CMP_ULE_D
:
12027 case OPC_CMP_NGLE_D
:
12028 case OPC_CMP_SEQ_D
:
12029 case OPC_CMP_NGL_D
:
12031 case OPC_CMP_NGE_D
:
12033 case OPC_CMP_NGT_D
:
12034 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12035 if (ctx
->opcode
& (1 << 6)) {
12036 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
12038 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
12042 check_cp1_registers(ctx
, fs
);
12044 TCGv_i32 fp32
= tcg_temp_new_i32();
12045 TCGv_i64 fp64
= tcg_temp_new_i64();
12047 gen_load_fpr64(ctx
, fp64
, fs
);
12048 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
12049 tcg_temp_free_i64(fp64
);
12050 gen_store_fpr32(ctx
, fp32
, fd
);
12051 tcg_temp_free_i32(fp32
);
12055 check_cp1_registers(ctx
, fs
);
12057 TCGv_i32 fp32
= tcg_temp_new_i32();
12058 TCGv_i64 fp64
= tcg_temp_new_i64();
12060 gen_load_fpr64(ctx
, fp64
, fs
);
12061 if (ctx
->nan2008
) {
12062 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
12064 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
12066 tcg_temp_free_i64(fp64
);
12067 gen_store_fpr32(ctx
, fp32
, fd
);
12068 tcg_temp_free_i32(fp32
);
12072 check_cp1_64bitmode(ctx
);
12074 TCGv_i64 fp0
= tcg_temp_new_i64();
12076 gen_load_fpr64(ctx
, fp0
, fs
);
12077 if (ctx
->nan2008
) {
12078 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
12080 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
12082 gen_store_fpr64(ctx
, fp0
, fd
);
12083 tcg_temp_free_i64(fp0
);
12088 TCGv_i32 fp0
= tcg_temp_new_i32();
12090 gen_load_fpr32(ctx
, fp0
, fs
);
12091 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
12092 gen_store_fpr32(ctx
, fp0
, fd
);
12093 tcg_temp_free_i32(fp0
);
12097 check_cp1_registers(ctx
, fd
);
12099 TCGv_i32 fp32
= tcg_temp_new_i32();
12100 TCGv_i64 fp64
= tcg_temp_new_i64();
12102 gen_load_fpr32(ctx
, fp32
, fs
);
12103 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12104 tcg_temp_free_i32(fp32
);
12105 gen_store_fpr64(ctx
, fp64
, fd
);
12106 tcg_temp_free_i64(fp64
);
12110 check_cp1_64bitmode(ctx
);
12112 TCGv_i32 fp32
= tcg_temp_new_i32();
12113 TCGv_i64 fp64
= tcg_temp_new_i64();
12115 gen_load_fpr64(ctx
, fp64
, fs
);
12116 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12117 tcg_temp_free_i64(fp64
);
12118 gen_store_fpr32(ctx
, fp32
, fd
);
12119 tcg_temp_free_i32(fp32
);
12123 check_cp1_64bitmode(ctx
);
12125 TCGv_i64 fp0
= tcg_temp_new_i64();
12127 gen_load_fpr64(ctx
, fp0
, fs
);
12128 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12129 gen_store_fpr64(ctx
, fp0
, fd
);
12130 tcg_temp_free_i64(fp0
);
12133 case OPC_CVT_PS_PW
:
12136 TCGv_i64 fp0
= tcg_temp_new_i64();
12138 gen_load_fpr64(ctx
, fp0
, fs
);
12139 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12140 gen_store_fpr64(ctx
, fp0
, fd
);
12141 tcg_temp_free_i64(fp0
);
12147 TCGv_i64 fp0
= tcg_temp_new_i64();
12148 TCGv_i64 fp1
= tcg_temp_new_i64();
12150 gen_load_fpr64(ctx
, fp0
, fs
);
12151 gen_load_fpr64(ctx
, fp1
, ft
);
12152 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12153 tcg_temp_free_i64(fp1
);
12154 gen_store_fpr64(ctx
, fp0
, fd
);
12155 tcg_temp_free_i64(fp0
);
12161 TCGv_i64 fp0
= tcg_temp_new_i64();
12162 TCGv_i64 fp1
= tcg_temp_new_i64();
12164 gen_load_fpr64(ctx
, fp0
, fs
);
12165 gen_load_fpr64(ctx
, fp1
, ft
);
12166 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12167 tcg_temp_free_i64(fp1
);
12168 gen_store_fpr64(ctx
, fp0
, fd
);
12169 tcg_temp_free_i64(fp0
);
12175 TCGv_i64 fp0
= tcg_temp_new_i64();
12176 TCGv_i64 fp1
= tcg_temp_new_i64();
12178 gen_load_fpr64(ctx
, fp0
, fs
);
12179 gen_load_fpr64(ctx
, fp1
, ft
);
12180 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12181 tcg_temp_free_i64(fp1
);
12182 gen_store_fpr64(ctx
, fp0
, fd
);
12183 tcg_temp_free_i64(fp0
);
12189 TCGv_i64 fp0
= tcg_temp_new_i64();
12191 gen_load_fpr64(ctx
, fp0
, fs
);
12192 gen_helper_float_abs_ps(fp0
, fp0
);
12193 gen_store_fpr64(ctx
, fp0
, fd
);
12194 tcg_temp_free_i64(fp0
);
12200 TCGv_i64 fp0
= tcg_temp_new_i64();
12202 gen_load_fpr64(ctx
, fp0
, fs
);
12203 gen_store_fpr64(ctx
, fp0
, fd
);
12204 tcg_temp_free_i64(fp0
);
12210 TCGv_i64 fp0
= tcg_temp_new_i64();
12212 gen_load_fpr64(ctx
, fp0
, fs
);
12213 gen_helper_float_chs_ps(fp0
, fp0
);
12214 gen_store_fpr64(ctx
, fp0
, fd
);
12215 tcg_temp_free_i64(fp0
);
12220 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12225 TCGLabel
*l1
= gen_new_label();
12229 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12231 fp0
= tcg_temp_new_i64();
12232 gen_load_fpr64(ctx
, fp0
, fs
);
12233 gen_store_fpr64(ctx
, fp0
, fd
);
12234 tcg_temp_free_i64(fp0
);
12241 TCGLabel
*l1
= gen_new_label();
12245 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12246 fp0
= tcg_temp_new_i64();
12247 gen_load_fpr64(ctx
, fp0
, fs
);
12248 gen_store_fpr64(ctx
, fp0
, fd
);
12249 tcg_temp_free_i64(fp0
);
12257 TCGv_i64 fp0
= tcg_temp_new_i64();
12258 TCGv_i64 fp1
= tcg_temp_new_i64();
12260 gen_load_fpr64(ctx
, fp0
, ft
);
12261 gen_load_fpr64(ctx
, fp1
, fs
);
12262 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12263 tcg_temp_free_i64(fp1
);
12264 gen_store_fpr64(ctx
, fp0
, fd
);
12265 tcg_temp_free_i64(fp0
);
12271 TCGv_i64 fp0
= tcg_temp_new_i64();
12272 TCGv_i64 fp1
= tcg_temp_new_i64();
12274 gen_load_fpr64(ctx
, fp0
, ft
);
12275 gen_load_fpr64(ctx
, fp1
, fs
);
12276 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12277 tcg_temp_free_i64(fp1
);
12278 gen_store_fpr64(ctx
, fp0
, fd
);
12279 tcg_temp_free_i64(fp0
);
12282 case OPC_RECIP2_PS
:
12285 TCGv_i64 fp0
= tcg_temp_new_i64();
12286 TCGv_i64 fp1
= tcg_temp_new_i64();
12288 gen_load_fpr64(ctx
, fp0
, fs
);
12289 gen_load_fpr64(ctx
, fp1
, ft
);
12290 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12291 tcg_temp_free_i64(fp1
);
12292 gen_store_fpr64(ctx
, fp0
, fd
);
12293 tcg_temp_free_i64(fp0
);
12296 case OPC_RECIP1_PS
:
12299 TCGv_i64 fp0
= tcg_temp_new_i64();
12301 gen_load_fpr64(ctx
, fp0
, fs
);
12302 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12303 gen_store_fpr64(ctx
, fp0
, fd
);
12304 tcg_temp_free_i64(fp0
);
12307 case OPC_RSQRT1_PS
:
12310 TCGv_i64 fp0
= tcg_temp_new_i64();
12312 gen_load_fpr64(ctx
, fp0
, fs
);
12313 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12314 gen_store_fpr64(ctx
, fp0
, fd
);
12315 tcg_temp_free_i64(fp0
);
12318 case OPC_RSQRT2_PS
:
12321 TCGv_i64 fp0
= tcg_temp_new_i64();
12322 TCGv_i64 fp1
= tcg_temp_new_i64();
12324 gen_load_fpr64(ctx
, fp0
, fs
);
12325 gen_load_fpr64(ctx
, fp1
, ft
);
12326 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12327 tcg_temp_free_i64(fp1
);
12328 gen_store_fpr64(ctx
, fp0
, fd
);
12329 tcg_temp_free_i64(fp0
);
12333 check_cp1_64bitmode(ctx
);
12335 TCGv_i32 fp0
= tcg_temp_new_i32();
12337 gen_load_fpr32h(ctx
, fp0
, fs
);
12338 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12339 gen_store_fpr32(ctx
, fp0
, fd
);
12340 tcg_temp_free_i32(fp0
);
12343 case OPC_CVT_PW_PS
:
12346 TCGv_i64 fp0
= tcg_temp_new_i64();
12348 gen_load_fpr64(ctx
, fp0
, fs
);
12349 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12350 gen_store_fpr64(ctx
, fp0
, fd
);
12351 tcg_temp_free_i64(fp0
);
12355 check_cp1_64bitmode(ctx
);
12357 TCGv_i32 fp0
= tcg_temp_new_i32();
12359 gen_load_fpr32(ctx
, fp0
, fs
);
12360 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12361 gen_store_fpr32(ctx
, fp0
, fd
);
12362 tcg_temp_free_i32(fp0
);
12368 TCGv_i32 fp0
= tcg_temp_new_i32();
12369 TCGv_i32 fp1
= tcg_temp_new_i32();
12371 gen_load_fpr32(ctx
, fp0
, fs
);
12372 gen_load_fpr32(ctx
, fp1
, ft
);
12373 gen_store_fpr32h(ctx
, fp0
, fd
);
12374 gen_store_fpr32(ctx
, fp1
, fd
);
12375 tcg_temp_free_i32(fp0
);
12376 tcg_temp_free_i32(fp1
);
12382 TCGv_i32 fp0
= tcg_temp_new_i32();
12383 TCGv_i32 fp1
= tcg_temp_new_i32();
12385 gen_load_fpr32(ctx
, fp0
, fs
);
12386 gen_load_fpr32h(ctx
, fp1
, ft
);
12387 gen_store_fpr32(ctx
, fp1
, fd
);
12388 gen_store_fpr32h(ctx
, fp0
, fd
);
12389 tcg_temp_free_i32(fp0
);
12390 tcg_temp_free_i32(fp1
);
12396 TCGv_i32 fp0
= tcg_temp_new_i32();
12397 TCGv_i32 fp1
= tcg_temp_new_i32();
12399 gen_load_fpr32h(ctx
, fp0
, fs
);
12400 gen_load_fpr32(ctx
, fp1
, ft
);
12401 gen_store_fpr32(ctx
, fp1
, fd
);
12402 gen_store_fpr32h(ctx
, fp0
, fd
);
12403 tcg_temp_free_i32(fp0
);
12404 tcg_temp_free_i32(fp1
);
12410 TCGv_i32 fp0
= tcg_temp_new_i32();
12411 TCGv_i32 fp1
= tcg_temp_new_i32();
12413 gen_load_fpr32h(ctx
, fp0
, fs
);
12414 gen_load_fpr32h(ctx
, fp1
, ft
);
12415 gen_store_fpr32(ctx
, fp1
, fd
);
12416 gen_store_fpr32h(ctx
, fp0
, fd
);
12417 tcg_temp_free_i32(fp0
);
12418 tcg_temp_free_i32(fp1
);
12422 case OPC_CMP_UN_PS
:
12423 case OPC_CMP_EQ_PS
:
12424 case OPC_CMP_UEQ_PS
:
12425 case OPC_CMP_OLT_PS
:
12426 case OPC_CMP_ULT_PS
:
12427 case OPC_CMP_OLE_PS
:
12428 case OPC_CMP_ULE_PS
:
12429 case OPC_CMP_SF_PS
:
12430 case OPC_CMP_NGLE_PS
:
12431 case OPC_CMP_SEQ_PS
:
12432 case OPC_CMP_NGL_PS
:
12433 case OPC_CMP_LT_PS
:
12434 case OPC_CMP_NGE_PS
:
12435 case OPC_CMP_LE_PS
:
12436 case OPC_CMP_NGT_PS
:
12437 if (ctx
->opcode
& (1 << 6)) {
12438 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
12440 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
12444 MIPS_INVAL("farith");
12445 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst(DisasContext *ctx, uint32_t opc,
                          int fd, int fs, int base, int index)
{
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /*
     * Don't do NOP if destination is zero: we must perform the actual
     * memory access.
     */
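    /*
     * (The access itself is the architecturally visible effect here: it can
     * still raise an address error or TLB exception, which is presumably why
     * it must not be skipped even when the destination write is irrelevant.)
     */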
    switch (opc) {
    case OPC_LWXC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDXC1:
        check_cp1_registers(ctx, fd);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_LUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SWXC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SDXC1:
        check_cp1_registers(ctx, fs);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    }
    tcg_temp_free(t0);
}
12533 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
12534 int fd
, int fr
, int fs
, int ft
)
12540 TCGv t0
= tcg_temp_local_new();
12541 TCGv_i32 fp
= tcg_temp_new_i32();
12542 TCGv_i32 fph
= tcg_temp_new_i32();
12543 TCGLabel
*l1
= gen_new_label();
12544 TCGLabel
*l2
= gen_new_label();
12546 gen_load_gpr(t0
, fr
);
12547 tcg_gen_andi_tl(t0
, t0
, 0x7);
12549 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12550 gen_load_fpr32(ctx
, fp
, fs
);
12551 gen_load_fpr32h(ctx
, fph
, fs
);
12552 gen_store_fpr32(ctx
, fp
, fd
);
12553 gen_store_fpr32h(ctx
, fph
, fd
);
12556 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12558 #ifdef TARGET_WORDS_BIGENDIAN
12559 gen_load_fpr32(ctx
, fp
, fs
);
12560 gen_load_fpr32h(ctx
, fph
, ft
);
12561 gen_store_fpr32h(ctx
, fp
, fd
);
12562 gen_store_fpr32(ctx
, fph
, fd
);
12564 gen_load_fpr32h(ctx
, fph
, fs
);
12565 gen_load_fpr32(ctx
, fp
, ft
);
12566 gen_store_fpr32(ctx
, fph
, fd
);
12567 gen_store_fpr32h(ctx
, fp
, fd
);
12570 tcg_temp_free_i32(fp
);
12571 tcg_temp_free_i32(fph
);
12577 TCGv_i32 fp0
= tcg_temp_new_i32();
12578 TCGv_i32 fp1
= tcg_temp_new_i32();
12579 TCGv_i32 fp2
= tcg_temp_new_i32();
12581 gen_load_fpr32(ctx
, fp0
, fs
);
12582 gen_load_fpr32(ctx
, fp1
, ft
);
12583 gen_load_fpr32(ctx
, fp2
, fr
);
12584 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12585 tcg_temp_free_i32(fp0
);
12586 tcg_temp_free_i32(fp1
);
12587 gen_store_fpr32(ctx
, fp2
, fd
);
12588 tcg_temp_free_i32(fp2
);
12593 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12595 TCGv_i64 fp0
= tcg_temp_new_i64();
12596 TCGv_i64 fp1
= tcg_temp_new_i64();
12597 TCGv_i64 fp2
= tcg_temp_new_i64();
12599 gen_load_fpr64(ctx
, fp0
, fs
);
12600 gen_load_fpr64(ctx
, fp1
, ft
);
12601 gen_load_fpr64(ctx
, fp2
, fr
);
12602 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12603 tcg_temp_free_i64(fp0
);
12604 tcg_temp_free_i64(fp1
);
12605 gen_store_fpr64(ctx
, fp2
, fd
);
12606 tcg_temp_free_i64(fp2
);
12612 TCGv_i64 fp0
= tcg_temp_new_i64();
12613 TCGv_i64 fp1
= tcg_temp_new_i64();
12614 TCGv_i64 fp2
= tcg_temp_new_i64();
12616 gen_load_fpr64(ctx
, fp0
, fs
);
12617 gen_load_fpr64(ctx
, fp1
, ft
);
12618 gen_load_fpr64(ctx
, fp2
, fr
);
12619 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12620 tcg_temp_free_i64(fp0
);
12621 tcg_temp_free_i64(fp1
);
12622 gen_store_fpr64(ctx
, fp2
, fd
);
12623 tcg_temp_free_i64(fp2
);
12629 TCGv_i32 fp0
= tcg_temp_new_i32();
12630 TCGv_i32 fp1
= tcg_temp_new_i32();
12631 TCGv_i32 fp2
= tcg_temp_new_i32();
12633 gen_load_fpr32(ctx
, fp0
, fs
);
12634 gen_load_fpr32(ctx
, fp1
, ft
);
12635 gen_load_fpr32(ctx
, fp2
, fr
);
12636 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12637 tcg_temp_free_i32(fp0
);
12638 tcg_temp_free_i32(fp1
);
12639 gen_store_fpr32(ctx
, fp2
, fd
);
12640 tcg_temp_free_i32(fp2
);
12645 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12647 TCGv_i64 fp0
= tcg_temp_new_i64();
12648 TCGv_i64 fp1
= tcg_temp_new_i64();
12649 TCGv_i64 fp2
= tcg_temp_new_i64();
12651 gen_load_fpr64(ctx
, fp0
, fs
);
12652 gen_load_fpr64(ctx
, fp1
, ft
);
12653 gen_load_fpr64(ctx
, fp2
, fr
);
12654 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12655 tcg_temp_free_i64(fp0
);
12656 tcg_temp_free_i64(fp1
);
12657 gen_store_fpr64(ctx
, fp2
, fd
);
12658 tcg_temp_free_i64(fp2
);
12664 TCGv_i64 fp0
= tcg_temp_new_i64();
12665 TCGv_i64 fp1
= tcg_temp_new_i64();
12666 TCGv_i64 fp2
= tcg_temp_new_i64();
12668 gen_load_fpr64(ctx
, fp0
, fs
);
12669 gen_load_fpr64(ctx
, fp1
, ft
);
12670 gen_load_fpr64(ctx
, fp2
, fr
);
12671 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12672 tcg_temp_free_i64(fp0
);
12673 tcg_temp_free_i64(fp1
);
12674 gen_store_fpr64(ctx
, fp2
, fd
);
12675 tcg_temp_free_i64(fp2
);
12681 TCGv_i32 fp0
= tcg_temp_new_i32();
12682 TCGv_i32 fp1
= tcg_temp_new_i32();
12683 TCGv_i32 fp2
= tcg_temp_new_i32();
12685 gen_load_fpr32(ctx
, fp0
, fs
);
12686 gen_load_fpr32(ctx
, fp1
, ft
);
12687 gen_load_fpr32(ctx
, fp2
, fr
);
12688 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12689 tcg_temp_free_i32(fp0
);
12690 tcg_temp_free_i32(fp1
);
12691 gen_store_fpr32(ctx
, fp2
, fd
);
12692 tcg_temp_free_i32(fp2
);
12697 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12699 TCGv_i64 fp0
= tcg_temp_new_i64();
12700 TCGv_i64 fp1
= tcg_temp_new_i64();
12701 TCGv_i64 fp2
= tcg_temp_new_i64();
12703 gen_load_fpr64(ctx
, fp0
, fs
);
12704 gen_load_fpr64(ctx
, fp1
, ft
);
12705 gen_load_fpr64(ctx
, fp2
, fr
);
12706 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12707 tcg_temp_free_i64(fp0
);
12708 tcg_temp_free_i64(fp1
);
12709 gen_store_fpr64(ctx
, fp2
, fd
);
12710 tcg_temp_free_i64(fp2
);
12716 TCGv_i64 fp0
= tcg_temp_new_i64();
12717 TCGv_i64 fp1
= tcg_temp_new_i64();
12718 TCGv_i64 fp2
= tcg_temp_new_i64();
12720 gen_load_fpr64(ctx
, fp0
, fs
);
12721 gen_load_fpr64(ctx
, fp1
, ft
);
12722 gen_load_fpr64(ctx
, fp2
, fr
);
12723 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12724 tcg_temp_free_i64(fp0
);
12725 tcg_temp_free_i64(fp1
);
12726 gen_store_fpr64(ctx
, fp2
, fd
);
12727 tcg_temp_free_i64(fp2
);
12733 TCGv_i32 fp0
= tcg_temp_new_i32();
12734 TCGv_i32 fp1
= tcg_temp_new_i32();
12735 TCGv_i32 fp2
= tcg_temp_new_i32();
12737 gen_load_fpr32(ctx
, fp0
, fs
);
12738 gen_load_fpr32(ctx
, fp1
, ft
);
12739 gen_load_fpr32(ctx
, fp2
, fr
);
12740 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12741 tcg_temp_free_i32(fp0
);
12742 tcg_temp_free_i32(fp1
);
12743 gen_store_fpr32(ctx
, fp2
, fd
);
12744 tcg_temp_free_i32(fp2
);
12749 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12751 TCGv_i64 fp0
= tcg_temp_new_i64();
12752 TCGv_i64 fp1
= tcg_temp_new_i64();
12753 TCGv_i64 fp2
= tcg_temp_new_i64();
12755 gen_load_fpr64(ctx
, fp0
, fs
);
12756 gen_load_fpr64(ctx
, fp1
, ft
);
12757 gen_load_fpr64(ctx
, fp2
, fr
);
12758 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12759 tcg_temp_free_i64(fp0
);
12760 tcg_temp_free_i64(fp1
);
12761 gen_store_fpr64(ctx
, fp2
, fd
);
12762 tcg_temp_free_i64(fp2
);
12768 TCGv_i64 fp0
= tcg_temp_new_i64();
12769 TCGv_i64 fp1
= tcg_temp_new_i64();
12770 TCGv_i64 fp2
= tcg_temp_new_i64();
12772 gen_load_fpr64(ctx
, fp0
, fs
);
12773 gen_load_fpr64(ctx
, fp1
, ft
);
12774 gen_load_fpr64(ctx
, fp2
, fr
);
12775 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12776 tcg_temp_free_i64(fp0
);
12777 tcg_temp_free_i64(fp1
);
12778 gen_store_fpr64(ctx
, fp2
, fd
);
12779 tcg_temp_free_i64(fp2
);
12783 MIPS_INVAL("flt3_arith");
12784 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /*
     * The Linux kernel will emulate rdhwr if it's not supported natively.
     * Therefore only check the ISA in system mode.
     */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        /*
         * Break the TB to be able to take timer interrupts immediately
         * after reading count. DISAS_STOP isn't sufficient, we need to ensure
         * we break completely out of translated code.
         */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /*
             * Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
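/*
 * RDHWR is the user-mode window onto these hardware registers. Access is
 * controlled by CP0 HWREna: the UserLocal case is checked inline via
 * MIPS_HFLAG_HWRENA_ULR, while the remaining registers are expected to be
 * validated in their helpers. Register 29 (UserLocal) is what user-space
 * TLS code typically reads, e.g. "rdhwr $3, $29".
 */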
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->base.is_jmp == DISAS_NEXT) {
        save_cpu_state(ctx, 0);
    } else {
        /*
         * It is not safe to save ctx->hflags as hflags may be changed
         * in execution time by the instruction in delay / forbidden slot.
         */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->base.singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_lookup_and_goto_ptr();
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
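/*
 * In the conditional case above both outcomes are emitted as goto_tb exits
 * (slot 1 for the fall-through path, slot 0 for the taken path), so either
 * successor translation block can be chained directly without going back
 * through the main loop.
 */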
12950 /* Compact Branches */
12951 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12952 int rs
, int rt
, int32_t offset
)
12954 int bcond_compute
= 0;
12955 TCGv t0
= tcg_temp_new();
12956 TCGv t1
= tcg_temp_new();
12957 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12959 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12960 #ifdef MIPS_DEBUG_DISAS
12961 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12962 "\n", ctx
->base
.pc_next
);
12964 generate_exception_end(ctx
, EXCP_RI
);
    /* Load needed operands and calculate btarget */
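    /*
     * In every case below btarget ends up as the address of the instruction
     * following the branch (pc_next + 4) plus the sign-extended offset
     * supplied by the caller; JIC/JIALC instead form it from rt plus the
     * offset.
     */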
12970 /* compact branch */
12971 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12972 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12973 gen_load_gpr(t0
, rs
);
12974 gen_load_gpr(t1
, rt
);
12976 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12977 if (rs
<= rt
&& rs
== 0) {
12978 /* OPC_BEQZALC, OPC_BNEZALC */
12979 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12982 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12983 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12984 gen_load_gpr(t0
, rs
);
12985 gen_load_gpr(t1
, rt
);
12987 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12989 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12990 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12991 if (rs
== 0 || rs
== rt
) {
12992 /* OPC_BLEZALC, OPC_BGEZALC */
12993 /* OPC_BGTZALC, OPC_BLTZALC */
12994 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12996 gen_load_gpr(t0
, rs
);
12997 gen_load_gpr(t1
, rt
);
12999 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13003 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13008 /* OPC_BEQZC, OPC_BNEZC */
13009 gen_load_gpr(t0
, rs
);
13011 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13013 /* OPC_JIC, OPC_JIALC */
13014 TCGv tbase
= tcg_temp_new();
13015 TCGv toffset
= tcg_temp_new();
13017 gen_load_gpr(tbase
, rt
);
13018 tcg_gen_movi_tl(toffset
, offset
);
13019 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
13020 tcg_temp_free(tbase
);
13021 tcg_temp_free(toffset
);
13025 MIPS_INVAL("Compact branch/jump");
13026 generate_exception_end(ctx
, EXCP_RI
);
13030 if (bcond_compute
== 0) {
13031 /* Uncoditional compact branch */
13034 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13037 ctx
->hflags
|= MIPS_HFLAG_BR
;
13040 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13043 ctx
->hflags
|= MIPS_HFLAG_B
;
13046 MIPS_INVAL("Compact branch/jump");
13047 generate_exception_end(ctx
, EXCP_RI
);
13051 /* Generating branch here as compact branches don't have delay slot */
13052 gen_branch(ctx
, 4);
13054 /* Conditional compact branch */
13055 TCGLabel
*fs
= gen_new_label();
13056 save_cpu_state(ctx
, 0);
13059 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
13060 if (rs
== 0 && rt
!= 0) {
13062 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13063 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13065 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13068 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
13071 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
13072 if (rs
== 0 && rt
!= 0) {
13074 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13075 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13077 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13080 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
13083 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13084 if (rs
== 0 && rt
!= 0) {
13086 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13087 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13089 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13092 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
13095 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13096 if (rs
== 0 && rt
!= 0) {
13098 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13099 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13101 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13104 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13107 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13108 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13110 /* OPC_BOVC, OPC_BNVC */
13111 TCGv t2
= tcg_temp_new();
13112 TCGv t3
= tcg_temp_new();
13113 TCGv t4
= tcg_temp_new();
13114 TCGv input_overflow
= tcg_temp_new();
13116 gen_load_gpr(t0
, rs
);
13117 gen_load_gpr(t1
, rt
);
13118 tcg_gen_ext32s_tl(t2
, t0
);
13119 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13120 tcg_gen_ext32s_tl(t3
, t1
);
13121 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13122 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13124 tcg_gen_add_tl(t4
, t2
, t3
);
13125 tcg_gen_ext32s_tl(t4
, t4
);
13126 tcg_gen_xor_tl(t2
, t2
, t3
);
13127 tcg_gen_xor_tl(t3
, t4
, t3
);
13128 tcg_gen_andc_tl(t2
, t3
, t2
);
13129 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13130 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13131 if (opc
== OPC_BOVC
) {
13133 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13136 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13138 tcg_temp_free(input_overflow
);
13142 } else if (rs
< rt
&& rs
== 0) {
13143 /* OPC_BEQZALC, OPC_BNEZALC */
13144 if (opc
== OPC_BEQZALC
) {
13146 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13149 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13152 /* OPC_BEQC, OPC_BNEC */
13153 if (opc
== OPC_BEQC
) {
13155 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13158 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13163 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13166 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13169 MIPS_INVAL("Compact conditional branch/jump");
13170 generate_exception_end(ctx
, EXCP_RI
);
13174 /* Generating branch here as compact branches don't have delay slot */
13175 gen_goto_tb(ctx
, 1, ctx
->btarget
);
        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
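        /*
         * MIPS_HFLAG_FBNSLOT marks the next instruction as sitting in a
         * forbidden slot: it is only reached on the not-taken path of a
         * conditional compact branch and, per the R6 rules, must not itself
         * be a branch.
         */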
13186 /* ISA extensions (ASEs) */
13187 /* MIPS16 extension to MIPS32 */
13189 /* MIPS16 major opcodes */
13191 M16_OPC_ADDIUSP
= 0x00,
13192 M16_OPC_ADDIUPC
= 0x01,
13194 M16_OPC_JAL
= 0x03,
13195 M16_OPC_BEQZ
= 0x04,
13196 M16_OPC_BNEQZ
= 0x05,
13197 M16_OPC_SHIFT
= 0x06,
13199 M16_OPC_RRIA
= 0x08,
13200 M16_OPC_ADDIU8
= 0x09,
13201 M16_OPC_SLTI
= 0x0a,
13202 M16_OPC_SLTIU
= 0x0b,
13205 M16_OPC_CMPI
= 0x0e,
13209 M16_OPC_LWSP
= 0x12,
13211 M16_OPC_LBU
= 0x14,
13212 M16_OPC_LHU
= 0x15,
13213 M16_OPC_LWPC
= 0x16,
13214 M16_OPC_LWU
= 0x17,
13217 M16_OPC_SWSP
= 0x1a,
13219 M16_OPC_RRR
= 0x1c,
13221 M16_OPC_EXTEND
= 0x1e,
13225 /* I8 funct field */
13244 /* RR funct field */
13278 /* I64 funct field */
13286 I64_DADDIUPC
= 0x6,
13290 /* RR ry field for CNVT */
13292 RR_RY_CNVT_ZEB
= 0x0,
13293 RR_RY_CNVT_ZEH
= 0x1,
13294 RR_RY_CNVT_ZEW
= 0x2,
13295 RR_RY_CNVT_SEB
= 0x4,
13296 RR_RY_CNVT_SEH
= 0x5,
13297 RR_RY_CNVT_SEW
= 0x6,
static int xlat(int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
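/*
 * MIPS16 instructions encode registers in 3 bits; the table above expands
 * that encoding to the conventional register numbers $16, $17 and $2..$7.
 */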
13307 static void gen_mips16_save(DisasContext
*ctx
,
13308 int xsregs
, int aregs
,
13309 int do_ra
, int do_s0
, int do_s1
,
13312 TCGv t0
= tcg_temp_new();
13313 TCGv t1
= tcg_temp_new();
13314 TCGv t2
= tcg_temp_new();
13344 generate_exception_end(ctx
, EXCP_RI
);
13350 gen_base_offset_addr(ctx
, t0
, 29, 12);
13351 gen_load_gpr(t1
, 7);
13352 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13355 gen_base_offset_addr(ctx
, t0
, 29, 8);
13356 gen_load_gpr(t1
, 6);
13357 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13360 gen_base_offset_addr(ctx
, t0
, 29, 4);
13361 gen_load_gpr(t1
, 5);
13362 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13365 gen_base_offset_addr(ctx
, t0
, 29, 0);
13366 gen_load_gpr(t1
, 4);
13367 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13370 gen_load_gpr(t0
, 29);
#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)
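/*
 * SAVE pushes the selected registers downwards from the stack pointer held
 * in t0, 4 bytes at a time, in the fixed order used below ($31 first, then
 * $30, $23..$16), before $29 is finally dropped by the frame size.
 */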
13380 DECR_AND_STORE(31);
13385 DECR_AND_STORE(30);
13388 DECR_AND_STORE(23);
13391 DECR_AND_STORE(22);
13394 DECR_AND_STORE(21);
13397 DECR_AND_STORE(20);
13400 DECR_AND_STORE(19);
13403 DECR_AND_STORE(18);
13407 DECR_AND_STORE(17);
13410 DECR_AND_STORE(16);
13440 generate_exception_end(ctx
, EXCP_RI
);
13456 #undef DECR_AND_STORE
13458 tcg_gen_movi_tl(t2
, -framesize
);
13459 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13465 static void gen_mips16_restore(DisasContext
*ctx
,
13466 int xsregs
, int aregs
,
13467 int do_ra
, int do_s0
, int do_s1
,
13471 TCGv t0
= tcg_temp_new();
13472 TCGv t1
= tcg_temp_new();
13473 TCGv t2
= tcg_temp_new();
13475 tcg_gen_movi_tl(t2
, framesize
);
13476 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13478 #define DECR_AND_LOAD(reg) do { \
13479 tcg_gen_movi_tl(t2, -4); \
13480 gen_op_addr_add(ctx, t0, t0, t2); \
13481 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13482 gen_store_gpr(t1, reg); \
13546 generate_exception_end(ctx
, EXCP_RI
);
13562 #undef DECR_AND_LOAD
13564 tcg_gen_movi_tl(t2
, framesize
);
13565 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13571 static void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
13572 int is_64_bit
, int extended
)
13576 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13577 generate_exception_end(ctx
, EXCP_RI
);
13581 t0
= tcg_temp_new();
13583 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13584 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13586 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13592 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13595 TCGv_i32 t0
= tcg_const_i32(op
);
13596 TCGv t1
= tcg_temp_new();
13597 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13598 gen_helper_cache(cpu_env
, t1
, t0
);
13601 #if defined(TARGET_MIPS64)
13602 static void decode_i64_mips16(DisasContext
*ctx
,
13603 int ry
, int funct
, int16_t offset
,
13608 check_insn(ctx
, ISA_MIPS3
);
13609 check_mips_64(ctx
);
13610 offset
= extended
? offset
: offset
<< 3;
13611 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13614 check_insn(ctx
, ISA_MIPS3
);
13615 check_mips_64(ctx
);
13616 offset
= extended
? offset
: offset
<< 3;
13617 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13620 check_insn(ctx
, ISA_MIPS3
);
13621 check_mips_64(ctx
);
13622 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13623 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13626 check_insn(ctx
, ISA_MIPS3
);
13627 check_mips_64(ctx
);
13628 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13629 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13632 check_insn(ctx
, ISA_MIPS3
);
13633 check_mips_64(ctx
);
13634 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13635 generate_exception_end(ctx
, EXCP_RI
);
13637 offset
= extended
? offset
: offset
<< 3;
13638 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13642 check_insn(ctx
, ISA_MIPS3
);
13643 check_mips_64(ctx
);
13644 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13645 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13648 check_insn(ctx
, ISA_MIPS3
);
13649 check_mips_64(ctx
);
13650 offset
= extended
? offset
: offset
<< 2;
13651 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13654 check_insn(ctx
, ISA_MIPS3
);
13655 check_mips_64(ctx
);
13656 offset
= extended
? offset
: offset
<< 2;
13657 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13663 static int decode_extended_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13665 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13666 int op
, rx
, ry
, funct
, sa
;
13667 int16_t imm
, offset
;
13669 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13670 op
= (ctx
->opcode
>> 11) & 0x1f;
13671 sa
= (ctx
->opcode
>> 22) & 0x1f;
13672 funct
= (ctx
->opcode
>> 8) & 0x7;
13673 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13674 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13675 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13676 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13677 | (ctx
->opcode
& 0x1f));
    /*
     * The extended opcodes cleverly reuse the opcodes from their 16-bit
     * counterparts: the EXTEND prefix only supplies the missing immediate
     * bits.
     */
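    /*
     * Concretely, with the EXTEND prefix sitting in the upper halfword of
     * ctx->opcode: prefix bits [4:0] become imm[15:11], prefix bits [10:5]
     * become imm[10:5], and the base instruction's low 5 bits supply
     * imm[4:0], exactly as assembled above.
     */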
13684 case M16_OPC_ADDIUSP
:
13685 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13687 case M16_OPC_ADDIUPC
:
13688 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13691 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13692 /* No delay slot, so just process as a normal instruction */
13695 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13696 /* No delay slot, so just process as a normal instruction */
13698 case M16_OPC_BNEQZ
:
13699 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13700 /* No delay slot, so just process as a normal instruction */
13702 case M16_OPC_SHIFT
:
13703 switch (ctx
->opcode
& 0x3) {
13705 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13708 #if defined(TARGET_MIPS64)
13709 check_mips_64(ctx
);
13710 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13712 generate_exception_end(ctx
, EXCP_RI
);
13716 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13719 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13723 #if defined(TARGET_MIPS64)
13725 check_insn(ctx
, ISA_MIPS3
);
13726 check_mips_64(ctx
);
13727 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13731 imm
= ctx
->opcode
& 0xf;
13732 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13733 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13734 imm
= (int16_t) (imm
<< 1) >> 1;
13735 if ((ctx
->opcode
>> 4) & 0x1) {
13736 #if defined(TARGET_MIPS64)
13737 check_mips_64(ctx
);
13738 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13740 generate_exception_end(ctx
, EXCP_RI
);
13743 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13746 case M16_OPC_ADDIU8
:
13747 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13750 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13752 case M16_OPC_SLTIU
:
13753 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13758 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13761 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13764 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13767 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13770 check_insn(ctx
, ISA_MIPS32
);
13772 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13773 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13774 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13775 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13776 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13777 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13778 | (ctx
->opcode
& 0xf)) << 3;
13780 if (ctx
->opcode
& (1 << 7)) {
13781 gen_mips16_save(ctx
, xsregs
, aregs
,
13782 do_ra
, do_s0
, do_s1
,
13785 gen_mips16_restore(ctx
, xsregs
, aregs
,
13786 do_ra
, do_s0
, do_s1
,
13792 generate_exception_end(ctx
, EXCP_RI
);
13797 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13800 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13802 #if defined(TARGET_MIPS64)
13804 check_insn(ctx
, ISA_MIPS3
);
13805 check_mips_64(ctx
);
13806 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13810 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13813 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13816 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13819 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13822 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13825 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13828 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13830 #if defined(TARGET_MIPS64)
13832 check_insn(ctx
, ISA_MIPS3
);
13833 check_mips_64(ctx
);
13834 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13838 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13841 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13844 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13847 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13849 #if defined(TARGET_MIPS64)
13851 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13855 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}

#ifdef CONFIG_USER_ONLY
/* The above should dead-code away any calls to this.. */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
#endif
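/*
 * "UHI" is the MIPS Unified Hosting Interface: semihosting requests are
 * made with an SDBBP whose code field is 1, which is what the
 * sdbbp_code == 1 test above recognises.
 */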
13879 static int decode_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13883 int op
, cnvt_op
, op1
, offset
;
13887 op
= (ctx
->opcode
>> 11) & 0x1f;
13888 sa
= (ctx
->opcode
>> 2) & 0x7;
13889 sa
= sa
== 0 ? 8 : sa
;
13890 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13891 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13892 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13893 op1
= offset
= ctx
->opcode
& 0x1f;
13898 case M16_OPC_ADDIUSP
:
13900 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13902 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13905 case M16_OPC_ADDIUPC
:
13906 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13909 offset
= (ctx
->opcode
& 0x7ff) << 1;
13910 offset
= (int16_t)(offset
<< 4) >> 4;
13911 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13912 /* No delay slot, so just process as a normal instruction */
13915 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13916 offset
= (((ctx
->opcode
& 0x1f) << 21)
13917 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13919 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13920 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13924 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13925 ((int8_t)ctx
->opcode
) << 1, 0);
13926 /* No delay slot, so just process as a normal instruction */
13928 case M16_OPC_BNEQZ
:
13929 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13930 ((int8_t)ctx
->opcode
) << 1, 0);
13931 /* No delay slot, so just process as a normal instruction */
13933 case M16_OPC_SHIFT
:
13934 switch (ctx
->opcode
& 0x3) {
13936 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13939 #if defined(TARGET_MIPS64)
13940 check_insn(ctx
, ISA_MIPS3
);
13941 check_mips_64(ctx
);
13942 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13944 generate_exception_end(ctx
, EXCP_RI
);
13948 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13951 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13955 #if defined(TARGET_MIPS64)
13957 check_insn(ctx
, ISA_MIPS3
);
13958 check_mips_64(ctx
);
13959 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13964 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13966 if ((ctx
->opcode
>> 4) & 1) {
13967 #if defined(TARGET_MIPS64)
13968 check_insn(ctx
, ISA_MIPS3
);
13969 check_mips_64(ctx
);
13970 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13972 generate_exception_end(ctx
, EXCP_RI
);
13975 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13979 case M16_OPC_ADDIU8
:
13981 int16_t imm
= (int8_t) ctx
->opcode
;
13983 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13988 int16_t imm
= (uint8_t) ctx
->opcode
;
13989 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13992 case M16_OPC_SLTIU
:
13994 int16_t imm
= (uint8_t) ctx
->opcode
;
13995 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
14002 funct
= (ctx
->opcode
>> 8) & 0x7;
14005 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
14006 ((int8_t)ctx
->opcode
) << 1, 0);
14009 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
14010 ((int8_t)ctx
->opcode
) << 1, 0);
14013 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
14016 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
14017 ((int8_t)ctx
->opcode
) << 3);
14020 check_insn(ctx
, ISA_MIPS32
);
14022 int do_ra
= ctx
->opcode
& (1 << 6);
14023 int do_s0
= ctx
->opcode
& (1 << 5);
14024 int do_s1
= ctx
->opcode
& (1 << 4);
14025 int framesize
= ctx
->opcode
& 0xf;
14027 if (framesize
== 0) {
14030 framesize
= framesize
<< 3;
14033 if (ctx
->opcode
& (1 << 7)) {
14034 gen_mips16_save(ctx
, 0, 0,
14035 do_ra
, do_s0
, do_s1
, framesize
);
14037 gen_mips16_restore(ctx
, 0, 0,
14038 do_ra
, do_s0
, do_s1
, framesize
);
14044 int rz
= xlat(ctx
->opcode
& 0x7);
14046 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
14047 ((ctx
->opcode
>> 5) & 0x7);
14048 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
14052 reg32
= ctx
->opcode
& 0x1f;
14053 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
14056 generate_exception_end(ctx
, EXCP_RI
);
14063 int16_t imm
= (uint8_t) ctx
->opcode
;
14065 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
14070 int16_t imm
= (uint8_t) ctx
->opcode
;
14071 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
14074 #if defined(TARGET_MIPS64)
14076 check_insn(ctx
, ISA_MIPS3
);
14077 check_mips_64(ctx
);
14078 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
14082 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
14085 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
14088 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14091 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
14094 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
14097 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
14100 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
14102 #if defined(TARGET_MIPS64)
14104 check_insn(ctx
, ISA_MIPS3
);
14105 check_mips_64(ctx
);
14106 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
14110 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14113 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14116 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14119 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14123 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14126 switch (ctx
->opcode
& 0x3) {
14128 mips32_op
= OPC_ADDU
;
14131 mips32_op
= OPC_SUBU
;
14133 #if defined(TARGET_MIPS64)
14135 mips32_op
= OPC_DADDU
;
14136 check_insn(ctx
, ISA_MIPS3
);
14137 check_mips_64(ctx
);
14140 mips32_op
= OPC_DSUBU
;
14141 check_insn(ctx
, ISA_MIPS3
);
14142 check_mips_64(ctx
);
14146 generate_exception_end(ctx
, EXCP_RI
);
14150 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14159 int nd
= (ctx
->opcode
>> 7) & 0x1;
14160 int link
= (ctx
->opcode
>> 6) & 0x1;
14161 int ra
= (ctx
->opcode
>> 5) & 0x1;
14164 check_insn(ctx
, ISA_MIPS32
);
14173 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14178 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14179 gen_helper_do_semihosting(cpu_env
);
14182 * XXX: not clear which exception should be raised
14183 * when in debug mode...
14185 check_insn(ctx
, ISA_MIPS32
);
14186 generate_exception_end(ctx
, EXCP_DBp
);
        gen_slt(ctx, OPC_SLT, 24, rx, ry);
        gen_slt(ctx, OPC_SLTU, 24, rx, ry);
        generate_exception_end(ctx, EXCP_BREAK);
        gen_shift(ctx, OPC_SLLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRAV, ry, rx, ry);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRL, ry, ry, sa);
#endif
        gen_logic(ctx, OPC_XOR, 24, rx, ry);
        gen_arith(ctx, OPC_SUBU, rx, 0, ry);
        gen_logic(ctx, OPC_AND, rx, rx, ry);
        gen_logic(ctx, OPC_OR, rx, rx, ry);
        gen_logic(ctx, OPC_XOR, rx, rx, ry);
        gen_logic(ctx, OPC_NOR, rx, ry, 0);
        gen_HILO(ctx, OPC_MFHI, 0, rx);
        check_insn(ctx, ISA_MIPS32);
        case RR_RY_CNVT_ZEB:
            tcg_gen_ext8u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
        case RR_RY_CNVT_ZEH:
            tcg_gen_ext16u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
        case RR_RY_CNVT_SEB:
            tcg_gen_ext8s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
        case RR_RY_CNVT_SEH:
            tcg_gen_ext16s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
#if defined(TARGET_MIPS64)
        case RR_RY_CNVT_ZEW:
            check_insn(ctx, ISA_MIPS64);
            check_mips_64(ctx);
            tcg_gen_ext32u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
        case RR_RY_CNVT_SEW:
            check_insn(ctx, ISA_MIPS64);
            check_mips_64(ctx);
            tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            break;
#endif
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        gen_HILO(ctx, OPC_MFLO, 0, rx);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRA, ry, ry, sa);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSLLV, ry, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRLV, ry, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRAV, ry, rx, ry);
#endif
        gen_muldiv(ctx, OPC_MULT, 0, rx, ry);
        gen_muldiv(ctx, OPC_MULTU, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIV, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIVU, 0, rx, ry);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULT, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULTU, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIV, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIVU, 0, rx, ry);
#endif
        generate_exception_end(ctx, EXCP_RI);
    case M16_OPC_EXTEND:
        decode_extended_mips16_opc(env, ctx);
        break;
#if defined(TARGET_MIPS64)
        funct = (ctx->opcode >> 8) & 0x7;
        decode_i64_mips16(ctx, ry, funct, offset, 0);
#endif
        generate_exception_end(ctx, EXCP_RI);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */
    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36, /* MIPS64 */
    LD32 = 0x37, /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */
/*
 * These opcodes are distinguished only by bits 9..6; those bits are
 * what are recorded below.
 */
    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */
/*
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers Volume IV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *    Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
 */

    /* begin of microMIPS32 DSP */
    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */
    /* end of microMIPS32 DSP */

    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */
/* POOL32C encoding of minor opcode field (bits 15..12) */
/* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
/* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c, /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */
/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */
/* POOL16A encoding of minor opcode field */
/* POOL16B encoding of minor opcode field */
/* POOL16C encoding of minor opcode field */
/* R6 POOL16C encoding of minor opcode field (bits 0..5) */
/* POOL16D encoding of minor opcode field */
/* POOL16E encoding of minor opcode field */
static int mmreg(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2(int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32 - width)))                \
               << (32 - width))                                         \
     >> (32 - width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32 - width)))
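
/*
 * Illustrative example of the two extractors above: for a 4-bit field
 * starting at bit 1 (as used by ADDIUS5 below), SIMM(op, 1, 4) pulls
 * out bits 4..1 and sign-extends them, so a field value of 0xf yields
 * -1, whereas ZIMM(op, 1, 4) returns the raw value 15.
 */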
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
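
/*
 * The 9-bit ADDIUSP field maps to a signed word count non-linearly:
 * encodings 0 and 1 select 256 and 257, 2..255 are used as-is,
 * 256..509 select negative counts (encoded - 512), and 510..511
 * select -258 and -257 (encoded - 768).  The count is scaled by 4
 * before being added to the stack pointer.
 */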
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
static void gen_ldst_multiple(DisasContext *ctx, uint32_t opc, int reglist,
                              int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
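
/*
 * The multi-register transfers are done entirely in helpers: t0 holds
 * the computed base+offset address, t1 the raw register-list field and
 * t2 the memory index, and the helper walks the register list itself.
 * The operation is refused (RI) when it appears in a branch delay slot.
 */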
static void gen_pool16c_insn(DisasContext *ctx)
{
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
        gen_logic(ctx, OPC_NOR, rd, rs, 0);
        gen_logic(ctx, OPC_XOR, rd, rd, rs);
        gen_logic(ctx, OPC_AND, rd, rd, rs);
        gen_logic(ctx, OPC_OR, rd, rd, rs);

        static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);
        gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
                          29, offset << 2);

        static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);
        gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
                          29, offset << 2);

        int reg = ctx->opcode & 0x1f;
        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 4);

        int reg = ctx->opcode & 0x1f;
        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 0);
        /*
         * Let normal delay slot handling in our caller take us
         * to the branch target.
         */

        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_HILO(ctx, OPC_MFHI, 0, uMIPS_RS5(ctx->opcode));
        gen_HILO(ctx, OPC_MFLO, 0, uMIPS_RS5(ctx->opcode));
        generate_exception_end(ctx, EXCP_BREAK);

        if (is_uhi(extract32(ctx->opcode, 0, 4))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /*
             * XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }

    case JRADDIUSP + 0:
    case JRADDIUSP + 1:
        {
            int imm = ZIMM(ctx->opcode, 0, 5);

            gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
            /*
             * Let normal delay slot handling in our caller take us
             * to the branch target.
             */
        }
        break;
        generate_exception_end(ctx, EXCP_RI);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
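
/*
 * MOVEP copies two GPRs with one 16-bit instruction: the destination
 * pair is looked up in rd_enc[]/re_enc[] and the two sources in
 * rs_rt_enc[], where a source encoding of 0 stands for the zero
 * register and is emitted as a move of constant 0.
 */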
static void gen_pool16c_r6_insn(DisasContext *ctx)
{
    int rt = mmreg((ctx->opcode >> 7) & 0x7);
    int rs = mmreg((ctx->opcode >> 4) & 0x7);

    switch (ctx->opcode & 0xf) {
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        gen_logic(ctx, OPC_AND, rt, rt, rs);

        int lwm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
        int offset = extract32(ctx->opcode, 4, 4);
        gen_ldst_multiple(ctx, LWM32, lwm_converted, 29, offset << 2);

    case R6_JRC16: /* JRCADDIUSP */
        if ((ctx->opcode >> 4) & 1) {
            int imm = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
        } else {
            rs = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, rs, 0, 0, 0);
        }
        break;

        int enc_dest = uMIPS_RD(ctx->opcode);
        int enc_rt = uMIPS_RS2(ctx->opcode);
        int enc_rs = (ctx->opcode & 3) | ((ctx->opcode >> 1) & 4);
        gen_movep(ctx, enc_dest, enc_rt, enc_rs);

        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        gen_logic(ctx, OPC_OR, rt, rt, rs);

        int swm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
        int offset = extract32(ctx->opcode, 4, 4);
        gen_ldst_multiple(ctx, SWM32, swm_converted, 29, offset << 2);

    case JALRC16: /* BREAK16, SDBBP16 */
        switch (ctx->opcode & 0x3f) {
        case JALRC16 + 0x20:
            gen_compute_branch(ctx, OPC_JALR, 2, (ctx->opcode >> 5) & 0x1f,
                               31, 0, 0);
            generate_exception(ctx, EXCP_BREAK);

            if (is_uhi(extract32(ctx->opcode, 6, 4))) {
                gen_helper_do_semihosting(cpu_env);
            } else if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception(ctx, EXCP_RI);
            } else {
                generate_exception(ctx, EXCP_DBp);
            }
        }
        break;
        generate_exception(ctx, EXCP_RI);
static void gen_ldxs(DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_ldst_pair(DisasContext *ctx, uint32_t opc, int rd,
                          int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    if (rd == base) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);
    tcg_gen_movi_tl(t1, 4);
    gen_op_addr_add(ctx, t0, t0, t1);
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd + 1);

    gen_load_gpr(t1, rd);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    tcg_gen_movi_tl(t1, 4);
    gen_op_addr_add(ctx, t0, t0, t1);
    gen_load_gpr(t1, rd + 1);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

#ifdef TARGET_MIPS64
    if (rd == base) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
    gen_store_gpr(t1, rd);
    tcg_gen_movi_tl(t1, 8);
    gen_op_addr_add(ctx, t0, t0, t1);
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
    gen_store_gpr(t1, rd + 1);

    gen_load_gpr(t1, rd);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
    tcg_gen_movi_tl(t1, 8);
    gen_op_addr_add(ctx, t0, t0, t1);
    gen_load_gpr(t1, rd + 1);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
#endif
}
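
/*
 * The paired load/store forms transfer rd and rd + 1 from two
 * consecutive words at base+offset and base+offset+4 (doublewords and
 * +8 for the 64-bit variants).  rd == 31 is rejected, and for the
 * loads an overlap between rd and the base register is rejected as
 * well, since the first load would clobber the address.
 */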
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
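
/*
 * The SYNC stype field selects the barrier strength: WMB orders stores
 * against stores, RMB loads against loads, ACQUIRE and RELEASE the
 * corresponding subsets, while plain SYNC and any unrecognised value
 * conservatively emit a full TCG_MO_ALL barrier with sequentially
 * consistent semantics.
 */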
static void gen_pool32axf(CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
        mips32_op = OPC_TEQ;
        mips32_op = OPC_TGE;
        mips32_op = OPC_TGEU;
        mips32_op = OPC_TLT;
        mips32_op = OPC_TLTU;
        mips32_op = OPC_TNE;
        gen_trap(ctx, mips32_op, rs, rt, -1);
#ifndef CONFIG_USER_ONLY
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        gen_mfc0(ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);
        check_cp0_enabled(ctx);
        {
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rs, (ctx->opcode >> 11) & 0x7);
            tcg_temp_free(t0);
        }
#endif
        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MADD, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MADDU, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUB, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUBU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;
        }
        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MULT, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MULTU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;
        }
        check_insn(ctx, ISA_MIPS32R6);
        gen_bitswap(ctx, OPC_BITSWAP, rs, rt);
        gen_bshfl(ctx, OPC_SEB, rs, rt);
        gen_bshfl(ctx, OPC_SEH, rs, rt);
        mips32_op = OPC_CLO;
        mips32_op = OPC_CLZ;
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, mips32_op, rt, rs);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_rdhwr(ctx, rt, rs, 0);
        gen_bshfl(ctx, OPC_WSBH, rs, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MULT;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MULTU;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_DIV;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_DIVU;
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, 0, rs, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MADD;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MADDU;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MSUB;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MSUBU;
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, 0, rs, rt);
        goto pool32axf_invalid;
        generate_exception_err(ctx, EXCP_CpU, 2);
        goto pool32axf_invalid;
    case JALR: /* JALRC */
    case JALR_HB: /* JALRC_HB */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* JALRC, JALRC_HB */
            gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 0);
        } else {
            /* JALR, JALR_HB */
            gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 4);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        }
        break;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        goto pool32axf_invalid;
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rs, rt);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rs, rt);
        goto pool32axf_invalid;
#ifndef CONFIG_USER_ONLY
        mips32_op = OPC_TLBP;
        mips32_op = OPC_TLBR;
        mips32_op = OPC_TLBWI;
        mips32_op = OPC_TLBWR;
        mips32_op = OPC_TLBINV;
        mips32_op = OPC_TLBINVF;
        mips32_op = OPC_WAIT;
        mips32_op = OPC_DERET;
        mips32_op = OPC_ERET;
        gen_cp0(env, ctx, mips32_op, rt, rs);
        goto pool32axf_invalid;
        check_cp0_enabled(ctx);
        {
            TCGv t0 = tcg_temp_new();

            save_cpu_state(ctx, 1);
            gen_helper_di(t0, cpu_env);
            gen_store_gpr(t0, rs);
            /*
             * Stop translation as we may have switched the execution
             * mode.
             */
            ctx->base.is_jmp = DISAS_STOP;
            tcg_temp_free(t0);
        }
        check_cp0_enabled(ctx);
        {
            TCGv t0 = tcg_temp_new();

            save_cpu_state(ctx, 1);
            gen_helper_ei(t0, cpu_env);
            gen_store_gpr(t0, rs);
            /*
             * DISAS_STOP isn't sufficient, we need to ensure we break out
             * of translated code to check for pending interrupts.
             */
            gen_save_pc(ctx->base.pc_next + 4);
            ctx->base.is_jmp = DISAS_EXIT;
            tcg_temp_free(t0);
        }
        goto pool32axf_invalid;
#endif
        gen_sync(extract32(ctx->opcode, 16, 5));
        generate_exception_end(ctx, EXCP_SYSCALL);
        if (is_uhi(extract32(ctx->opcode, 16, 10))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            check_insn(ctx, ISA_MIPS32);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                generate_exception_end(ctx, EXCP_DBp);
            }
        }
        goto pool32axf_invalid;
        switch (minor & 3) {
            gen_HILO(ctx, OPC_MFHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MFLO, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTLO, minor >> 2, rs);
            goto pool32axf_invalid;
        }
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_HILO(ctx, OPC_MFHI, 0, rs);
        gen_HILO(ctx, OPC_MFLO, 0, rs);
        gen_HILO(ctx, OPC_MTHI, 0, rs);
        gen_HILO(ctx, OPC_MTLO, 0, rs);
        goto pool32axf_invalid;
    pool32axf_invalid:
        MIPS_INVAL("pool32axf");
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * Values for microMIPS fmt field.  Variable-width, depending on which
 * formats the instruction supports.
 */
static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) ((fmt << 8) | opc)
#define FLOAT_2BIT_FMT(opc, fmt) ((fmt << 7) | opc)
#define COND_FLOAT_MOV(opc, cond) ((cond << 7) | opc)
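
/*
 * These helper macros fold the instruction's format selector into the
 * minor opcode so that a single switch can match an (operation, format)
 * pair: FLOAT_1BIT_FMT places a one-bit S/D selector above bit 8 of the
 * opcode, FLOAT_2BIT_FMT a two-bit S/D/PS selector above bit 7, and
 * COND_FLOAT_MOV combines MOVT/MOVF with its condition code the same way.
 */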
    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;
        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
        /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;

        /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
        break;

        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, minor2, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
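
    /*
     * The first halfword of the instruction is already in ctx->opcode;
     * the second halfword is fetched from pc_next + 2 and concatenated
     * above, and the common rt/rs/rd/rr fields plus the signed 16-bit
     * immediate are pre-extracted before dispatching on the 6-bit major
     * opcode.
     */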
        minor = ctx->opcode & 0x3f;
        minor = (ctx->opcode >> 6) & 0xf;
        mips32_op = OPC_SLL;
        mips32_op = OPC_SRA;
        mips32_op = OPC_SRL;
        mips32_op = OPC_ROTR;
        gen_shift_imm(ctx, mips32_op, rt, rs, rd);
        check_insn(ctx, ISA_MIPS32R6);
        gen_cond_move(ctx, OPC_SELEQZ, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_cond_move(ctx, OPC_SELNEZ, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
        goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
        mips32_op = OPC_ADD;
        mips32_op = OPC_ADDU;
        mips32_op = OPC_SUB;
        mips32_op = OPC_SUBU;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MUL;
        gen_arith(ctx, mips32_op, rd, rs, rt);
        mips32_op = OPC_SLLV;
        mips32_op = OPC_SRLV;
        mips32_op = OPC_SRAV;
        mips32_op = OPC_ROTRV;
        gen_shift(ctx, mips32_op, rd, rs, rt);
        /* Logical operations */
        mips32_op = OPC_AND;
        mips32_op = OPC_OR;
        mips32_op = OPC_NOR;
        mips32_op = OPC_XOR;
        gen_logic(ctx, mips32_op, rd, rs, rt);
        /* Set less than */
        mips32_op = OPC_SLT;
        mips32_op = OPC_SLTU;
        gen_slt(ctx, mips32_op, rd, rs, rt);
        goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
        /* Conditional moves */
        case MOVN: /* MUL */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
            } else {
                gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
            }
            break;
        case MOVZ: /* MUH */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
            } else {
                gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
            }
            break;
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
        case LWXS: /* DIV */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
            } else {
                gen_ldxs(ctx, rs, rt, rd);
            }
            break;
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
            goto pool32a_invalid;
        gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
        check_insn(ctx, ISA_MIPS32R6);
        gen_lsa(ctx, OPC_LSA, rd, rs, rt, extract32(ctx->opcode, 9, 2));
        check_insn(ctx, ISA_MIPS32R6);
        gen_align(ctx, 32, rd, rs, rt, extract32(ctx->opcode, 9, 2));
        gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
        gen_pool32axf(env, ctx, rt, rs);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS32R6);
        generate_exception_end(ctx, EXCP_RI);
    pool32a_invalid:
        MIPS_INVAL("pool32a");
        generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
#endif
        gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
#endif
        gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
        MIPS_INVAL("pool32b");
        generate_exception_end(ctx, EXCP_RI);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_ALNV_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_PS;
            gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
        case CABS_COND_FMT:
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            cond = (ctx->opcode >> 6) & 0xf;
            cc = (ctx->opcode >> 13) & 0x7;
            fmt = (ctx->opcode >> 10) & 0x3;
            switch (fmt) {
                gen_cmpabs_s(ctx, cond, rt, rs, cc);
                gen_cmpabs_d(ctx, cond, rt, rs, cc);
                gen_cmpabs_ps(ctx, cond, rt, rs, cc);
                goto pool32f_invalid;
            }
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            cond = (ctx->opcode >> 6) & 0xf;
            cc = (ctx->opcode >> 13) & 0x7;
            fmt = (ctx->opcode >> 10) & 0x3;
            switch (fmt) {
                gen_cmp_s(ctx, cond, rt, rs, cc);
                gen_cmp_d(ctx, cond, rt, rs, cc);
                gen_cmp_ps(ctx, cond, rt, rs, cc);
                goto pool32f_invalid;
            }
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_cmp_s(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_cmp_d(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
            gen_pool32fxf(ctx, rt, rs);
            switch ((ctx->opcode >> 6) & 0x7) {
                mips32_op = OPC_PLL_PS;
                mips32_op = OPC_PLU_PS;
                mips32_op = OPC_PUL_PS;
                mips32_op = OPC_PUU_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_CVT_PS_S;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            check_insn(ctx, ISA_MIPS32R6);
            switch ((ctx->opcode >> 9) & 0x3) {
                gen_farith(ctx, OPC_MIN_S, rt, rs, rd, 0);
                gen_farith(ctx, OPC_MIN_D, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            switch ((ctx->opcode >> 6) & 0x7) {
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_LWXC1;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_SWXC1;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_LDXC1;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_SDXC1;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_LUXC1;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_SUXC1;
                gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
                goto pool32f_invalid;
            }
            check_insn(ctx, ISA_MIPS32R6);
            switch ((ctx->opcode >> 9) & 0x3) {
                gen_farith(ctx, OPC_MAX_S, rt, rs, rd, 0);
                gen_farith(ctx, OPC_MAX_D, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            fmt = (ctx->opcode >> 9) & 0x3;
            switch ((ctx->opcode >> 6) & 0x7) {
                mips32_op = OPC_RSQRT2_S;
                mips32_op = OPC_RSQRT2_D;
                mips32_op = OPC_RSQRT2_PS;
                goto pool32f_invalid;
                mips32_op = OPC_RECIP2_S;
                mips32_op = OPC_RECIP2_D;
                mips32_op = OPC_RECIP2_PS;
                goto pool32f_invalid;
                mips32_op = OPC_ADDR_PS;
                mips32_op = OPC_MULR_PS;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt */
            cc = (ctx->opcode >> 13) & 0x7;
            fmt = (ctx->opcode >> 9) & 0x3;
            switch ((ctx->opcode >> 6) & 0x7) {
            case MOVF_FMT: /* RINT_FMT */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_farith(ctx, OPC_RINT_S, 0, rt, rs, 0);
                    gen_farith(ctx, OPC_RINT_D, 0, rt, rs, 0);
                    goto pool32f_invalid;
                } else {
                    gen_movcf_s(ctx, rs, rt, cc, 0);
                    gen_movcf_d(ctx, rs, rt, cc, 0);
                    gen_movcf_ps(ctx, rs, rt, cc, 0);
                    goto pool32f_invalid;
                }
                break;
            case MOVT_FMT: /* CLASS_FMT */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_farith(ctx, OPC_CLASS_S, 0, rt, rs, 0);
                    gen_farith(ctx, OPC_CLASS_D, 0, rt, rs, 0);
                    goto pool32f_invalid;
                } else {
                    gen_movcf_s(ctx, rs, rt, cc, 1);
                    gen_movcf_d(ctx, rs, rt, cc, 1);
                    gen_movcf_ps(ctx, rs, rt, cc, 1);
                    goto pool32f_invalid;
                }
                break;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                goto pool32f_invalid;
            }
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        check_ps(ctx);                                  \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
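
/*
 * FINSN_3ARG_SDPS picks the single, double or paired-single variant of
 * a three-operand FP opcode from the 2-bit format field in bits 9..8,
 * jumping to pool32f_invalid for the reserved encoding; the selected
 * OPC_<op>_{S,D,PS} value is then emitted by the shared gen_farith()
 * call at the end of the POOL32F switch.
 */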
            check_insn(ctx, ISA_MIPS32R6);
            switch ((ctx->opcode >> 9) & 0x3) {
                gen_farith(ctx, OPC_MINA_S, rt, rs, rd, 0);
                gen_farith(ctx, OPC_MINA_D, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            check_insn(ctx, ISA_MIPS32R6);
            switch ((ctx->opcode >> 9) & 0x3) {
                gen_farith(ctx, OPC_MAXA_S, rt, rs, rd, 0);
                gen_farith(ctx, OPC_MAXA_D, rt, rs, rd, 0);
                goto pool32f_invalid;
            }
            /* regular FP ops */
            switch ((ctx->opcode >> 6) & 0x3) {
                FINSN_3ARG_SDPS(ADD);
                FINSN_3ARG_SDPS(SUB);
                FINSN_3ARG_SDPS(MUL);
                fmt = (ctx->opcode >> 8) & 0x3;
                if (fmt == 1) {
                    mips32_op = OPC_DIV_D;
                } else if (fmt == 0) {
                    mips32_op = OPC_DIV_S;
                } else {
                    goto pool32f_invalid;
                }
                goto pool32f_invalid;
            }
            switch ((ctx->opcode >> 6) & 0x7) {
            case MOVN_FMT: /* SELEQZ_FMT */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    switch ((ctx->opcode >> 9) & 0x3) {
                        gen_sel_s(ctx, OPC_SELEQZ_S, rd, rt, rs);
                        gen_sel_d(ctx, OPC_SELEQZ_D, rd, rt, rs);
                        goto pool32f_invalid;
                    }
                } else {
                    FINSN_3ARG_SDPS(MOVN);
                }
                break;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                FINSN_3ARG_SDPS(MOVN);
            case MOVZ_FMT: /* SELNEZ_FMT */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    switch ((ctx->opcode >> 9) & 0x3) {
                        gen_sel_s(ctx, OPC_SELNEZ_S, rd, rt, rs);
                        gen_sel_d(ctx, OPC_SELNEZ_D, rd, rt, rs);
                        goto pool32f_invalid;
                    }
                } else {
                    FINSN_3ARG_SDPS(MOVZ);
                }
                break;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                FINSN_3ARG_SDPS(MOVZ);
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    gen_sel_s(ctx, OPC_SEL_S, rd, rt, rs);
                    gen_sel_d(ctx, OPC_SEL_D, rd, rt, rs);
                    goto pool32f_invalid;
                }
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    mips32_op = OPC_MADDF_S;
                    mips32_op = OPC_MADDF_D;
                    goto pool32f_invalid;
                }
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    mips32_op = OPC_MSUBF_S;
                    mips32_op = OPC_MSUBF_D;
                    goto pool32f_invalid;
                }
                goto pool32f_invalid;
            gen_farith(ctx, mips32_op, rt, rs, rd, 0);
        pool32f_invalid:
            MIPS_INVAL("pool32f");
            generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
        minor = (ctx->opcode >> 21) & 0x1f;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BLTZ, 4, rs, -1, imm << 1, 4);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BGEZ, 4, rs, -1, imm << 1, 4);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BLEZ, 4, rs, -1, imm << 1, 4);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, OPC_BGTZ, 4, rs, -1, imm << 1, 4);
        case TLTI: /* BC1EQZC */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* BC1EQZC */
                check_cp1_enabled(ctx);
                gen_compute_branch1_r6(ctx, OPC_BC1EQZ, rs, imm << 1, 0);
            } else {
                /* TLTI */
                mips32_op = OPC_TLTI;
            }
        case TGEI: /* BC1NEZC */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* BC1NEZC */
                check_cp1_enabled(ctx);
                gen_compute_branch1_r6(ctx, OPC_BC1NEZ, rs, imm << 1, 0);
            } else {
                /* TGEI */
                mips32_op = OPC_TGEI;
            }
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_TLTIU;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_TGEIU;
        case TNEI: /* SYNCI */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /*
                 * Break the TB to be able to sync copied instructions
                 * immediately.
                 */
                ctx->base.is_jmp = DISAS_STOP;
            } else {
                /* TNEI */
                mips32_op = OPC_TNEI;
            }
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_TEQI;
            gen_trap(ctx, mips32_op, rs, -1, imm);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
                               4, rs, 0, imm << 1, 0);
            /*
             * Compact branches don't have a delay slot, so just let
             * the normal delay slot handling take us to the branch
             * target.
             */
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_logic_imm(ctx, OPC_LUI, rs, 0, imm);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /*
             * Break the TB to be able to sync copied instructions
             * immediately.
             */
            ctx->base.is_jmp = DISAS_STOP;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1FANY2 : OPC_BC1F;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1TANY2 : OPC_BC1T;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_BC1FANY4;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_BC1TANY4;
            check_insn(ctx, ASE_MIPS3D);
            if (env->CP0_Config1 & (1 << CP0C1_FP)) {
                check_cp1_enabled(ctx);
                gen_compute_branch1(ctx, mips32_op,
                                    (ctx->opcode >> 18) & 0x7, imm << 1);
            } else {
                generate_exception_err(ctx, EXCP_CpU, 1);
            }
            /* MIPS DSP: not implemented */
            MIPS_INVAL("pool32i");
            generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
        offset = sextract32(ctx->opcode, 0,
                            (ctx->insn_flags & ISA_MIPS32R6) ? 9 : 12);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LWL;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SWL;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LWR;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SWR;
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LDL;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SDL;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LDR;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SDR;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        mips32_op = OPC_LWU;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        mips32_op = OPC_LLD;
#endif
        mips32_op = OPC_LL;
        gen_ld(ctx, mips32_op, rt, rs, offset);
        gen_st(ctx, mips32_op, rt, rs, offset);
        gen_st_cond(ctx, rt, rs, offset, MO_TESL, false);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, rt, rs, offset, MO_TEQ, false);
#endif
        if (!ctx->eva) {
            MIPS_INVAL("pool32c ld-eva");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        check_cp0_enabled(ctx);

        minor2 = (ctx->opcode >> 9) & 0x7;
        offset = sextract32(ctx->opcode, 0, 9);
        mips32_op = OPC_LBUE;
        mips32_op = OPC_LHUE;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LWLE;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_LWRE;
        mips32_op = OPC_LBE;
        mips32_op = OPC_LHE;
        mips32_op = OPC_LLE;
        mips32_op = OPC_LWE;
        if (!ctx->eva) {
            MIPS_INVAL("pool32c st-eva");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        check_cp0_enabled(ctx);

        minor2 = (ctx->opcode >> 9) & 0x7;
        offset = sextract32(ctx->opcode, 0, 9);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SWLE;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_SWRE;
        /* Treat as no-op */
        if ((ctx->insn_flags & ISA_MIPS32R6) && (rt >= 24)) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception(ctx, EXCP_RI);
        }
        /* Treat as no-op */
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, offset);
        }
        mips32_op = OPC_SBE;
        mips32_op = OPC_SHE;
        gen_st_cond(ctx, rt, rs, offset, MO_TESL, true);
        mips32_op = OPC_SWE;
        /* Treat as no-op */
        if ((ctx->insn_flags & ISA_MIPS32R6) && (rt >= 24)) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception(ctx, EXCP_RI);
        }
        MIPS_INVAL("pool32c");
        generate_exception_end(ctx, EXCP_RI);
    case ADDI32: /* AUI, LUI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* AUI, LUI */
            gen_logic_imm(ctx, OPC_LUI, rt, rs, imm);
        } else {
            /* ADDI32 */
            mips32_op = OPC_ADDI;
        }
        mips32_op = OPC_ADDIU;
        gen_arith_imm(ctx, mips32_op, rt, rs, imm);
        /* Logical operations */
        mips32_op = OPC_ORI;
        mips32_op = OPC_XORI;
        mips32_op = OPC_ANDI;
        gen_logic_imm(ctx, mips32_op, rt, rs, imm);
        /* Set less than immediate */
        mips32_op = OPC_SLTI;
        mips32_op = OPC_SLTIU;
        gen_slt_imm(ctx, mips32_op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, OPC_JALX, 4, rt, rs, offset, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
    case JALS32: /* BOVC, BEQC, BEQZALC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs >= rt) {
                mips32_op = OPC_BOVC;
            } else if (rs < rt && rs == 0) {
                mips32_op = OPC_BEQZALC;
            } else {
                mips32_op = OPC_BEQC;
            }
            gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        } else {
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 1;
            gen_compute_branch(ctx, OPC_JAL, 4, rt, rs, offset, 2);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        }
        break;
    case BEQ32: /* BC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, OPC_BC, 0, 0,
                                       sextract32(ctx->opcode << 1, 0, 27));
        } else {
            gen_compute_branch(ctx, OPC_BEQ, 4, rt, rs, imm << 1, 4);
        }
        break;
    case BNE32: /* BALC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, OPC_BALC, 0, 0,
                                       sextract32(ctx->opcode << 1, 0, 27));
        } else {
            gen_compute_branch(ctx, OPC_BNE, 4, rt, rs, imm << 1, 4);
        }
        break;
    case J32: /* BGTZC, BLTZC, BLTC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs == 0 && rt != 0) {
                mips32_op = OPC_BGTZC;
            } else if (rs != 0 && rt != 0 && rs == rt) {
                mips32_op = OPC_BLTZC;
            } else {
                mips32_op = OPC_BLTC;
            }
            gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        } else {
            gen_compute_branch(ctx, OPC_J, 4, rt, rs,
                               (int32_t)(ctx->opcode & 0x3FFFFFF) << 1, 4);
        }
        break;
    case JAL32: /* BLEZC, BGEZC, BGEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs == 0 && rt != 0) {
                mips32_op = OPC_BLEZC;
            } else if (rs != 0 && rt != 0 && rs == rt) {
                mips32_op = OPC_BGEZC;
            } else {
                mips32_op = OPC_BGEC;
            }
            gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        } else {
            gen_compute_branch(ctx, OPC_JAL, 4, rt, rs,
                               (int32_t)(ctx->opcode & 0x3FFFFFF) << 1, 4);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        }
        break;
        /* Floating point (COP1) */
        mips32_op = OPC_LWC1;
        mips32_op = OPC_LDC1;
        mips32_op = OPC_SWC1;
        mips32_op = OPC_SDC1;
        gen_cop1_ldst(ctx, mips32_op, rt, rs, imm);
    case ADDIUPC: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
            switch ((ctx->opcode >> 16) & 0x1f) {
                gen_pcrel(ctx, OPC_ADDIUPC, ctx->base.pc_next & ~0x3, rt);
                gen_pcrel(ctx, OPC_AUIPC, ctx->base.pc_next, rt);
                gen_pcrel(ctx, OPC_ALUIPC, ctx->base.pc_next, rt);
                gen_pcrel(ctx, R6_OPC_LWPC, ctx->base.pc_next & ~0x3, rt);
                generate_exception(ctx, EXCP_RI);
            }
        } else {
            /* ADDIUPC */
            int reg = mmreg(ZIMM(ctx->opcode, 23, 3));
            offset = SIMM(ctx->opcode, 0, 23) << 2;

            gen_addiupc(ctx, reg, offset, 0, 0);
        }
        break;
    case BNVC: /* BNEC, BNEZALC */
        check_insn(ctx, ISA_MIPS32R6);
        if (rs >= rt) {
            mips32_op = OPC_BNVC;
        } else if (rs < rt && rs == 0) {
            mips32_op = OPC_BNEZALC;
        } else {
            mips32_op = OPC_BNEC;
        }
        gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        break;
    case R6_BNEZC: /* JIALC */
        check_insn(ctx, ISA_MIPS32R6);
        if (rt != 0) {
            gen_compute_compact_branch(ctx, OPC_BNEZC, rt, 0,
                                       sextract32(ctx->opcode << 1, 0, 22));
        } else {
            gen_compute_compact_branch(ctx, OPC_JIALC, 0, rs, imm);
        }
        break;
    case R6_BEQZC: /* JIC */
        check_insn(ctx, ISA_MIPS32R6);
        if (rt != 0) {
            gen_compute_compact_branch(ctx, OPC_BEQZC, rt, 0,
                                       sextract32(ctx->opcode << 1, 0, 22));
        } else {
            gen_compute_compact_branch(ctx, OPC_JIC, 0, rs, imm);
        }
        break;
    case BLEZALC: /* BGEZALC, BGEUC */
        check_insn(ctx, ISA_MIPS32R6);
        if (rs == 0 && rt != 0) {
            mips32_op = OPC_BLEZALC;
        } else if (rs != 0 && rt != 0 && rs == rt) {
            mips32_op = OPC_BGEZALC;
        } else {
            mips32_op = OPC_BGEUC;
        }
        gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        break;
    case BGTZALC: /* BLTZALC, BLTUC */
        check_insn(ctx, ISA_MIPS32R6);
        if (rs == 0 && rt != 0) {
            mips32_op = OPC_BGTZALC;
        } else if (rs != 0 && rt != 0 && rs == rt) {
            mips32_op = OPC_BLTZALC;
        } else {
            mips32_op = OPC_BLTUC;
        }
        gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
        break;
        /* Loads and stores */
        mips32_op = OPC_LB;
        mips32_op = OPC_LBU;
        mips32_op = OPC_LH;
        mips32_op = OPC_LHU;
        mips32_op = OPC_LW;
#ifdef TARGET_MIPS64
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        mips32_op = OPC_LD;
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        mips32_op = OPC_SD;
#endif
        mips32_op = OPC_SB;
        mips32_op = OPC_SH;
        mips32_op = OPC_SW;
        gen_ld(ctx, mips32_op, rt, rs, imm);
        gen_st(ctx, mips32_op, rt, rs, imm);
        generate_exception_end(ctx, EXCP_RI);
static int decode_micromips_opc(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        env->CP0_BadVAddr = ctx->base.pc_next;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
            /* POOL32A, POOL32B, POOL32I, POOL32C */
            /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
            /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
            /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
            /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
            }
            /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
            /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
            /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
            }
        }
    }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
            int rs2 = mmreg(uMIPS_RS2(ctx->opcode));
            uint32_t opc = 0;

            switch (ctx->opcode & 0x1) {
            case ADDU16:
                opc = OPC_ADDU;
                break;
            case SUBU16:
                opc = OPC_SUBU;
                break;
            }
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /*
                 * In the Release 6, the register number location in
                 * the instruction encoding has changed.
                 */
                gen_arith(ctx, opc, rs1, rd, rs2);
            } else {
                gen_arith(ctx, opc, rd, rs1, rs2);
            }
        }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs = mmreg(uMIPS_RS(ctx->opcode));
            int amount = (ctx->opcode >> 1) & 0x7;
            uint32_t opc = 0;

            amount = amount == 0 ? 8 : amount;
            switch (ctx->opcode & 0x1) {
            case SLL16:
                opc = OPC_SLL;
                break;
            case SRL16:
                opc = OPC_SRL;
                break;
            }
            gen_shift_imm(ctx, opc, rd, rs, amount);
        }
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_pool16c_r6_insn(ctx);
        } else {
            gen_pool16c_insn(ctx);
        }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rb = 28; /* GP */
            int16_t offset = SIMM(ctx->opcode, 0, 7) << 2;

            gen_ld(ctx, OPC_LW, rd, rb, offset);
        }
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->opcode & 1) {
            generate_exception_end(ctx, EXCP_RI);
        } else {
            /* MOVEP */
            int enc_dest = uMIPS_RD(ctx->opcode);
            int enc_rt = uMIPS_RS2(ctx->opcode);
            int enc_rs = uMIPS_RS1(ctx->opcode);

            gen_movep(ctx, enc_dest, enc_rt, enc_rs);
        }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4);

            offset = (offset == 0xf ? -1 : offset);
            gen_ld(ctx, OPC_LBU, rd, rb, offset);
        }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

            gen_ld(ctx, OPC_LHU, rd, rb, offset);
        }
        {
            int rd = (ctx->opcode >> 5) & 0x1f;
            int rb = 29; /* SP */
            int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

            gen_ld(ctx, OPC_LW, rd, rb, offset);
        }
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

            gen_ld(ctx, OPC_LW, rd, rb, offset);
        }
        {
            int rd = mmreg2(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4);

            gen_st(ctx, OPC_SB, rd, rb, offset);
        }
        {
            int rd = mmreg2(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

            gen_st(ctx, OPC_SH, rd, rb, offset);
        }
        {
            int rd = (ctx->opcode >> 5) & 0x1f;
            int rb = 29; /* SP */
            int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

            gen_st(ctx, OPC_SW, rd, rb, offset);
        }
        {
            int rd = mmreg2(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

            gen_st(ctx, OPC_SW, rd, rb, offset);
        }
        {
            int rd = uMIPS_RD5(ctx->opcode);
            int rs = uMIPS_RS5(ctx->opcode);

            gen_arith(ctx, OPC_ADDU, rd, rs, 0);
        }
        switch (ctx->opcode & 0x1) {
        }
        switch (ctx->opcode & 0x1) {
            gen_addiur1sp(ctx);
        }
    case B16: /* BC16 */
        gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0,
                           sextract32(ctx->opcode, 0, 10) << 1,
                           (ctx->insn_flags & ISA_MIPS32R6) ? 0 : 4);
        break;
    case BNEZ16: /* BNEZC16 */
    case BEQZ16: /* BEQZC16 */
        gen_compute_branch(ctx, op == BNEZ16 ? OPC_BNE : OPC_BEQ, 2,
                           mmreg(uMIPS_RD(ctx->opcode)),
                           0, sextract32(ctx->opcode, 0, 7) << 1,
                           (ctx->insn_flags & ISA_MIPS32R6) ? 0 : 4);
        break;
        {
            int reg = mmreg(uMIPS_RD(ctx->opcode));
            int imm = ZIMM(ctx->opcode, 0, 7);

            imm = (imm == 0x7f ? -1 : imm);
            tcg_gen_movi_tl(cpu_gpr[reg], imm);
        }
        generate_exception_end(ctx, EXCP_RI);
        decode_micromips32_opc(env, ctx);
/* MAJOR, P16, and P32 pools opcodes */
    NM_MOVE_BALC = 0x02,
    NM_P16_SHIFT = 0x0c,
    NM_P_LS_U12 = 0x21,
    NM_P16_ADDU = 0x2c,
    NM_MOVEPREV = 0x3f,

/* POOL32A instruction pool */
    NM_POOL32A0 = 0x00,
    NM_SPECIAL2 = 0x01,
    NM_POOL32A5 = 0x05,
    NM_POOL32A7 = 0x07,

/* P.GP.W instruction pool */
    NM_ADDIUGP_W = 0x00,

/* P48I instruction pool */
    NM_ADDIUGP48 = 0x02,
    NM_ADDIUPC48 = 0x03,

/* P.U12 instruction pool */
    NM_ADDIUNEG = 0x08,

/* POOL32F instruction pool */
    NM_POOL32F_0 = 0x00,
    NM_POOL32F_3 = 0x03,
    NM_POOL32F_5 = 0x05,

/* POOL32S instruction pool */
    NM_POOL32S_0 = 0x00,
    NM_POOL32S_4 = 0x04,

/* P.LUI instruction pool */

/* P.GP.BH instruction pool */
    NM_ADDIUGP_B = 0x03,
    NM_P_GP_CP1 = 0x06,

/* P.LS.U12 instruction pool */
    NM_P_PREFU12 = 0x03,

/* P.LS.S9 instruction pool */
    NM_P_LS_UAWM = 0x05,

/* P.BAL instruction pool */

/* P.J instruction pool */
    NM_JALRC_HB = 0x01,
    NM_P_BALRSC = 0x08,

/* P.BR1 instruction pool */

/* P.BR2 instruction pool */

/* P.BRI instruction pool */

/* P16.SHIFT instruction pool */

/* POOL16C instruction pool */
    NM_POOL16C_0 = 0x00,

/* P16.A1 instruction pool */
    NM_ADDIUR1SP = 0x01,

/* P16.A2 instruction pool */
    NM_P_ADDIURS5 = 0x01,

/* P16.ADDU instruction pool */

/* P16.SR instruction pool */
    NM_RESTORE_JRC16 = 0x01,

/* P16.4X4 instruction pool */

/* P16.LB instruction pool */

/* P16.LH instruction pool */

/* P.RI instruction pool */
    NM_P_SYSCALL = 0x01,

/* POOL32A0 instruction pool */
    NM_D_E_MT_VPE = 0x56,

/* CRC32 instruction pool */

/* POOL32A5 instruction pool */
17978 NM_CMP_EQ_PH
= 0x00,
17979 NM_CMP_LT_PH
= 0x08,
17980 NM_CMP_LE_PH
= 0x10,
17981 NM_CMPGU_EQ_QB
= 0x18,
17982 NM_CMPGU_LT_QB
= 0x20,
17983 NM_CMPGU_LE_QB
= 0x28,
17984 NM_CMPGDU_EQ_QB
= 0x30,
17985 NM_CMPGDU_LT_QB
= 0x38,
17986 NM_CMPGDU_LE_QB
= 0x40,
17987 NM_CMPU_EQ_QB
= 0x48,
17988 NM_CMPU_LT_QB
= 0x50,
17989 NM_CMPU_LE_QB
= 0x58,
17990 NM_ADDQ_S_W
= 0x60,
17991 NM_SUBQ_S_W
= 0x68,
17995 NM_ADDQ_S_PH
= 0x01,
17996 NM_ADDQH_R_PH
= 0x09,
17997 NM_ADDQH_R_W
= 0x11,
17998 NM_ADDU_S_QB
= 0x19,
17999 NM_ADDU_S_PH
= 0x21,
18000 NM_ADDUH_R_QB
= 0x29,
18001 NM_SHRAV_R_PH
= 0x31,
18002 NM_SHRAV_R_QB
= 0x39,
18003 NM_SUBQ_S_PH
= 0x41,
18004 NM_SUBQH_R_PH
= 0x49,
18005 NM_SUBQH_R_W
= 0x51,
18006 NM_SUBU_S_QB
= 0x59,
18007 NM_SUBU_S_PH
= 0x61,
18008 NM_SUBUH_R_QB
= 0x69,
18009 NM_SHLLV_S_PH
= 0x71,
18010 NM_PRECR_SRA_R_PH_W
= 0x79,
18012 NM_MULEU_S_PH_QBL
= 0x12,
18013 NM_MULEU_S_PH_QBR
= 0x1a,
18014 NM_MULQ_RS_PH
= 0x22,
18015 NM_MULQ_S_PH
= 0x2a,
18016 NM_MULQ_RS_W
= 0x32,
18017 NM_MULQ_S_W
= 0x3a,
18020 NM_SHRAV_R_W
= 0x5a,
18021 NM_SHRLV_PH
= 0x62,
18022 NM_SHRLV_QB
= 0x6a,
18023 NM_SHLLV_QB
= 0x72,
18024 NM_SHLLV_S_W
= 0x7a,
18028 NM_MULEQ_S_W_PHL
= 0x04,
18029 NM_MULEQ_S_W_PHR
= 0x0c,
18031 NM_MUL_S_PH
= 0x05,
18032 NM_PRECR_QB_PH
= 0x0d,
18033 NM_PRECRQ_QB_PH
= 0x15,
18034 NM_PRECRQ_PH_W
= 0x1d,
18035 NM_PRECRQ_RS_PH_W
= 0x25,
18036 NM_PRECRQU_S_QB_PH
= 0x2d,
18037 NM_PACKRL_PH
= 0x35,
18041 NM_SHRA_R_W
= 0x5e,
18042 NM_SHRA_R_PH
= 0x66,
18043 NM_SHLL_S_PH
= 0x76,
18044 NM_SHLL_S_W
= 0x7e,
18049 /* POOL32A7 instruction pool */
18054 NM_POOL32AXF
= 0x07,
18057 /* P.SR instruction pool */
18063 /* P.SHIFT instruction pool */
18071 /* P.ROTX instruction pool */
18076 /* P.INS instruction pool */
18081 /* P.EXT instruction pool */
18086 /* POOL32F_0 (fmt) instruction pool */
18091 NM_SELEQZ_S
= 0x07,
18092 NM_SELEQZ_D
= 0x47,
18096 NM_SELNEZ_S
= 0x0f,
18097 NM_SELNEZ_D
= 0x4f,
18112 /* POOL32F_3 instruction pool */
18116 NM_MINA_FMT
= 0x04,
18117 NM_MAXA_FMT
= 0x05,
18118 NM_POOL32FXF
= 0x07,
18121 /* POOL32F_5 instruction pool */
18123 NM_CMP_CONDN_S
= 0x00,
18124 NM_CMP_CONDN_D
= 0x02,
18127 /* P.GP.LH instruction pool */
18133 /* P.GP.SH instruction pool */
18138 /* P.GP.CP1 instruction pool */
18146 /* P.LS.S0 instruction pool */
18163 NM_P_PREFS9
= 0x03,
18169 /* P.LS.S1 instruction pool */
18171 NM_ASET_ACLR
= 0x02,
18179 /* P.LS.E0 instruction pool */
18195 /* P.PREFE instruction pool */
18201 /* P.LLE instruction pool */
18207 /* P.SCE instruction pool */
18213 /* P.LS.WM instruction pool */
18219 /* P.LS.UAWM instruction pool */
18225 /* P.BR3A instruction pool */
18231 NM_BPOSGE32C
= 0x04,
18234 /* P16.RI instruction pool */
18236 NM_P16_SYSCALL
= 0x01,
18241 /* POOL16C_0 instruction pool */
18243 NM_POOL16C_00
= 0x00,
18246 /* P16.JRC instruction pool */
18252 /* P.SYSCALL instruction pool */
18258 /* P.TRAP instruction pool */
18264 /* P.CMOVE instruction pool */
18270 /* POOL32Axf instruction pool */
18272 NM_POOL32AXF_1
= 0x01,
18273 NM_POOL32AXF_2
= 0x02,
18274 NM_POOL32AXF_4
= 0x04,
18275 NM_POOL32AXF_5
= 0x05,
18276 NM_POOL32AXF_7
= 0x07,
18279 /* POOL32Axf_1 instruction pool */
18281 NM_POOL32AXF_1_0
= 0x00,
18282 NM_POOL32AXF_1_1
= 0x01,
18283 NM_POOL32AXF_1_3
= 0x03,
18284 NM_POOL32AXF_1_4
= 0x04,
18285 NM_POOL32AXF_1_5
= 0x05,
18286 NM_POOL32AXF_1_7
= 0x07,
18289 /* POOL32Axf_2 instruction pool */
18291 NM_POOL32AXF_2_0_7
= 0x00,
18292 NM_POOL32AXF_2_8_15
= 0x01,
18293 NM_POOL32AXF_2_16_23
= 0x02,
18294 NM_POOL32AXF_2_24_31
= 0x03,
18297 /* POOL32Axf_7 instruction pool */
18299 NM_SHRA_R_QB
= 0x0,
18304 /* POOL32Axf_1_0 instruction pool */
18312 /* POOL32Axf_1_1 instruction pool */
18318 /* POOL32Axf_1_3 instruction pool */
18326 /* POOL32Axf_1_4 instruction pool */
18332 /* POOL32Axf_1_5 instruction pool */
18334 NM_MAQ_S_W_PHR
= 0x0,
18335 NM_MAQ_S_W_PHL
= 0x1,
18336 NM_MAQ_SA_W_PHR
= 0x2,
18337 NM_MAQ_SA_W_PHL
= 0x3,
18340 /* POOL32Axf_1_7 instruction pool */
18344 NM_EXTR_RS_W
= 0x2,
18348 /* POOL32Axf_2_0_7 instruction pool */
18351 NM_DPAQ_S_W_PH
= 0x1,
18353 NM_DPSQ_S_W_PH
= 0x3,
18360 /* POOL32Axf_2_8_15 instruction pool */
18362 NM_DPAX_W_PH
= 0x0,
18363 NM_DPAQ_SA_L_W
= 0x1,
18364 NM_DPSX_W_PH
= 0x2,
18365 NM_DPSQ_SA_L_W
= 0x3,
18368 NM_EXTRV_R_W
= 0x7,
18371 /* POOL32Axf_2_16_23 instruction pool */
18373 NM_DPAU_H_QBL
= 0x0,
18374 NM_DPAQX_S_W_PH
= 0x1,
18375 NM_DPSU_H_QBL
= 0x2,
18376 NM_DPSQX_S_W_PH
= 0x3,
18379 NM_MULSA_W_PH
= 0x6,
18380 NM_EXTRV_RS_W
= 0x7,
18383 /* POOL32Axf_2_24_31 instruction pool */
18385 NM_DPAU_H_QBR
= 0x0,
18386 NM_DPAQX_SA_W_PH
= 0x1,
18387 NM_DPSU_H_QBR
= 0x2,
18388 NM_DPSQX_SA_W_PH
= 0x3,
18391 NM_MULSAQ_S_W_PH
= 0x6,
18392 NM_EXTRV_S_H
= 0x7,
18395 /* POOL32Axf_{4, 5} instruction pool */
18414 /* nanoMIPS DSP instructions */
18415 NM_ABSQ_S_QB
= 0x00,
18416 NM_ABSQ_S_PH
= 0x08,
18417 NM_ABSQ_S_W
= 0x10,
18418 NM_PRECEQ_W_PHL
= 0x28,
18419 NM_PRECEQ_W_PHR
= 0x30,
18420 NM_PRECEQU_PH_QBL
= 0x38,
18421 NM_PRECEQU_PH_QBR
= 0x48,
18422 NM_PRECEU_PH_QBL
= 0x58,
18423 NM_PRECEU_PH_QBR
= 0x68,
18424 NM_PRECEQU_PH_QBLA
= 0x39,
18425 NM_PRECEQU_PH_QBRA
= 0x49,
18426 NM_PRECEU_PH_QBLA
= 0x59,
18427 NM_PRECEU_PH_QBRA
= 0x69,
18428 NM_REPLV_PH
= 0x01,
18429 NM_REPLV_QB
= 0x09,
18432 NM_RADDU_W_QB
= 0x78,
18438 /* PP.SR instruction pool */
18442 NM_RESTORE_JRC
= 0x03,
18445 /* P.SR.F instruction pool */
18448 NM_RESTOREF
= 0x01,
18451 /* P16.SYSCALL instruction pool */
18453 NM_SYSCALL16
= 0x00,
18454 NM_HYPCALL16
= 0x01,
18457 /* POOL16C_00 instruction pool */
18465 /* PP.LSX and PP.LSXS instruction pool */
18503 /* ERETx instruction pool */
/* POOL32FxF_{0, 1} instruction pool */
    NM_CVT_S_PL = 0x84,
    NM_CVT_S_PU = 0xa4,
    NM_CVT_L_S = 0x004,
    NM_CVT_L_D = 0x104,
    NM_CVT_W_S = 0x024,
    NM_CVT_W_D = 0x124,
    NM_RSQRT_S = 0x008,
    NM_RSQRT_D = 0x108,
    NM_RECIP_S = 0x048,
    NM_RECIP_D = 0x148,
    NM_FLOOR_L_S = 0x00c,
    NM_FLOOR_L_D = 0x10c,
    NM_FLOOR_W_S = 0x02c,
    NM_FLOOR_W_D = 0x12c,
    NM_CEIL_L_S = 0x04c,
    NM_CEIL_L_D = 0x14c,
    NM_CEIL_W_S = 0x06c,
    NM_CEIL_W_D = 0x16c,
    NM_TRUNC_L_S = 0x08c,
    NM_TRUNC_L_D = 0x18c,
    NM_TRUNC_W_S = 0x0ac,
    NM_TRUNC_W_D = 0x1ac,
    NM_ROUND_L_S = 0x0cc,
    NM_ROUND_L_D = 0x1cc,
    NM_ROUND_W_S = 0x0ec,
    NM_ROUND_W_D = 0x1ec,
    NM_CVT_D_S = 0x04d,
    NM_CVT_D_W = 0x0cd,
    NM_CVT_D_L = 0x14d,
    NM_CVT_S_D = 0x06d,
    NM_CVT_S_W = 0x0ed,
    NM_CVT_S_L = 0x16d,

/* P.LL instruction pool */

/* P.SC instruction pool */

/* P.DVP instruction pool */
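/*
 * The NM_* values above are not full 32-bit opcodes; they are the values of
 * the sub-opcode fields that the nanoMIPS decoder pulls out of ctx->opcode
 * with extract32() at pool-specific bit positions, which is why the same
 * numeric value can legitimately appear in several pools.
 */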
/*
 * nanoMIPS decoding engine
 */

/* extraction utilities */

#define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
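/*
 * Field layout implied by the masks above: rt3 lives in bits 9..7 of the
 * 16-bit word, rs3 in bits 6..4 and rd3 in bits 3..1, while the 5-bit forms
 * use bits 9..5 (rd5) and 4..0 (rs5).  The 3-bit fields are register
 * encodings and still go through the decode_gpr_* mappings below.
 */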
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19,  4,  5,  6,  7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = {  0, 17, 18, 19,  4,  5,  6,  7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = {  8,  9, 10, 11,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = {  8,  9, 10,  0,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
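/*
 * Example: the gpr3 encoding 0..7 selects $16-$19 ($s0-$s3) and $4-$7
 * ($a0-$a3), so decode_gpr_gpr3(1) yields register 17 and decode_gpr_gpr3(5)
 * yields register 5.  The "src.store" variant differs only in mapping
 * encoding 0 to $zero instead of $16.
 */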
static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}

static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);
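/*
 * Both helpers above walk the register list the same way: slot i of a SAVE
 * is written at SP - 4 * (i + 1) before SP is dropped by u, while RESTORE
 * reads slot i from SP + u - 4 * (i + 1) before SP is raised by u.  When gp
 * is set, the last slot holds $28 (GP) rather than the next register in the
 * rt sequence.
 */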
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        gen_logic(ctx, OPC_OR, rt, rt, rs);

static void gen_pool32a0_nanomips_insn(CPUMIPSState *env, DisasContext *ctx)
{
    int rt = extract32(ctx->opcode, 21, 5);
    int rs = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);

    switch (extract32(ctx->opcode, 3, 7)) {
        switch (extract32(ctx->opcode, 10, 1)) {
            gen_trap(ctx, OPC_TEQ, rs, rt, -1);
            gen_trap(ctx, OPC_TNE, rs, rt, -1);
        gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
        gen_bshfl(ctx, OPC_SEB, rs, rt);
        gen_bshfl(ctx, OPC_SEH, rs, rt);
        gen_shift(ctx, OPC_SLLV, rd, rt, rs);
        gen_shift(ctx, OPC_SRLV, rd, rt, rs);
        gen_shift(ctx, OPC_SRAV, rd, rt, rs);
        gen_shift(ctx, OPC_ROTRV, rd, rt, rs);
        gen_arith(ctx, OPC_ADD, rd, rs, rt);
        gen_arith(ctx, OPC_ADDU, rd, rs, rt);
        gen_arith(ctx, OPC_SUB, rd, rs, rt);
        gen_arith(ctx, OPC_SUBU, rd, rs, rt);
        switch (extract32(ctx->opcode, 10, 1)) {
            gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
            gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
        gen_logic(ctx, OPC_AND, rd, rs, rt);
        gen_logic(ctx, OPC_OR, rd, rs, rt);
        gen_logic(ctx, OPC_NOR, rd, rs, rt);
        gen_logic(ctx, OPC_XOR, rd, rs, rt);
        gen_slt(ctx, OPC_SLT, rd, rs, rt);
#ifndef CONFIG_USER_ONLY
        TCGv t0 = tcg_temp_new();
        switch (extract32(ctx->opcode, 10, 1)) {
            check_cp0_enabled(ctx);
            gen_helper_dvp(t0, cpu_env);
            gen_store_gpr(t0, rt);
            check_cp0_enabled(ctx);
            gen_helper_evp(t0, cpu_env);
            gen_store_gpr(t0, rt);
        gen_slt(ctx, OPC_SLTU, rd, rs, rt);
        TCGv t0 = tcg_temp_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_add_tl(t0, t1, t2);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_xor_tl(t1, t1, t2);
        tcg_gen_xor_tl(t2, t0, t2);
        tcg_gen_andc_tl(t1, t2, t1);

        /* operands of same sign, result different sign */
        tcg_gen_setcondi_tl(TCG_COND_LT, t0, t1, 0);
        gen_store_gpr(t0, rd);
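        /*
         * The sequence above is a signed-overflow test for the 32-bit add:
         * t1 = ~(rs ^ rt) & (result ^ rt) has its sign bit set exactly when
         * both operands had the same sign and the result's sign differs, so
         * the setcond leaves 1 in rd on overflow and 0 otherwise.
         */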
        gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
        gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
#ifndef CONFIG_USER_ONLY
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        gen_mfc0(ctx, cpu_gpr[rt], rs, extract32(ctx->opcode, 11, 3));
        check_cp0_enabled(ctx);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rs, extract32(ctx->opcode, 11, 3));
    case NM_D_E_MT_VPE:
        uint8_t sc = extract32(ctx->opcode, 10, 1);
        TCGv t0 = tcg_temp_new();
        gen_helper_dmt(t0);
        gen_store_gpr(t0, rt);
        } else if (rs == 0) {
            gen_helper_dvpe(t0, cpu_env);
            gen_store_gpr(t0, rt);
            generate_exception_end(ctx, EXCP_RI);
        gen_helper_emt(t0);
        gen_store_gpr(t0, rt);
        } else if (rs == 0) {
            gen_helper_evpe(t0, cpu_env);
            gen_store_gpr(t0, rt);
            generate_exception_end(ctx, EXCP_RI);
        TCGv t0 = tcg_temp_new();
        TCGv t1 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        gen_helper_fork(t0, t1);
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        gen_mftr(env, ctx, rs, rt, extract32(ctx->opcode, 10, 1),
                 extract32(ctx->opcode, 11, 5), extract32(ctx->opcode, 3, 1));
        check_cp0_enabled(ctx);
        gen_mttr(env, ctx, rs, rt, extract32(ctx->opcode, 10, 1),
                 extract32(ctx->opcode, 11, 5), extract32(ctx->opcode, 3, 1));
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rs);
        gen_helper_yield(t0, cpu_env, t0);
        gen_store_gpr(t0, rt);
        generate_exception_end(ctx, EXCP_RI);
18976 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18977 int ret
, int v1
, int v2
)
18983 t0
= tcg_temp_new_i32();
18985 v0_t
= tcg_temp_new();
18986 v1_t
= tcg_temp_new();
18988 tcg_gen_movi_i32(t0
, v2
>> 3);
18990 gen_load_gpr(v0_t
, ret
);
18991 gen_load_gpr(v1_t
, v1
);
18994 case NM_MAQ_S_W_PHR
:
18996 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18998 case NM_MAQ_S_W_PHL
:
19000 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
19002 case NM_MAQ_SA_W_PHR
:
19004 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
19006 case NM_MAQ_SA_W_PHL
:
19008 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
19011 generate_exception_end(ctx
, EXCP_RI
);
19015 tcg_temp_free_i32(t0
);
19017 tcg_temp_free(v0_t
);
19018 tcg_temp_free(v1_t
);
19022 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19023 int ret
, int v1
, int v2
)
19026 TCGv t0
= tcg_temp_new();
19027 TCGv t1
= tcg_temp_new();
19028 TCGv v0_t
= tcg_temp_new();
19030 gen_load_gpr(v0_t
, v1
);
19033 case NM_POOL32AXF_1_0
:
19035 switch (extract32(ctx
->opcode
, 12, 2)) {
19037 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
19040 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
19043 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
19046 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
19050 case NM_POOL32AXF_1_1
:
19052 switch (extract32(ctx
->opcode
, 12, 2)) {
19054 tcg_gen_movi_tl(t0
, v2
);
19055 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
19058 tcg_gen_movi_tl(t0
, v2
>> 3);
19059 gen_helper_shilo(t0
, v0_t
, cpu_env
);
19062 generate_exception_end(ctx
, EXCP_RI
);
19066 case NM_POOL32AXF_1_3
:
19068 imm
= extract32(ctx
->opcode
, 14, 7);
19069 switch (extract32(ctx
->opcode
, 12, 2)) {
19071 tcg_gen_movi_tl(t0
, imm
);
19072 gen_helper_rddsp(t0
, t0
, cpu_env
);
19073 gen_store_gpr(t0
, ret
);
19076 gen_load_gpr(t0
, ret
);
19077 tcg_gen_movi_tl(t1
, imm
);
19078 gen_helper_wrdsp(t0
, t1
, cpu_env
);
19081 tcg_gen_movi_tl(t0
, v2
>> 3);
19082 tcg_gen_movi_tl(t1
, v1
);
19083 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
19084 gen_store_gpr(t0
, ret
);
19087 tcg_gen_movi_tl(t0
, v2
>> 3);
19088 tcg_gen_movi_tl(t1
, v1
);
19089 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
19090 gen_store_gpr(t0
, ret
);
19094 case NM_POOL32AXF_1_4
:
19096 tcg_gen_movi_tl(t0
, v2
>> 2);
19097 switch (extract32(ctx
->opcode
, 12, 1)) {
19099 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
19100 gen_store_gpr(t0
, ret
);
19103 gen_helper_shrl_qb(t0
, t0
, v0_t
);
19104 gen_store_gpr(t0
, ret
);
19108 case NM_POOL32AXF_1_5
:
19109 opc
= extract32(ctx
->opcode
, 12, 2);
19110 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
19112 case NM_POOL32AXF_1_7
:
19114 tcg_gen_movi_tl(t0
, v2
>> 3);
19115 tcg_gen_movi_tl(t1
, v1
);
19116 switch (extract32(ctx
->opcode
, 12, 2)) {
19118 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
19119 gen_store_gpr(t0
, ret
);
19122 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
19123 gen_store_gpr(t0
, ret
);
19126 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
19127 gen_store_gpr(t0
, ret
);
19130 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
19131 gen_store_gpr(t0
, ret
);
19136 generate_exception_end(ctx
, EXCP_RI
);
19142 tcg_temp_free(v0_t
);
19145 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19146 TCGv v0
, TCGv v1
, int rd
)
19150 t0
= tcg_temp_new_i32();
19152 tcg_gen_movi_i32(t0
, rd
>> 3);
19155 case NM_POOL32AXF_2_0_7
:
19156 switch (extract32(ctx
->opcode
, 9, 3)) {
19159 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19161 case NM_DPAQ_S_W_PH
:
19163 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19167 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19169 case NM_DPSQ_S_W_PH
:
19171 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19174 generate_exception_end(ctx
, EXCP_RI
);
19178 case NM_POOL32AXF_2_8_15
:
19179 switch (extract32(ctx
->opcode
, 9, 3)) {
19182 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19184 case NM_DPAQ_SA_L_W
:
19186 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19190 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19192 case NM_DPSQ_SA_L_W
:
19194 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19197 generate_exception_end(ctx
, EXCP_RI
);
19201 case NM_POOL32AXF_2_16_23
:
19202 switch (extract32(ctx
->opcode
, 9, 3)) {
19203 case NM_DPAU_H_QBL
:
19205 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19207 case NM_DPAQX_S_W_PH
:
19209 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19211 case NM_DPSU_H_QBL
:
19213 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19215 case NM_DPSQX_S_W_PH
:
19217 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19219 case NM_MULSA_W_PH
:
19221 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19224 generate_exception_end(ctx
, EXCP_RI
);
19228 case NM_POOL32AXF_2_24_31
:
19229 switch (extract32(ctx
->opcode
, 9, 3)) {
19230 case NM_DPAU_H_QBR
:
19232 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19234 case NM_DPAQX_SA_W_PH
:
19236 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19238 case NM_DPSU_H_QBR
:
19240 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19242 case NM_DPSQX_SA_W_PH
:
19244 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19246 case NM_MULSAQ_S_W_PH
:
19248 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19251 generate_exception_end(ctx
, EXCP_RI
);
19256 generate_exception_end(ctx
, EXCP_RI
);
19260 tcg_temp_free_i32(t0
);
19263 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19264 int rt
, int rs
, int rd
)
19267 TCGv t0
= tcg_temp_new();
19268 TCGv t1
= tcg_temp_new();
19269 TCGv v0_t
= tcg_temp_new();
19270 TCGv v1_t
= tcg_temp_new();
19272 gen_load_gpr(v0_t
, rt
);
19273 gen_load_gpr(v1_t
, rs
);
19276 case NM_POOL32AXF_2_0_7
:
19277 switch (extract32(ctx
->opcode
, 9, 3)) {
19279 case NM_DPAQ_S_W_PH
:
19281 case NM_DPSQ_S_W_PH
:
19282 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19287 gen_load_gpr(t0
, rs
);
19289 if (rd
!= 0 && rd
!= 2) {
19290 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19291 tcg_gen_ext32u_tl(t0
, t0
);
19292 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19293 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19295 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19301 int acc
= extract32(ctx
->opcode
, 14, 2);
19302 TCGv_i64 t2
= tcg_temp_new_i64();
19303 TCGv_i64 t3
= tcg_temp_new_i64();
19305 gen_load_gpr(t0
, rt
);
19306 gen_load_gpr(t1
, rs
);
19307 tcg_gen_ext_tl_i64(t2
, t0
);
19308 tcg_gen_ext_tl_i64(t3
, t1
);
19309 tcg_gen_mul_i64(t2
, t2
, t3
);
19310 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19311 tcg_gen_add_i64(t2
, t2
, t3
);
19312 tcg_temp_free_i64(t3
);
19313 gen_move_low32(cpu_LO
[acc
], t2
);
19314 gen_move_high32(cpu_HI
[acc
], t2
);
19315 tcg_temp_free_i64(t2
);
19321 int acc
= extract32(ctx
->opcode
, 14, 2);
19322 TCGv_i32 t2
= tcg_temp_new_i32();
19323 TCGv_i32 t3
= tcg_temp_new_i32();
19325 gen_load_gpr(t0
, rs
);
19326 gen_load_gpr(t1
, rt
);
19327 tcg_gen_trunc_tl_i32(t2
, t0
);
19328 tcg_gen_trunc_tl_i32(t3
, t1
);
19329 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19330 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19331 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19332 tcg_temp_free_i32(t2
);
19333 tcg_temp_free_i32(t3
);
19338 gen_load_gpr(v1_t
, rs
);
19339 tcg_gen_movi_tl(t0
, rd
>> 3);
19340 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19341 gen_store_gpr(t0
, ret
);
19345 case NM_POOL32AXF_2_8_15
:
19346 switch (extract32(ctx
->opcode
, 9, 3)) {
19348 case NM_DPAQ_SA_L_W
:
19350 case NM_DPSQ_SA_L_W
:
19351 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19356 int acc
= extract32(ctx
->opcode
, 14, 2);
19357 TCGv_i64 t2
= tcg_temp_new_i64();
19358 TCGv_i64 t3
= tcg_temp_new_i64();
19360 gen_load_gpr(t0
, rs
);
19361 gen_load_gpr(t1
, rt
);
19362 tcg_gen_ext32u_tl(t0
, t0
);
19363 tcg_gen_ext32u_tl(t1
, t1
);
19364 tcg_gen_extu_tl_i64(t2
, t0
);
19365 tcg_gen_extu_tl_i64(t3
, t1
);
19366 tcg_gen_mul_i64(t2
, t2
, t3
);
19367 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19368 tcg_gen_add_i64(t2
, t2
, t3
);
19369 tcg_temp_free_i64(t3
);
19370 gen_move_low32(cpu_LO
[acc
], t2
);
19371 gen_move_high32(cpu_HI
[acc
], t2
);
19372 tcg_temp_free_i64(t2
);
19378 int acc
= extract32(ctx
->opcode
, 14, 2);
19379 TCGv_i32 t2
= tcg_temp_new_i32();
19380 TCGv_i32 t3
= tcg_temp_new_i32();
19382 gen_load_gpr(t0
, rs
);
19383 gen_load_gpr(t1
, rt
);
19384 tcg_gen_trunc_tl_i32(t2
, t0
);
19385 tcg_gen_trunc_tl_i32(t3
, t1
);
19386 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19387 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19388 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19389 tcg_temp_free_i32(t2
);
19390 tcg_temp_free_i32(t3
);
19395 tcg_gen_movi_tl(t0
, rd
>> 3);
19396 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19397 gen_store_gpr(t0
, ret
);
19400 generate_exception_end(ctx
, EXCP_RI
);
19404 case NM_POOL32AXF_2_16_23
:
19405 switch (extract32(ctx
->opcode
, 9, 3)) {
19406 case NM_DPAU_H_QBL
:
19407 case NM_DPAQX_S_W_PH
:
19408 case NM_DPSU_H_QBL
:
19409 case NM_DPSQX_S_W_PH
:
19410 case NM_MULSA_W_PH
:
19411 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19415 tcg_gen_movi_tl(t0
, rd
>> 3);
19416 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19417 gen_store_gpr(t0
, ret
);
19422 int acc
= extract32(ctx
->opcode
, 14, 2);
19423 TCGv_i64 t2
= tcg_temp_new_i64();
19424 TCGv_i64 t3
= tcg_temp_new_i64();
19426 gen_load_gpr(t0
, rs
);
19427 gen_load_gpr(t1
, rt
);
19428 tcg_gen_ext_tl_i64(t2
, t0
);
19429 tcg_gen_ext_tl_i64(t3
, t1
);
19430 tcg_gen_mul_i64(t2
, t2
, t3
);
19431 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19432 tcg_gen_sub_i64(t2
, t3
, t2
);
19433 tcg_temp_free_i64(t3
);
19434 gen_move_low32(cpu_LO
[acc
], t2
);
19435 gen_move_high32(cpu_HI
[acc
], t2
);
19436 tcg_temp_free_i64(t2
);
19439 case NM_EXTRV_RS_W
:
19441 tcg_gen_movi_tl(t0
, rd
>> 3);
19442 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19443 gen_store_gpr(t0
, ret
);
19447 case NM_POOL32AXF_2_24_31
:
19448 switch (extract32(ctx
->opcode
, 9, 3)) {
19449 case NM_DPAU_H_QBR
:
19450 case NM_DPAQX_SA_W_PH
:
19451 case NM_DPSU_H_QBR
:
19452 case NM_DPSQX_SA_W_PH
:
19453 case NM_MULSAQ_S_W_PH
:
19454 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19458 tcg_gen_movi_tl(t0
, rd
>> 3);
19459 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19460 gen_store_gpr(t0
, ret
);
19465 int acc
= extract32(ctx
->opcode
, 14, 2);
19466 TCGv_i64 t2
= tcg_temp_new_i64();
19467 TCGv_i64 t3
= tcg_temp_new_i64();
19469 gen_load_gpr(t0
, rs
);
19470 gen_load_gpr(t1
, rt
);
19471 tcg_gen_ext32u_tl(t0
, t0
);
19472 tcg_gen_ext32u_tl(t1
, t1
);
19473 tcg_gen_extu_tl_i64(t2
, t0
);
19474 tcg_gen_extu_tl_i64(t3
, t1
);
19475 tcg_gen_mul_i64(t2
, t2
, t3
);
19476 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19477 tcg_gen_sub_i64(t2
, t3
, t2
);
19478 tcg_temp_free_i64(t3
);
19479 gen_move_low32(cpu_LO
[acc
], t2
);
19480 gen_move_high32(cpu_HI
[acc
], t2
);
19481 tcg_temp_free_i64(t2
);
19486 tcg_gen_movi_tl(t0
, rd
>> 3);
19487 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19488 gen_store_gpr(t0
, ret
);
19493 generate_exception_end(ctx
, EXCP_RI
);
19500 tcg_temp_free(v0_t
);
19501 tcg_temp_free(v1_t
);
19504 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19508 TCGv t0
= tcg_temp_new();
19509 TCGv v0_t
= tcg_temp_new();
19511 gen_load_gpr(v0_t
, rs
);
19516 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19517 gen_store_gpr(v0_t
, ret
);
19521 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19522 gen_store_gpr(v0_t
, ret
);
19526 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19527 gen_store_gpr(v0_t
, ret
);
19529 case NM_PRECEQ_W_PHL
:
19531 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19532 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19533 gen_store_gpr(v0_t
, ret
);
19535 case NM_PRECEQ_W_PHR
:
19537 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19538 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19539 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19540 gen_store_gpr(v0_t
, ret
);
19542 case NM_PRECEQU_PH_QBL
:
19544 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19545 gen_store_gpr(v0_t
, ret
);
19547 case NM_PRECEQU_PH_QBR
:
19549 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19550 gen_store_gpr(v0_t
, ret
);
19552 case NM_PRECEQU_PH_QBLA
:
19554 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19555 gen_store_gpr(v0_t
, ret
);
19557 case NM_PRECEQU_PH_QBRA
:
19559 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19560 gen_store_gpr(v0_t
, ret
);
19562 case NM_PRECEU_PH_QBL
:
19564 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19565 gen_store_gpr(v0_t
, ret
);
19567 case NM_PRECEU_PH_QBR
:
19569 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19570 gen_store_gpr(v0_t
, ret
);
19572 case NM_PRECEU_PH_QBLA
:
19574 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19575 gen_store_gpr(v0_t
, ret
);
19577 case NM_PRECEU_PH_QBRA
:
19579 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19580 gen_store_gpr(v0_t
, ret
);
19584 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19585 tcg_gen_shli_tl(t0
, v0_t
, 16);
19586 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19587 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19588 gen_store_gpr(v0_t
, ret
);
19592 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19593 tcg_gen_shli_tl(t0
, v0_t
, 8);
19594 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19595 tcg_gen_shli_tl(t0
, v0_t
, 16);
19596 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19597 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19598 gen_store_gpr(v0_t
, ret
);
19602 gen_helper_bitrev(v0_t
, v0_t
);
19603 gen_store_gpr(v0_t
, ret
);
19608 TCGv tv0
= tcg_temp_new();
19610 gen_load_gpr(tv0
, rt
);
19611 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19612 gen_store_gpr(v0_t
, ret
);
19613 tcg_temp_free(tv0
);
19616 case NM_RADDU_W_QB
:
19618 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19619 gen_store_gpr(v0_t
, ret
);
19622 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19626 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19630 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19633 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19636 generate_exception_end(ctx
, EXCP_RI
);
19640 tcg_temp_free(v0_t
);
static void gen_pool32axf_7_nanomips_insn(DisasContext *ctx, uint32_t opc,
                                          int rt, int rs, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv rs_t = tcg_temp_new();

    gen_load_gpr(rs_t, rs);

        tcg_gen_movi_tl(t0, rd >> 2);
        switch (extract32(ctx->opcode, 12, 1)) {
            gen_helper_shra_qb(t0, t0, rs_t);
            gen_store_gpr(t0, rt);
            gen_helper_shra_r_qb(t0, t0, rs_t);
            gen_store_gpr(t0, rt);
        tcg_gen_movi_tl(t0, rd >> 1);
        gen_helper_shrl_ph(t0, t0, rs_t);
        gen_store_gpr(t0, rt);
        target_long result;
        imm = extract32(ctx->opcode, 13, 8);
        result = (uint32_t)imm << 24 |
                 (uint32_t)imm << 16 |
                 (uint32_t)imm << 8 |
                 (uint32_t)imm;
        result = (int32_t)result;
        tcg_gen_movi_tl(t0, result);
        gen_store_gpr(t0, rt);
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free(rs_t);
static void gen_pool32axf_nanomips_insn(CPUMIPSState *env, DisasContext *ctx)
{
    int rt = extract32(ctx->opcode, 21, 5);
    int rs = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);

    switch (extract32(ctx->opcode, 6, 3)) {
    case NM_POOL32AXF_1:
        int32_t op1 = extract32(ctx->opcode, 9, 3);
        gen_pool32axf_1_nanomips_insn(ctx, op1, rt, rs, rd);
    case NM_POOL32AXF_2:
        int32_t op1 = extract32(ctx->opcode, 12, 2);
        gen_pool32axf_2_nanomips_insn(ctx, op1, rt, rs, rd);
    case NM_POOL32AXF_4:
        int32_t op1 = extract32(ctx->opcode, 9, 7);
        gen_pool32axf_4_nanomips_insn(ctx, op1, rt, rs);
    case NM_POOL32AXF_5:
        switch (extract32(ctx->opcode, 9, 7)) {
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, OPC_TLBP, 0, 0);
            gen_cp0(env, ctx, OPC_TLBR, 0, 0);
            gen_cp0(env, ctx, OPC_TLBWI, 0, 0);
            gen_cp0(env, ctx, OPC_TLBWR, 0, 0);
            gen_cp0(env, ctx, OPC_TLBINV, 0, 0);
            gen_cp0(env, ctx, OPC_TLBINVF, 0, 0);
            check_cp0_enabled(ctx);
            TCGv t0 = tcg_temp_new();

            save_cpu_state(ctx, 1);
            gen_helper_di(t0, cpu_env);
            gen_store_gpr(t0, rt);
            /* Stop translation as we may have switched the execution mode */
            ctx->base.is_jmp = DISAS_STOP;
            check_cp0_enabled(ctx);
            TCGv t0 = tcg_temp_new();

            save_cpu_state(ctx, 1);
            gen_helper_ei(t0, cpu_env);
            gen_store_gpr(t0, rt);
            /* Stop translation as we may have switched the execution mode */
            ctx->base.is_jmp = DISAS_STOP;
            gen_load_srsgpr(rs, rt);
            gen_store_srsgpr(rs, rt);
            gen_cp0(env, ctx, OPC_WAIT, 0, 0);
            gen_cp0(env, ctx, OPC_DERET, 0, 0);
            gen_cp0(env, ctx, OPC_ERET, 0, 0);
            generate_exception_end(ctx, EXCP_RI);
    case NM_POOL32AXF_7:
        int32_t op1 = extract32(ctx->opcode, 9, 3);
        gen_pool32axf_7_nanomips_insn(ctx, op1, rt, rs, rd);
        generate_exception_end(ctx, EXCP_RI);
/* Immediate Value Compact Branches */
static void gen_compute_imm_branch(DisasContext *ctx, uint32_t opc,
                                   int rt, int32_t imm, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rt);
    tcg_gen_movi_tl(t1, imm);
    ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    /* Load needed operands and calculate btarget */
        if (rt == 0 && imm == 0) {
            /* Unconditional branch */
        } else if (rt == 0 && imm != 0) {
            cond = TCG_COND_EQ;
        if (imm >= 32 && !(ctx->hflags & MIPS_HFLAG_64)) {
            generate_exception_end(ctx, EXCP_RI);
        } else if (rt == 0 && opc == NM_BBEQZC) {
            /* Unconditional branch */
        } else if (rt == 0 && opc == NM_BBNEZC) {
            tcg_gen_shri_tl(t0, t0, imm);
            tcg_gen_andi_tl(t0, t0, 1);
            tcg_gen_movi_tl(t1, 0);
            if (opc == NM_BBEQZC) {
                cond = TCG_COND_EQ;
                cond = TCG_COND_NE;
        if (rt == 0 && imm == 0) {
        } else if (rt == 0 && imm != 0) {
            /* Unconditional branch */
            cond = TCG_COND_NE;
        if (rt == 0 && imm == 0) {
            /* Unconditional branch */
            cond = TCG_COND_GE;
            cond = TCG_COND_LT;
        if (rt == 0 && imm == 0) {
            /* Unconditional branch */
            cond = TCG_COND_GEU;
            cond = TCG_COND_LTU;
        MIPS_INVAL("Immediate Value Compact branch");
        generate_exception_end(ctx, EXCP_RI);

    /* branch completion */
    clear_branch_hflags(ctx);
    ctx->base.is_jmp = DISAS_NORETURN;

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        gen_goto_tb(ctx, 0, ctx->btarget);
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();

        tcg_gen_brcond_tl(tcg_invert_cond(cond), t0, t1, fs);

        gen_goto_tb(ctx, 1, ctx->btarget);

        gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
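    /*
     * Compact branches have no delay slot, so the two gen_goto_tb() calls
     * above terminate the translation block directly: slot 1 chains to the
     * branch target, and slot 0 falls through to pc_next + 4 when the
     * inverted condition sends control to the fs label.
     */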
/* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
static void gen_compute_nanomips_pbalrsc_branch(DisasContext *ctx, int rs,
                                                int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);

    tcg_gen_movi_tl(cpu_gpr[rt], ctx->base.pc_next + 4);

    /* calculate btarget */
    tcg_gen_shli_tl(t0, t0, 1);
    tcg_gen_movi_tl(t1, ctx->base.pc_next + 4);
    gen_op_addr_add(ctx, btarget, t1, t0);

    /* branch completion */
    clear_branch_hflags(ctx);
    ctx->base.is_jmp = DISAS_NORETURN;

    /* unconditional branch to register */
    tcg_gen_mov_tl(cpu_PC, btarget);
    tcg_gen_lookup_and_goto_ptr();
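    /*
     * BALRSC/BRSC compute their target as pc_next + 4 plus the source
     * register shifted left by one (a halfword-scaled register offset); the
     * link value written to rt is the same pc_next + 4.  Because the
     * destination is only known at run time, the jump goes through
     * tcg_gen_lookup_and_goto_ptr() rather than a direct TB link.
     */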
/* nanoMIPS Branches */
static void gen_compute_compact_branch_nm(DisasContext *ctx, uint32_t opc,
                                          int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands and calculate btarget */
        /* compact branch */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4);
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_BEQZC, OPC_BNEZC */
        gen_load_gpr(t0, rs);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_JIC, OPC_JIALC */
        TCGv tbase = tcg_temp_new();
        TCGv toffset = tcg_temp_new();

        gen_load_gpr(tbase, rt);
        tcg_gen_movi_tl(toffset, offset);
        gen_op_addr_add(ctx, btarget, tbase, toffset);
        tcg_temp_free(tbase);
        tcg_temp_free(toffset);
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
            gen_goto_tb(ctx, 0, ctx->btarget);
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();

            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

        /* branch completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;

        /* Generate the branch here, as compact branches have no delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);

        gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
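    /*
     * Each conditional form above branches to the fall-through label fs on
     * the inverted condition, so the straight-line path emits the taken TB
     * (goto_tb slot 1) and the code after fs emits the not-taken TB
     * (slot 0).  The rs/rt special cases mirror the compact-branch
     * encodings, where rs == 0 or rs == rt select a different comparison.
     */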
/* nanoMIPS CP1 Branches */
static void gen_compute_branch_cp1_nm(DisasContext *ctx, uint32_t op,
                                      int32_t ft, int32_t offset)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    tcg_temp_free_i64(t0);
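/*
 * The CP1 compact branches test bit 0 of the 64-bit FPR: the "equal to zero"
 * flavour inverts that bit with the xori so that bcond ends up non-zero when
 * the branch should be taken, while the "not equal to zero" flavour uses the
 * bit as-is ("t0 already set" above).
 */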
static void gen_p_lsx(DisasContext *ctx, int rd, int rs, int rt)
{
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    if ((extract32(ctx->opcode, 6, 1)) == 1) {
        /* PP.LSXS instructions require shifting */
        switch (extract32(ctx->opcode, 7, 4)) {
            tcg_gen_shli_tl(t0, t0, 1);
            tcg_gen_shli_tl(t0, t0, 2);
            tcg_gen_shli_tl(t0, t0, 3);

    gen_op_addr_add(ctx, t0, t0, t1);

    switch (extract32(ctx->opcode, 7, 4)) {
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
        gen_store_gpr(t0, rd);
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
    /*case NM_LWC1XS:*/
    /*case NM_LDC1XS:*/
    /*case NM_SWC1XS:*/
    /*case NM_SDC1XS:*/
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            switch (extract32(ctx->opcode, 7, 4)) {
            /*case NM_LWC1XS:*/
                gen_flt_ldst(ctx, OPC_LWC1, rd, t0);
            /*case NM_LDC1XS:*/
                gen_flt_ldst(ctx, OPC_LDC1, rd, t0);
            /*case NM_SWC1XS:*/
                gen_flt_ldst(ctx, OPC_SWC1, rd, t0);
            /*case NM_SDC1XS:*/
                gen_flt_ldst(ctx, OPC_SDC1, rd, t0);
            generate_exception_err(ctx, EXCP_CpU, 1);
        generate_exception_end(ctx, EXCP_RI);
20258 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20262 rt
= extract32(ctx
->opcode
, 21, 5);
20263 rs
= extract32(ctx
->opcode
, 16, 5);
20264 rd
= extract32(ctx
->opcode
, 11, 5);
20266 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20267 generate_exception_end(ctx
, EXCP_RI
);
20270 check_cp1_enabled(ctx
);
20271 switch (extract32(ctx
->opcode
, 0, 3)) {
20273 switch (extract32(ctx
->opcode
, 3, 7)) {
20275 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20278 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20281 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20284 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20287 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20290 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20293 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20296 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20299 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20302 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20305 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20308 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20311 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20314 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20317 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20320 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20323 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20326 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20329 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20332 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20335 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20338 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20341 generate_exception_end(ctx
, EXCP_RI
);
20346 switch (extract32(ctx
->opcode
, 3, 3)) {
20348 switch (extract32(ctx
->opcode
, 9, 1)) {
20350 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20353 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20358 switch (extract32(ctx
->opcode
, 9, 1)) {
20360 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20363 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20368 switch (extract32(ctx
->opcode
, 9, 1)) {
20370 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20373 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20378 switch (extract32(ctx
->opcode
, 9, 1)) {
20380 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20383 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20388 switch (extract32(ctx
->opcode
, 6, 8)) {
20390 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20393 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20396 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20399 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20402 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20405 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20408 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20411 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20414 switch (extract32(ctx
->opcode
, 6, 9)) {
20416 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20419 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20422 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20425 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20428 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20431 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20434 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20437 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20440 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20443 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20446 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20449 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20452 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20455 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20458 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20461 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20464 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20467 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20470 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20473 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20476 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20479 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20482 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20485 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20488 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20491 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20494 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20497 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20500 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20503 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20506 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20509 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20512 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20515 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20518 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20521 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20524 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20527 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20530 generate_exception_end(ctx
, EXCP_RI
);
20539 switch (extract32(ctx
->opcode
, 3, 3)) {
20540 case NM_CMP_CONDN_S
:
20541 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20543 case NM_CMP_CONDN_D
:
20544 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20547 generate_exception_end(ctx
, EXCP_RI
);
20552 generate_exception_end(ctx
, EXCP_RI
);
20557 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20558 int rd
, int rs
, int rt
)
20561 TCGv t0
= tcg_temp_new();
20562 TCGv v1_t
= tcg_temp_new();
20563 TCGv v2_t
= tcg_temp_new();
20565 gen_load_gpr(v1_t
, rs
);
20566 gen_load_gpr(v2_t
, rt
);
20571 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20575 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20579 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20581 case NM_CMPU_EQ_QB
:
20583 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20585 case NM_CMPU_LT_QB
:
20587 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20589 case NM_CMPU_LE_QB
:
20591 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20593 case NM_CMPGU_EQ_QB
:
20595 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20596 gen_store_gpr(v1_t
, ret
);
20598 case NM_CMPGU_LT_QB
:
20600 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20601 gen_store_gpr(v1_t
, ret
);
20603 case NM_CMPGU_LE_QB
:
20605 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20606 gen_store_gpr(v1_t
, ret
);
20608 case NM_CMPGDU_EQ_QB
:
20610 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20611 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20612 gen_store_gpr(v1_t
, ret
);
20614 case NM_CMPGDU_LT_QB
:
20616 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20617 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20618 gen_store_gpr(v1_t
, ret
);
20620 case NM_CMPGDU_LE_QB
:
20622 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20623 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20624 gen_store_gpr(v1_t
, ret
);
20628 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20629 gen_store_gpr(v1_t
, ret
);
20633 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20634 gen_store_gpr(v1_t
, ret
);
20638 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20639 gen_store_gpr(v1_t
, ret
);
20643 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20644 gen_store_gpr(v1_t
, ret
);
20648 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20649 gen_store_gpr(v1_t
, ret
);
20653 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20654 gen_store_gpr(v1_t
, ret
);
20658 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20659 gen_store_gpr(v1_t
, ret
);
20663 switch (extract32(ctx
->opcode
, 10, 1)) {
20666 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20667 gen_store_gpr(v1_t
, ret
);
20671 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20672 gen_store_gpr(v1_t
, ret
);
20676 case NM_ADDQH_R_PH
:
20678 switch (extract32(ctx
->opcode
, 10, 1)) {
20681 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20682 gen_store_gpr(v1_t
, ret
);
20686 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20687 gen_store_gpr(v1_t
, ret
);
20693 switch (extract32(ctx
->opcode
, 10, 1)) {
20696 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20697 gen_store_gpr(v1_t
, ret
);
20701 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20702 gen_store_gpr(v1_t
, ret
);
20708 switch (extract32(ctx
->opcode
, 10, 1)) {
20711 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20712 gen_store_gpr(v1_t
, ret
);
20716 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20717 gen_store_gpr(v1_t
, ret
);
20723 switch (extract32(ctx
->opcode
, 10, 1)) {
20726 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20727 gen_store_gpr(v1_t
, ret
);
20731 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20732 gen_store_gpr(v1_t
, ret
);
20736 case NM_ADDUH_R_QB
:
20738 switch (extract32(ctx
->opcode
, 10, 1)) {
20741 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20742 gen_store_gpr(v1_t
, ret
);
20746 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20747 gen_store_gpr(v1_t
, ret
);
20751 case NM_SHRAV_R_PH
:
20753 switch (extract32(ctx
->opcode
, 10, 1)) {
20756 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20757 gen_store_gpr(v1_t
, ret
);
20761 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20762 gen_store_gpr(v1_t
, ret
);
20766 case NM_SHRAV_R_QB
:
20768 switch (extract32(ctx
->opcode
, 10, 1)) {
20771 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20772 gen_store_gpr(v1_t
, ret
);
20776 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20777 gen_store_gpr(v1_t
, ret
);
20783 switch (extract32(ctx
->opcode
, 10, 1)) {
20786 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20787 gen_store_gpr(v1_t
, ret
);
20791 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20792 gen_store_gpr(v1_t
, ret
);
20796 case NM_SUBQH_R_PH
:
20798 switch (extract32(ctx
->opcode
, 10, 1)) {
20801 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20802 gen_store_gpr(v1_t
, ret
);
20806 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20807 gen_store_gpr(v1_t
, ret
);
20813 switch (extract32(ctx
->opcode
, 10, 1)) {
20816 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20817 gen_store_gpr(v1_t
, ret
);
20821 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20822 gen_store_gpr(v1_t
, ret
);
20828 switch (extract32(ctx
->opcode
, 10, 1)) {
20831 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20832 gen_store_gpr(v1_t
, ret
);
20836 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20837 gen_store_gpr(v1_t
, ret
);
20843 switch (extract32(ctx
->opcode
, 10, 1)) {
20846 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20847 gen_store_gpr(v1_t
, ret
);
20851 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20852 gen_store_gpr(v1_t
, ret
);
20856 case NM_SUBUH_R_QB
:
20858 switch (extract32(ctx
->opcode
, 10, 1)) {
20861 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20862 gen_store_gpr(v1_t
, ret
);
20866 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20867 gen_store_gpr(v1_t
, ret
);
20871 case NM_SHLLV_S_PH
:
20873 switch (extract32(ctx
->opcode
, 10, 1)) {
20876 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20877 gen_store_gpr(v1_t
, ret
);
20881 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20882 gen_store_gpr(v1_t
, ret
);
20886 case NM_PRECR_SRA_R_PH_W
:
20888 switch (extract32(ctx
->opcode
, 10, 1)) {
20890 /* PRECR_SRA_PH_W */
20892 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20893 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20895 gen_store_gpr(v1_t
, rt
);
20896 tcg_temp_free_i32(sa_t
);
20900 /* PRECR_SRA_R_PH_W */
20902 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20903 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20905 gen_store_gpr(v1_t
, rt
);
20906 tcg_temp_free_i32(sa_t
);
20911 case NM_MULEU_S_PH_QBL
:
20913 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20914 gen_store_gpr(v1_t
, ret
);
20916 case NM_MULEU_S_PH_QBR
:
20918 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20919 gen_store_gpr(v1_t
, ret
);
20921 case NM_MULQ_RS_PH
:
20923 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20924 gen_store_gpr(v1_t
, ret
);
20928 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20929 gen_store_gpr(v1_t
, ret
);
20933 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20934 gen_store_gpr(v1_t
, ret
);
20938 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20939 gen_store_gpr(v1_t
, ret
);
20943 gen_load_gpr(t0
, rs
);
20945 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20947 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20951 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20952 gen_store_gpr(v1_t
, ret
);
20956 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20957 gen_store_gpr(v1_t
, ret
);
20961 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20962 gen_store_gpr(v1_t
, ret
);
20966 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20967 gen_store_gpr(v1_t
, ret
);
20971 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20972 gen_store_gpr(v1_t
, ret
);
20976 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20977 gen_store_gpr(v1_t
, ret
);
20982 TCGv tv0
= tcg_temp_new();
20983 TCGv tv1
= tcg_temp_new();
20984 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20986 tcg_gen_movi_tl(tv0
, rd
>> 3);
20987 tcg_gen_movi_tl(tv1
, imm
);
20988 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20991 case NM_MULEQ_S_W_PHL
:
20993 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20994 gen_store_gpr(v1_t
, ret
);
20996 case NM_MULEQ_S_W_PHR
:
20998 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20999 gen_store_gpr(v1_t
, ret
);
21003 switch (extract32(ctx
->opcode
, 10, 1)) {
21006 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21007 gen_store_gpr(v1_t
, ret
);
21011 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21012 gen_store_gpr(v1_t
, ret
);
21016 case NM_PRECR_QB_PH
:
21018 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
21019 gen_store_gpr(v1_t
, ret
);
21021 case NM_PRECRQ_QB_PH
:
21023 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
21024 gen_store_gpr(v1_t
, ret
);
21026 case NM_PRECRQ_PH_W
:
21028 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
21029 gen_store_gpr(v1_t
, ret
);
21031 case NM_PRECRQ_RS_PH_W
:
21033 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
21034 gen_store_gpr(v1_t
, ret
);
21036 case NM_PRECRQU_S_QB_PH
:
21038 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21039 gen_store_gpr(v1_t
, ret
);
21043 tcg_gen_movi_tl(t0
, rd
);
21044 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
21045 gen_store_gpr(v1_t
, rt
);
21049 tcg_gen_movi_tl(t0
, rd
>> 1);
21050 switch (extract32(ctx
->opcode
, 10, 1)) {
21053 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
21054 gen_store_gpr(v1_t
, rt
);
21058 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
21059 gen_store_gpr(v1_t
, rt
);
21065 tcg_gen_movi_tl(t0
, rd
>> 1);
21066 switch (extract32(ctx
->opcode
, 10, 2)) {
21069 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
21070 gen_store_gpr(v1_t
, rt
);
21074 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
21075 gen_store_gpr(v1_t
, rt
);
21078 generate_exception_end(ctx
, EXCP_RI
);
21084 tcg_gen_movi_tl(t0
, rd
);
21085 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
21086 gen_store_gpr(v1_t
, rt
);
21092 imm
= sextract32(ctx
->opcode
, 11, 11);
21093 imm
= (int16_t)(imm
<< 6) >> 6;
21095 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
21100 generate_exception_end(ctx
, EXCP_RI
);
static int decode_nanomips_32_48_opc(CPUMIPSState *env, DisasContext *ctx)
    insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = extract32(ctx->opcode, 21, 5);
    rs = extract32(ctx->opcode, 16, 5);
    rd = extract32(ctx->opcode, 11, 5);

    op = extract32(ctx->opcode, 26, 6);
        switch (extract32(ctx->opcode, 19, 2)) {
            generate_exception_end(ctx, EXCP_RI);
            if ((extract32(ctx->opcode, 18, 1)) == NM_SYSCALL) {
                generate_exception_end(ctx, EXCP_SYSCALL);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_end(ctx, EXCP_BREAK);
            if (is_uhi(extract32(ctx->opcode, 0, 19))) {
                gen_helper_do_semihosting(cpu_env);
                if (ctx->hflags & MIPS_HFLAG_SBRI) {
                    generate_exception_end(ctx, EXCP_RI);
                    generate_exception_end(ctx, EXCP_DBp);
        imm = extract32(ctx->opcode, 0, 16);
            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], imm);
            tcg_gen_movi_tl(cpu_gpr[rt], imm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        offset = sextract32(ctx->opcode, 0, 1) << 21 |
                 extract32(ctx->opcode, 1, 20) << 1;
        target_long addr = addr_add(ctx, ctx->base.pc_next + 4, offset);
        tcg_gen_movi_tl(cpu_gpr[rt], addr);
        switch (ctx->opcode & 0x07) {
            gen_pool32a0_nanomips_insn(env, ctx);
            int32_t op1 = extract32(ctx->opcode, 3, 7);
            gen_pool32a5_nanomips_insn(ctx, op1, rd, rs, rt);
            switch (extract32(ctx->opcode, 3, 3)) {
                gen_p_lsx(ctx, rd, rs, rt);
                /*
                 * In nanoMIPS, the shift field directly encodes the shift
                 * amount, meaning that the supported shift values are in
                 * the range 0 to 3 (instead of 1 to 4 in MIPSR6).
                 */
                gen_lsa(ctx, OPC_LSA, rd, rs, rt,
                        extract32(ctx->opcode, 9, 2) - 1);
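                /*
                 * Illustrative note (added; not in the original source):
                 * gen_lsa() follows the MIPSR6 convention where the encoded
                 * field holds (shift - 1), so passing "field - 1" makes the
                 * effective shift equal the raw nanoMIPS field; e.g. an
                 * encoded value of 2 produces rd = (rs << 2) + rt.
                 */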
                gen_ext(ctx, 32, rd, rs, rt, extract32(ctx->opcode, 6, 5));
                gen_pool32axf_nanomips_insn(env, ctx);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_end(ctx, EXCP_RI);
21213 switch (ctx
->opcode
& 0x03) {
21216 offset
= extract32(ctx
->opcode
, 0, 21);
21217 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21221 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21224 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21227 generate_exception_end(ctx
, EXCP_RI
);
21233 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21234 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21235 switch (extract32(ctx
->opcode
, 16, 5)) {
21239 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21245 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21246 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21252 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21258 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21261 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21268 t0
= tcg_temp_new();
21270 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21273 tcg_gen_movi_tl(t0
, addr
);
21274 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21282 t0
= tcg_temp_new();
21283 t1
= tcg_temp_new();
21285 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21288 tcg_gen_movi_tl(t0
, addr
);
21289 gen_load_gpr(t1
, rt
);
21291 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21298 generate_exception_end(ctx
, EXCP_RI
);
21304 switch (extract32(ctx
->opcode
, 12, 4)) {
21306 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21309 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21312 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21315 switch (extract32(ctx
->opcode
, 20, 1)) {
21317 switch (ctx
->opcode
& 3) {
21319 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21320 extract32(ctx
->opcode
, 2, 1),
21321 extract32(ctx
->opcode
, 3, 9) << 3);
21324 case NM_RESTORE_JRC
:
21325 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21326 extract32(ctx
->opcode
, 2, 1),
21327 extract32(ctx
->opcode
, 3, 9) << 3);
21328 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21329 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21333 generate_exception_end(ctx
, EXCP_RI
);
21338 generate_exception_end(ctx
, EXCP_RI
);
21343 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21346 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21350 TCGv t0
= tcg_temp_new();
21352 imm
= extract32(ctx
->opcode
, 0, 12);
21353 gen_load_gpr(t0
, rs
);
21354 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21355 gen_store_gpr(t0
, rt
);
21361 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21362 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21366 int shift
= extract32(ctx
->opcode
, 0, 5);
21367 switch (extract32(ctx
->opcode
, 5, 4)) {
21369 if (rt
== 0 && shift
== 0) {
21371 } else if (rt
== 0 && shift
== 3) {
21372 /* EHB - treat as NOP */
21373 } else if (rt
== 0 && shift
== 5) {
21374 /* PAUSE - treat as NOP */
21375 } else if (rt
== 0 && shift
== 6) {
21377 gen_sync(extract32(ctx
->opcode
, 16, 5));
21380 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21381 extract32(ctx
->opcode
, 0, 5));
21385 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21386 extract32(ctx
->opcode
, 0, 5));
21389 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21390 extract32(ctx
->opcode
, 0, 5));
21393 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21394 extract32(ctx
->opcode
, 0, 5));
21402 TCGv t0
= tcg_temp_new();
21403 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21404 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21406 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21408 gen_load_gpr(t0
, rs
);
21409 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21412 tcg_temp_free_i32(shift
);
21413 tcg_temp_free_i32(shiftx
);
21414 tcg_temp_free_i32(stripe
);
21418 switch (((ctx
->opcode
>> 10) & 2) |
21419 (extract32(ctx
->opcode
, 5, 1))) {
21422 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21423 extract32(ctx
->opcode
, 6, 5));
21426 generate_exception_end(ctx
, EXCP_RI
);
21431 switch (((ctx
->opcode
>> 10) & 2) |
21432 (extract32(ctx
->opcode
, 5, 1))) {
21435 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21436 extract32(ctx
->opcode
, 6, 5));
21439 generate_exception_end(ctx
, EXCP_RI
);
21444 generate_exception_end(ctx
, EXCP_RI
);
21449 gen_pool32f_nanomips_insn(ctx
);
21454 switch (extract32(ctx
->opcode
, 1, 1)) {
21457 tcg_gen_movi_tl(cpu_gpr
[rt
],
21458 sextract32(ctx
->opcode
, 0, 1) << 31 |
21459 extract32(ctx
->opcode
, 2, 10) << 21 |
21460 extract32(ctx
->opcode
, 12, 9) << 12);
21465 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21466 extract32(ctx
->opcode
, 2, 10) << 21 |
21467 extract32(ctx
->opcode
, 12, 9) << 12;
21469 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21470 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21477 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21479 switch (extract32(ctx
->opcode
, 18, 3)) {
21481 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21484 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21487 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21491 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21496 switch (ctx
->opcode
& 1) {
21498 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21501 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21507 switch (ctx
->opcode
& 1) {
21509 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21512 generate_exception_end(ctx
, EXCP_RI
);
21518 switch (ctx
->opcode
& 0x3) {
21520 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21523 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21526 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21529 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21534 generate_exception_end(ctx
, EXCP_RI
);
21541 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21543 switch (extract32(ctx
->opcode
, 12, 4)) {
21548 * Break the TB to be able to sync copied instructions
21551 ctx
->base
.is_jmp
= DISAS_STOP
;
21554 /* Treat as NOP. */
21558 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21561 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21564 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21567 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21570 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21573 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21576 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21579 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21582 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21585 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21588 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21591 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21594 generate_exception_end(ctx
, EXCP_RI
);
21601 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21602 extract32(ctx
->opcode
, 0, 8);
21604 switch (extract32(ctx
->opcode
, 8, 3)) {
21606 switch (extract32(ctx
->opcode
, 11, 4)) {
21608 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21611 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21614 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21617 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21620 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21623 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21626 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21629 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21632 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21635 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21638 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21641 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21647 * Break the TB to be able to sync copied instructions
21650 ctx
->base
.is_jmp
= DISAS_STOP
;
21653 /* Treat as NOP. */
21657 generate_exception_end(ctx
, EXCP_RI
);
21662 switch (extract32(ctx
->opcode
, 11, 4)) {
21667 TCGv t0
= tcg_temp_new();
21668 TCGv t1
= tcg_temp_new();
21670 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21672 switch (extract32(ctx
->opcode
, 11, 4)) {
21674 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21676 gen_store_gpr(t0
, rt
);
21679 gen_load_gpr(t1
, rt
);
21680 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21689 switch (ctx
->opcode
& 0x03) {
21691 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21695 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21700 switch (ctx
->opcode
& 0x03) {
21702 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21706 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21712 check_cp0_enabled(ctx
);
21713 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21714 gen_cache_operation(ctx
, rt
, rs
, s
);
21720 switch (extract32(ctx
->opcode
, 11, 4)) {
21723 check_cp0_enabled(ctx
);
21724 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21728 check_cp0_enabled(ctx
);
21729 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21733 check_cp0_enabled(ctx
);
21734 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21738 /* case NM_SYNCIE */
21740 check_cp0_enabled(ctx
);
21742 * Break the TB to be able to sync copied instructions
21745 ctx
->base
.is_jmp
= DISAS_STOP
;
21747 /* case NM_PREFE */
21749 check_cp0_enabled(ctx
);
21750 /* Treat as NOP. */
21755 check_cp0_enabled(ctx
);
21756 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21760 check_cp0_enabled(ctx
);
21761 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21765 check_cp0_enabled(ctx
);
21766 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21769 check_nms_dl_il_sl_tl_l2c(ctx
);
21770 gen_cache_operation(ctx
, rt
, rs
, s
);
21774 check_cp0_enabled(ctx
);
21775 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21779 check_cp0_enabled(ctx
);
21780 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21783 switch (extract32(ctx
->opcode
, 2, 2)) {
21787 check_cp0_enabled(ctx
);
21788 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21793 check_cp0_enabled(ctx
);
21794 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21797 generate_exception_end(ctx
, EXCP_RI
);
21802 switch (extract32(ctx
->opcode
, 2, 2)) {
21806 check_cp0_enabled(ctx
);
21807 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21812 check_cp0_enabled(ctx
);
21813 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21817 generate_exception_end(ctx
, EXCP_RI
);
            int count = extract32(ctx->opcode, 12, 3);

            offset = sextract32(ctx->opcode, 15, 1) << 8 |
                     extract32(ctx->opcode, 0, 8);
            TCGv va = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGMemOp memop = (extract32(ctx->opcode, 8, 3)) ==
                             NM_P_LS_UAWM ? MO_UNALN : 0;

            count = (count == 0) ? 8 : count;
            while (counter != count) {
                int this_rt = ((rt + counter) & 0x1f) | (rt & 0x10);
                int this_offset = offset + (counter << 2);
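                /*
                 * Illustrative note (added; not in the original source): the
                 * register list wraps within the upper bank when rt >= 16.
                 * With rt = 30 the successive this_rt values are 30, 31, 16,
                 * 17, ... (((30 + 2) & 0x1f) | 0x10 == 16), while the access
                 * offset advances by 4 bytes per register.
                 */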
                gen_base_offset_addr(ctx, va, rs, this_offset);

                switch (extract32(ctx->opcode, 11, 1)) {
                    tcg_gen_qemu_ld_tl(t1, va, ctx->mem_idx,
                                       memop | MO_TESL);
                    gen_store_gpr(t1, this_rt);
                    if ((this_rt == rs) &&
                        (counter != (count - 1))) {
                        /* UNPREDICTABLE */
                    this_rt = (rt == 0) ? 0 : this_rt;
                    gen_load_gpr(t1, this_rt);
                    tcg_gen_qemu_st_tl(t1, va, ctx->mem_idx,
                                       memop | MO_TEUL);
            generate_exception_end(ctx, EXCP_RI);
21876 TCGv t0
= tcg_temp_new();
21877 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21878 extract32(ctx
->opcode
, 1, 20) << 1;
21879 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21880 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21881 extract32(ctx
->opcode
, 21, 3));
21882 gen_load_gpr(t0
, rt
);
21883 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21884 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21890 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21891 extract32(ctx
->opcode
, 1, 24) << 1;
21893 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21895 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21898 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21903 switch (extract32(ctx
->opcode
, 12, 4)) {
21906 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21909 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21912 generate_exception_end(ctx
, EXCP_RI
);
21918 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21919 extract32(ctx
->opcode
, 1, 13) << 1;
21920 switch (extract32(ctx
->opcode
, 14, 2)) {
21923 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21926 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21927 extract32(ctx
->opcode
, 1, 13) << 1;
21928 check_cp1_enabled(ctx
);
21929 switch (extract32(ctx
->opcode
, 16, 5)) {
21931 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21934 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21939 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21940 extract32(ctx
->opcode
, 0, 1) << 13;
21942 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21947 generate_exception_end(ctx
, EXCP_RI
);
21953 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21955 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21959 if (rs
== rt
|| rt
== 0) {
21960 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21961 } else if (rs
== 0) {
21962 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21964 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21972 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21973 extract32(ctx
->opcode
, 1, 13) << 1;
21974 switch (extract32(ctx
->opcode
, 14, 2)) {
21977 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21980 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21982 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21984 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21988 if (rs
== 0 || rs
== rt
) {
21990 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21992 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21996 generate_exception_end(ctx
, EXCP_RI
);
22003 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
22004 extract32(ctx
->opcode
, 1, 10) << 1;
22005 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
22007 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
22012 generate_exception_end(ctx
, EXCP_RI
);
static int decode_nanomips_opc(CPUMIPSState *env, DisasContext *ctx)
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx->opcode));
    int rd = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx->opcode));

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        TCGv tmp = tcg_const_tl(ctx->base.pc_next);
        tcg_gen_st_tl(tmp, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
        tcg_temp_free(tmp);
        generate_exception_end(ctx, EXCP_AdEL);

    op = extract32(ctx->opcode, 10, 6);
22039 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22042 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
22043 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
22046 switch (extract32(ctx
->opcode
, 3, 2)) {
22047 case NM_P16_SYSCALL
:
22048 if (extract32(ctx
->opcode
, 2, 1) == 0) {
22049 generate_exception_end(ctx
, EXCP_SYSCALL
);
22051 generate_exception_end(ctx
, EXCP_RI
);
22055 generate_exception_end(ctx
, EXCP_BREAK
);
22058 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
22059 gen_helper_do_semihosting(cpu_env
);
22061 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
22062 generate_exception_end(ctx
, EXCP_RI
);
22064 generate_exception_end(ctx
, EXCP_DBp
);
22069 generate_exception_end(ctx
, EXCP_RI
);
22076 int shift
= extract32(ctx
->opcode
, 0, 3);
22078 shift
= (shift
== 0) ? 8 : shift
;
22080 switch (extract32(ctx
->opcode
, 3, 1)) {
22088 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
22092 switch (ctx
->opcode
& 1) {
22094 gen_pool16c_nanomips_insn(ctx
);
22097 gen_ldxs(ctx
, rt
, rs
, rd
);
22102 switch (extract32(ctx
->opcode
, 6, 1)) {
22104 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
22105 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
22108 generate_exception_end(ctx
, EXCP_RI
);
        switch (extract32(ctx->opcode, 3, 1)) {
            imm = extract32(ctx->opcode, 0, 3) << 2;
            gen_arith_imm(ctx, OPC_ADDIU, rt, rs, imm);
        case NM_P_ADDIURS5:
            rt = extract32(ctx->opcode, 5, 5);
                /* imm = sign_extend(s[3] . s[2:0], from_nbits = 4) */
                imm = (sextract32(ctx->opcode, 4, 1) << 3) |
                      (extract32(ctx->opcode, 0, 3));
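                /*
                 * Illustrative note (added; not in the original source):
                 * with s[3] = 1 and s[2:0] = 0b010 the immediate is
                 * (-1 << 3) | 2 = -6, i.e. the 4-bit field spans -8 .. +7.
                 */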
                gen_arith_imm(ctx, OPC_ADDIU, rt, rt, imm);
22130 switch (ctx
->opcode
& 0x1) {
22132 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
22135 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
22140 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22141 extract32(ctx
->opcode
, 5, 3);
22142 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22143 extract32(ctx
->opcode
, 0, 3);
22144 rt
= decode_gpr_gpr4(rt
);
22145 rs
= decode_gpr_gpr4(rs
);
22146 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22147 (extract32(ctx
->opcode
, 3, 1))) {
22150 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22154 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22157 generate_exception_end(ctx
, EXCP_RI
);
22163 int imm
= extract32(ctx
->opcode
, 0, 7);
22164 imm
= (imm
== 0x7f ? -1 : imm
);
22166 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22172 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22173 u
= (u
== 12) ? 0xff :
22174 (u
== 13) ? 0xffff : u
;
22175 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22179 offset
= extract32(ctx
->opcode
, 0, 2);
22180 switch (extract32(ctx
->opcode
, 2, 2)) {
22182 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22185 rt
= decode_gpr_gpr3_src_store(
22186 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22187 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22190 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22193 generate_exception_end(ctx
, EXCP_RI
);
22198 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22199 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22201 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22204 rt
= decode_gpr_gpr3_src_store(
22205 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22206 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22209 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22212 generate_exception_end(ctx
, EXCP_RI
);
22217 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22218 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22221 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22222 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22223 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22227 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22228 extract32(ctx
->opcode
, 5, 3);
22229 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22230 extract32(ctx
->opcode
, 0, 3);
22231 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22232 (extract32(ctx
->opcode
, 8, 1) << 2);
22233 rt
= decode_gpr_gpr4(rt
);
22234 rs
= decode_gpr_gpr4(rs
);
22235 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22239 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22240 extract32(ctx
->opcode
, 5, 3);
22241 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22242 extract32(ctx
->opcode
, 0, 3);
22243 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22244 (extract32(ctx
->opcode
, 8, 1) << 2);
22245 rt
= decode_gpr_gpr4_zero(rt
);
22246 rs
= decode_gpr_gpr4(rs
);
22247 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22250 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22251 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22254 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22255 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22256 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22259 rt
= decode_gpr_gpr3_src_store(
22260 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22261 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22262 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22263 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22266 rt
= decode_gpr_gpr3_src_store(
22267 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22268 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22269 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22272 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22273 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22274 (extract32(ctx
->opcode
, 1, 9) << 1));
22277 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22278 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22279 (extract32(ctx
->opcode
, 1, 9) << 1));
22282 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22283 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22284 (extract32(ctx
->opcode
, 1, 6) << 1));
22287 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22288 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22289 (extract32(ctx
->opcode
, 1, 6) << 1));
22292 switch (ctx
->opcode
& 0xf) {
22295 switch (extract32(ctx
->opcode
, 4, 1)) {
22297 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22298 extract32(ctx
->opcode
, 5, 5), 0, 0);
22301 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22302 extract32(ctx
->opcode
, 5, 5), 31, 0);
22309 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22310 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22311 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22312 extract32(ctx
->opcode
, 0, 4) << 1);
22319 int count
= extract32(ctx
->opcode
, 0, 4);
22320 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22322 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22323 switch (extract32(ctx
->opcode
, 8, 1)) {
22325 gen_save(ctx
, rt
, count
, 0, u
);
22327 case NM_RESTORE_JRC16
:
22328 gen_restore(ctx
, rt
, count
, 0, u
);
22329 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22338 static const int gpr2reg1
[] = {4, 5, 6, 7};
22339 static const int gpr2reg2
[] = {5, 6, 7, 8};
22341 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22342 extract32(ctx
->opcode
, 8, 1);
22343 int r1
= gpr2reg1
[rd2
];
22344 int r2
= gpr2reg2
[rd2
];
22345 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22346 extract32(ctx
->opcode
, 0, 3);
22347 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22348 extract32(ctx
->opcode
, 5, 3);
22349 TCGv t0
= tcg_temp_new();
22350 TCGv t1
= tcg_temp_new();
22351 if (op
== NM_MOVEP
) {
22354 rs
= decode_gpr_gpr4_zero(r3
);
22355 rt
= decode_gpr_gpr4_zero(r4
);
22357 rd
= decode_gpr_gpr4(r3
);
22358 re
= decode_gpr_gpr4(r4
);
22362 gen_load_gpr(t0
, rs
);
22363 gen_load_gpr(t1
, rt
);
22364 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22365 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22371 return decode_nanomips_32_48_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);

        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
        /* Treat as NOP. */
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
22449 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22451 case OPC_ADDUH_R_QB
:
22452 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22455 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22457 case OPC_ADDQH_R_PH
:
22458 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22461 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22463 case OPC_ADDQH_R_W
:
22464 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22467 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22469 case OPC_SUBUH_R_QB
:
22470 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22473 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22475 case OPC_SUBQH_R_PH
:
22476 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22479 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22481 case OPC_SUBQH_R_W
:
22482 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22486 case OPC_ABSQ_S_PH_DSP
:
22488 case OPC_ABSQ_S_QB
:
22490 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22492 case OPC_ABSQ_S_PH
:
22494 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22498 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22500 case OPC_PRECEQ_W_PHL
:
22502 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22503 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22505 case OPC_PRECEQ_W_PHR
:
22507 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22508 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22509 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22511 case OPC_PRECEQU_PH_QBL
:
22513 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22515 case OPC_PRECEQU_PH_QBR
:
22517 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22519 case OPC_PRECEQU_PH_QBLA
:
22521 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22523 case OPC_PRECEQU_PH_QBRA
:
22525 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22527 case OPC_PRECEU_PH_QBL
:
22529 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22531 case OPC_PRECEU_PH_QBR
:
22533 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22535 case OPC_PRECEU_PH_QBLA
:
22537 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22539 case OPC_PRECEU_PH_QBRA
:
22541 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22545 case OPC_ADDU_QB_DSP
:
22549 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22551 case OPC_ADDQ_S_PH
:
22553 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22557 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22561 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22563 case OPC_ADDU_S_QB
:
22565 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22569 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22571 case OPC_ADDU_S_PH
:
22573 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22577 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22579 case OPC_SUBQ_S_PH
:
22581 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22585 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22589 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22591 case OPC_SUBU_S_QB
:
22593 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22597 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22599 case OPC_SUBU_S_PH
:
22601 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22605 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22609 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22613 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22615 case OPC_RADDU_W_QB
:
22617 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22621 case OPC_CMPU_EQ_QB_DSP
:
22623 case OPC_PRECR_QB_PH
:
22625 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22627 case OPC_PRECRQ_QB_PH
:
22629 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22631 case OPC_PRECR_SRA_PH_W
:
22634 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22635 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22637 tcg_temp_free_i32(sa_t
);
22640 case OPC_PRECR_SRA_R_PH_W
:
22643 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22644 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22646 tcg_temp_free_i32(sa_t
);
22649 case OPC_PRECRQ_PH_W
:
22651 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22653 case OPC_PRECRQ_RS_PH_W
:
22655 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22657 case OPC_PRECRQU_S_QB_PH
:
22659 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22663 #ifdef TARGET_MIPS64
22664 case OPC_ABSQ_S_QH_DSP
:
22666 case OPC_PRECEQ_L_PWL
:
22668 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22670 case OPC_PRECEQ_L_PWR
:
22672 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22674 case OPC_PRECEQ_PW_QHL
:
22676 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22678 case OPC_PRECEQ_PW_QHR
:
22680 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22682 case OPC_PRECEQ_PW_QHLA
:
22684 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22686 case OPC_PRECEQ_PW_QHRA
:
22688 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22690 case OPC_PRECEQU_QH_OBL
:
22692 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22694 case OPC_PRECEQU_QH_OBR
:
22696 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22698 case OPC_PRECEQU_QH_OBLA
:
22700 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22702 case OPC_PRECEQU_QH_OBRA
:
22704 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22706 case OPC_PRECEU_QH_OBL
:
22708 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22710 case OPC_PRECEU_QH_OBR
:
22712 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22714 case OPC_PRECEU_QH_OBLA
:
22716 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22718 case OPC_PRECEU_QH_OBRA
:
22720 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22722 case OPC_ABSQ_S_OB
:
22724 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22726 case OPC_ABSQ_S_PW
:
22728 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22730 case OPC_ABSQ_S_QH
:
22732 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22736 case OPC_ADDU_OB_DSP
:
22738 case OPC_RADDU_L_OB
:
22740 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22744 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22746 case OPC_SUBQ_S_PW
:
22748 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22752 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22754 case OPC_SUBQ_S_QH
:
22756 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22760 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22762 case OPC_SUBU_S_OB
:
22764 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22768 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22770 case OPC_SUBU_S_QH
:
22772 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22776 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22778 case OPC_SUBUH_R_OB
:
22780 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22784 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22786 case OPC_ADDQ_S_PW
:
22788 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22792 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22794 case OPC_ADDQ_S_QH
:
22796 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22800 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22802 case OPC_ADDU_S_OB
:
22804 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22808 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22810 case OPC_ADDU_S_QH
:
22812 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22816 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22818 case OPC_ADDUH_R_OB
:
22820 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22824 case OPC_CMPU_EQ_OB_DSP
:
22826 case OPC_PRECR_OB_QH
:
22828 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22830 case OPC_PRECR_SRA_QH_PW
:
22833 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22834 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22835 tcg_temp_free_i32(ret_t
);
22838 case OPC_PRECR_SRA_R_QH_PW
:
22841 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22842 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22843 tcg_temp_free_i32(sa_v
);
22846 case OPC_PRECRQ_OB_QH
:
22848 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22850 case OPC_PRECRQ_PW_L
:
22852 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22854 case OPC_PRECRQ_QH_PW
:
22856 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22858 case OPC_PRECRQ_RS_QH_PW
:
22860 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22862 case OPC_PRECRQU_S_OB_QH
:
22864 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22871 tcg_temp_free(v1_t
);
22872 tcg_temp_free(v2_t
);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    case OPC_SHLL_QB_DSP:
        op2 = MASK_SHLL_QB(ctx->opcode);
22903 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22907 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22911 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22915 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22917 case OPC_SHLL_S_PH
:
22919 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22921 case OPC_SHLLV_S_PH
:
22923 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22927 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22929 case OPC_SHLLV_S_W
:
22931 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22935 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22939 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22943 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22947 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22951 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22953 case OPC_SHRA_R_QB
:
22955 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22959 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22961 case OPC_SHRAV_R_QB
:
22963 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22967 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22969 case OPC_SHRA_R_PH
:
22971 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22975 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22977 case OPC_SHRAV_R_PH
:
22979 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22983 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22985 case OPC_SHRAV_R_W
:
22987 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22989 default: /* Invalid */
22990 MIPS_INVAL("MASK SHLL.QB");
22991 generate_exception_end(ctx
, EXCP_RI
);
22996 #ifdef TARGET_MIPS64
22997 case OPC_SHLL_OB_DSP
:
22998 op2
= MASK_SHLL_OB(ctx
->opcode
);
23002 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23006 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23008 case OPC_SHLL_S_PW
:
23010 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23012 case OPC_SHLLV_S_PW
:
23014 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23018 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23022 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23026 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23030 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23032 case OPC_SHLL_S_QH
:
23034 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23036 case OPC_SHLLV_S_QH
:
23038 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23042 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
23046 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23048 case OPC_SHRA_R_OB
:
23050 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
23052 case OPC_SHRAV_R_OB
:
23054 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23058 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
23062 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23064 case OPC_SHRA_R_PW
:
23066 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
23068 case OPC_SHRAV_R_PW
:
23070 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23074 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
23078 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23080 case OPC_SHRA_R_QH
:
23082 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
23084 case OPC_SHRAV_R_QH
:
23086 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23090 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
23094 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23098 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
23102 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23104 default: /* Invalid */
23105 MIPS_INVAL("MASK SHLL.OB");
23106 generate_exception_end(ctx
, EXCP_RI
);
23114 tcg_temp_free(v1_t
);
23115 tcg_temp_free(v2_t
);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    /*
     * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1.
     */
    case OPC_MULT_G_2E:
23147 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23150 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23153 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23155 case OPC_MULQ_RS_W
:
23156 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23160 case OPC_DPA_W_PH_DSP
:
23162 case OPC_DPAU_H_QBL
:
23164 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23166 case OPC_DPAU_H_QBR
:
23168 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23170 case OPC_DPSU_H_QBL
:
23172 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23174 case OPC_DPSU_H_QBR
:
23176 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23180 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23182 case OPC_DPAX_W_PH
:
23184 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23186 case OPC_DPAQ_S_W_PH
:
23188 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23190 case OPC_DPAQX_S_W_PH
:
23192 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23194 case OPC_DPAQX_SA_W_PH
:
23196 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23200 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23202 case OPC_DPSX_W_PH
:
23204 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23206 case OPC_DPSQ_S_W_PH
:
23208 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23210 case OPC_DPSQX_S_W_PH
:
23212 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23214 case OPC_DPSQX_SA_W_PH
:
23216 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23218 case OPC_MULSAQ_S_W_PH
:
23220 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23222 case OPC_DPAQ_SA_L_W
:
23224 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23226 case OPC_DPSQ_SA_L_W
:
23228 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23230 case OPC_MAQ_S_W_PHL
:
23232 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23234 case OPC_MAQ_S_W_PHR
:
23236 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23238 case OPC_MAQ_SA_W_PHL
:
23240 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23242 case OPC_MAQ_SA_W_PHR
:
23244 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23246 case OPC_MULSA_W_PH
:
23248 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23252 #ifdef TARGET_MIPS64
23253 case OPC_DPAQ_W_QH_DSP
:
23255 int ac
= ret
& 0x03;
23256 tcg_gen_movi_i32(t0
, ac
);
23261 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23265 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23269 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23273 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23277 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23279 case OPC_DPAQ_S_W_QH
:
23281 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23283 case OPC_DPAQ_SA_L_PW
:
23285 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23287 case OPC_DPAU_H_OBL
:
23289 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23291 case OPC_DPAU_H_OBR
:
23293 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23297 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23299 case OPC_DPSQ_S_W_QH
:
23301 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23303 case OPC_DPSQ_SA_L_PW
:
23305 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23307 case OPC_DPSU_H_OBL
:
23309 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23311 case OPC_DPSU_H_OBR
:
23313 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23315 case OPC_MAQ_S_L_PWL
:
23317 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23319 case OPC_MAQ_S_L_PWR
:
23321 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23323 case OPC_MAQ_S_W_QHLL
:
23325 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23327 case OPC_MAQ_SA_W_QHLL
:
23329 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23331 case OPC_MAQ_S_W_QHLR
:
23333 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23335 case OPC_MAQ_SA_W_QHLR
:
23337 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23339 case OPC_MAQ_S_W_QHRL
:
23341 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23343 case OPC_MAQ_SA_W_QHRL
:
23345 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23347 case OPC_MAQ_S_W_QHRR
:
23349 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23351 case OPC_MAQ_SA_W_QHRR
:
23353 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23355 case OPC_MULSAQ_S_L_PW
:
23357 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23359 case OPC_MULSAQ_S_W_QH
:
23361 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23367 case OPC_ADDU_QB_DSP
:
23369 case OPC_MULEU_S_PH_QBL
:
23371 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23373 case OPC_MULEU_S_PH_QBR
:
23375 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23377 case OPC_MULQ_RS_PH
:
23379 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23381 case OPC_MULEQ_S_W_PHL
:
23383 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23385 case OPC_MULEQ_S_W_PHR
:
23387 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23389 case OPC_MULQ_S_PH
:
23391 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23395 #ifdef TARGET_MIPS64
23396 case OPC_ADDU_OB_DSP
:
23398 case OPC_MULEQ_S_PW_QHL
:
23400 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23402 case OPC_MULEQ_S_PW_QHR
:
23404 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23406 case OPC_MULEU_S_QH_OBL
:
23408 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23410 case OPC_MULEU_S_QH_OBR
:
23412 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23414 case OPC_MULQ_RS_QH
:
23416 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23423 tcg_temp_free_i32(t0
);
23424 tcg_temp_free(v1_t
);
23425 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);

    case OPC_ABSQ_S_PH_DSP:
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            target_long result;
            imm = (ctx->opcode >> 16) & 0xFF;
            result = (uint32_t)imm << 24 |
                     (uint32_t)imm << 16 |
                     (uint32_t)imm << 8 |
                     (uint32_t)imm;
            result = (int32_t)result;
            tcg_gen_movi_tl(cpu_gpr[ret], result);
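            /*
             * Illustrative note (added; not in the original source): this is
             * the REPL.QB immediate replication, e.g. imm = 0xAB yields
             * result = 0xABABABAB before the sign-extending cast.
             */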
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            tcg_gen_movi_tl(cpu_gpr[ret],
                            (target_long)((int32_t)imm << 16 |
                            (uint16_t)imm));
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
#ifdef TARGET_MIPS64
    case OPC_ABSQ_S_QH_DSP:
            imm = (ctx->opcode >> 16) & 0xFF;
            temp = ((uint64_t)imm << 8) | (uint64_t)imm;
            temp = (temp << 16) | temp;
            temp = (temp << 32) | temp;
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
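            /*
             * Illustrative note (added; not in the original source): the
             * 64-bit REPL.OB replication doubles the pattern width at each
             * step, so imm = 0xAB gives 0xABAB, then 0xABABABAB, then
             * 0xABABABABABABABAB.
             */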
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            temp = ((target_long)imm << 32) |
                   ((target_long)imm & 0xFFFFFFFF);
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            temp = ((uint64_t)(uint16_t)imm << 48) |
                   ((uint64_t)(uint16_t)imm << 32) |
                   ((uint64_t)(uint16_t)imm << 16) |
                   (uint64_t)(uint16_t)imm;
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32u_i64(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
    tcg_temp_free(val_t);
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    case OPC_CMPU_EQ_QB_DSP:
23589 case OPC_CMPU_EQ_QB
:
23591 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23593 case OPC_CMPU_LT_QB
:
23595 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23597 case OPC_CMPU_LE_QB
:
23599 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23601 case OPC_CMPGU_EQ_QB
:
23603 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23605 case OPC_CMPGU_LT_QB
:
23607 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23609 case OPC_CMPGU_LE_QB
:
23611 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23613 case OPC_CMPGDU_EQ_QB
:
23615 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23616 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23617 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23618 tcg_gen_shli_tl(t1
, t1
, 24);
23619 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23621 case OPC_CMPGDU_LT_QB
:
23623 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23624 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23625 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23626 tcg_gen_shli_tl(t1
, t1
, 24);
23627 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23629 case OPC_CMPGDU_LE_QB
:
23631 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23632 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23633 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23634 tcg_gen_shli_tl(t1
, t1
, 24);
23635 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23637 case OPC_CMP_EQ_PH
:
23639 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23641 case OPC_CMP_LT_PH
:
23643 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23645 case OPC_CMP_LE_PH
:
23647 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23651 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23655 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23657 case OPC_PACKRL_PH
:
23659 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23663 #ifdef TARGET_MIPS64
23664 case OPC_CMPU_EQ_OB_DSP
:
23666 case OPC_CMP_EQ_PW
:
23668 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23670 case OPC_CMP_LT_PW
:
23672 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23674 case OPC_CMP_LE_PW
:
23676 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23678 case OPC_CMP_EQ_QH
:
23680 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23682 case OPC_CMP_LT_QH
:
23684 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23686 case OPC_CMP_LE_QH
:
23688 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23690 case OPC_CMPGDU_EQ_OB
:
23692 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23694 case OPC_CMPGDU_LT_OB
:
23696 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23698 case OPC_CMPGDU_LE_OB
:
23700 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23702 case OPC_CMPGU_EQ_OB
:
23704 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23706 case OPC_CMPGU_LT_OB
:
23708 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23710 case OPC_CMPGU_LE_OB
:
23712 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23714 case OPC_CMPU_EQ_OB
:
23716 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23718 case OPC_CMPU_LT_OB
:
23720 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23722 case OPC_CMPU_LE_OB
:
23724 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23726 case OPC_PACKRL_PW
:
23728 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23732 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23736 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23740 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23748 tcg_temp_free(v1_t
);
23749 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
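            /*
             * Illustrative note (added; not in the original source): the
             * deposit places the low (32 - sa) bits of rt into rs starting
             * at bit sa, matching APPEND's
             * rt = (rt << sa) | (rs & ((1 << sa) - 1)); e.g. sa = 4,
             * rt = 0x12345678, rs = 0xABCDEF01 gives 0x23456781.
             */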
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 64 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    case OPC_EXTR_W_DSP:
23868 tcg_gen_movi_tl(t0
, v2
);
23869 tcg_gen_movi_tl(t1
, v1
);
23870 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23873 tcg_gen_movi_tl(t0
, v2
);
23874 tcg_gen_movi_tl(t1
, v1
);
23875 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23877 case OPC_EXTR_RS_W
:
23878 tcg_gen_movi_tl(t0
, v2
);
23879 tcg_gen_movi_tl(t1
, v1
);
23880 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23883 tcg_gen_movi_tl(t0
, v2
);
23884 tcg_gen_movi_tl(t1
, v1
);
23885 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23887 case OPC_EXTRV_S_H
:
23888 tcg_gen_movi_tl(t0
, v2
);
23889 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23892 tcg_gen_movi_tl(t0
, v2
);
23893 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23895 case OPC_EXTRV_R_W
:
23896 tcg_gen_movi_tl(t0
, v2
);
23897 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23899 case OPC_EXTRV_RS_W
:
23900 tcg_gen_movi_tl(t0
, v2
);
23901 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23904 tcg_gen_movi_tl(t0
, v2
);
23905 tcg_gen_movi_tl(t1
, v1
);
23906 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23909 tcg_gen_movi_tl(t0
, v2
);
23910 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23913 tcg_gen_movi_tl(t0
, v2
);
23914 tcg_gen_movi_tl(t1
, v1
);
23915 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23918 tcg_gen_movi_tl(t0
, v2
);
23919 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23922 imm
= (ctx
->opcode
>> 20) & 0x3F;
23923 tcg_gen_movi_tl(t0
, ret
);
23924 tcg_gen_movi_tl(t1
, imm
);
23925 gen_helper_shilo(t0
, t1
, cpu_env
);
23928 tcg_gen_movi_tl(t0
, ret
);
23929 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23932 tcg_gen_movi_tl(t0
, ret
);
23933 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23936 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23937 tcg_gen_movi_tl(t0
, imm
);
23938 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23941 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23942 tcg_gen_movi_tl(t0
, imm
);
23943 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23947 #ifdef TARGET_MIPS64
23948 case OPC_DEXTR_W_DSP
:
23952 tcg_gen_movi_tl(t0
, ret
);
23953 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23957 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23958 int ac
= (ctx
->opcode
>> 11) & 0x03;
23959 tcg_gen_movi_tl(t0
, shift
);
23960 tcg_gen_movi_tl(t1
, ac
);
23961 gen_helper_dshilo(t0
, t1
, cpu_env
);
23966 int ac
= (ctx
->opcode
>> 11) & 0x03;
23967 tcg_gen_movi_tl(t0
, ac
);
23968 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23972 tcg_gen_movi_tl(t0
, v2
);
23973 tcg_gen_movi_tl(t1
, v1
);
23975 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23978 tcg_gen_movi_tl(t0
, v2
);
23979 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23982 tcg_gen_movi_tl(t0
, v2
);
23983 tcg_gen_movi_tl(t1
, v1
);
23984 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23987 tcg_gen_movi_tl(t0
, v2
);
23988 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23991 tcg_gen_movi_tl(t0
, v2
);
23992 tcg_gen_movi_tl(t1
, v1
);
23993 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23995 case OPC_DEXTR_R_L
:
23996 tcg_gen_movi_tl(t0
, v2
);
23997 tcg_gen_movi_tl(t1
, v1
);
23998 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24000 case OPC_DEXTR_RS_L
:
24001 tcg_gen_movi_tl(t0
, v2
);
24002 tcg_gen_movi_tl(t1
, v1
);
24003 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24006 tcg_gen_movi_tl(t0
, v2
);
24007 tcg_gen_movi_tl(t1
, v1
);
24008 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24010 case OPC_DEXTR_R_W
:
24011 tcg_gen_movi_tl(t0
, v2
);
24012 tcg_gen_movi_tl(t1
, v1
);
24013 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24015 case OPC_DEXTR_RS_W
:
24016 tcg_gen_movi_tl(t0
, v2
);
24017 tcg_gen_movi_tl(t1
, v1
);
24018 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24020 case OPC_DEXTR_S_H
:
24021 tcg_gen_movi_tl(t0
, v2
);
24022 tcg_gen_movi_tl(t1
, v1
);
24023 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24025 case OPC_DEXTRV_S_H
:
24026 tcg_gen_movi_tl(t0
, v2
);
24027 tcg_gen_movi_tl(t1
, v1
);
24028 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24031 tcg_gen_movi_tl(t0
, v2
);
24032 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24034 case OPC_DEXTRV_R_L
:
24035 tcg_gen_movi_tl(t0
, v2
);
24036 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24038 case OPC_DEXTRV_RS_L
:
24039 tcg_gen_movi_tl(t0
, v2
);
24040 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24043 tcg_gen_movi_tl(t0
, v2
);
24044 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24046 case OPC_DEXTRV_R_W
:
24047 tcg_gen_movi_tl(t0
, v2
);
24048 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24050 case OPC_DEXTRV_RS_W
:
24051 tcg_gen_movi_tl(t0
, v2
);
24052 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24061 tcg_temp_free(v1_t
);
24062 tcg_temp_free(v2_t
);
24065 /* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        op2 = MASK_R6_MULDIV(ctx->opcode);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
        gen_cond_move(ctx, op1, rd, rs, rt);
        if (rt == 0 && sa == 1) {
            /*
             * Major opcode and function field are shared with preR6
             * MFHI/MTHI. We additionally need to check the other fields.
             */
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
24121 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24122 gen_helper_do_semihosting(cpu_env
);
24124 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
24125 generate_exception_end(ctx
, EXCP_RI
);
24127 generate_exception_end(ctx
, EXCP_DBp
);
24131 #if defined(TARGET_MIPS64)
24133 check_mips_64(ctx
);
24134 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24138 if (rt
== 0 && sa
== 1) {
24140 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24141 * We need additionally to check other fields.
24143 check_mips_64(ctx
);
24144 gen_cl(ctx
, op1
, rd
, rs
);
24146 generate_exception_end(ctx
, EXCP_RI
);
24154 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24164 check_mips_64(ctx
);
24165 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24168 MIPS_INVAL("special_r6 muldiv");
24169 generate_exception_end(ctx
, EXCP_RI
);
24174 default: /* Invalid */
24175 MIPS_INVAL("special_r6");
24176 generate_exception_end(ctx
, EXCP_RI
);
static void decode_opc_special_tx79(CPUMIPSState *env, DisasContext *ctx)
{
    int rs = extract32(ctx->opcode, 21, 5);
    int rt = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);
    uint32_t op1 = MASK_SPECIAL(ctx->opcode);

    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, 0, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, 0, rs);
        break;
    case OPC_MULT:
    case OPC_MULTU:
        gen_mul_txx9(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT:
    case OPC_DMULTU:
    case OPC_DDIV:
    case OPC_DDIVU:
        check_insn_opc_user_only(ctx, INSN_R5900);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, 0, 0, 4);
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_tx79");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, rs & 3, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        break;
    case OPC_MOVCI:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_MULT:
    case OPC_MULTU:
        if (sa) {
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        } else {
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        }
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT:
    case OPC_DMULTU:
    case OPC_DDIV:
    case OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_SPIM:
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
#else
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
#endif
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_SLL:          /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        /* Fallthrough */
    case OPC_SRA:
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_SRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTR;
            }
            /* Fallthrough */
        case 0:
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ADD:
    case OPC_ADDU:
    case OPC_SUB:
    case OPC_SUBU:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_SLLV:         /* Shifts */
    case OPC_SRAV:
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_SRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTRV;
            }
            /* Fallthrough */
        case 0:
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SLT:          /* Set on less than */
    case OPC_SLTU:
        gen_slt(ctx, op1, rd, rs, rt);
        break;
    case OPC_AND:          /* Logic */
    case OPC_OR:
    case OPC_NOR:
    case OPC_XOR:
        gen_logic(ctx, op1, rd, rs, rt);
        break;
    case OPC_JALR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_TGE: /* Traps */
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TEQ:
    case OPC_TNE:
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
        break;
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        } else {
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
#else
            gen_helper_0e0i(pmon, sa);
#endif
        }
        break;
    case OPC_SYSCALL:
        generate_exception_end(ctx, EXCP_SYSCALL);
        break;
    case OPC_BREAK:
        generate_exception_end(ctx, EXCP_BREAK);
        break;
    case OPC_SYNC:
        check_insn(ctx, ISA_MIPS2);
        gen_sync(extract32(ctx->opcode, 6, 5));
        break;

#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
    case OPC_DSLL:
    case OPC_DSRA:
    case OPC_DSLL32:
    case OPC_DSRA32:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_DSRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DSRL32:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR32;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DADD:
    case OPC_DADDU:
    case OPC_DSUB:
    case OPC_DSUBU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSLLV:
    case OPC_DSRAV:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTRV;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DLSA:
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        }
        break;
#endif
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
        } else if (ctx->insn_flags & INSN_R5900) {
            decode_opc_special_tx79(env, ctx);
        } else {
            decode_opc_special_legacy(env, ctx);
        }
    }
}
#if defined(TARGET_MIPS64)

/*
 *           MMI (MultiMedia Interface) ASE instructions
 *           ===========================================
 */

/*
 *          MMI instructions category: data communication
 *          ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *   PCPYH    PEXCH    PEXTLB   PINTH    PPACB    PEXT5    PREVH
 *   PCPYLD   PEXCW    PEXTLH   PINTEH   PPACH    PPAC5    PROT3W
 *   PCPYUD   PEXEH    PEXTLW            PPACW
 */

/*
 *  PCPYH rd, rt
 *
 *  Parallel Copy Halfword
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---------+---------+-----------+
 *  |    MMI    |0 0 0 0 0|    rt   |    rd   |  PCPYH  |    MMI3   |
 *  +-----------+---------+---------+---------+---------+-----------+
 */
static void gen_mmi_pcpyh(DisasContext *ctx)
{
    uint32_t pd, rt, rd;
    uint32_t opcode;

    opcode = ctx->opcode;

    pd = extract32(opcode, 21, 5);
    rt = extract32(opcode, 16, 5);
    rd = extract32(opcode, 11, 5);

    if (unlikely(pd != 0)) {
        generate_exception_end(ctx, EXCP_RI);
    } else if (rd == 0) {
        /* nop */
    } else if (rt == 0) {
        tcg_gen_movi_i64(cpu_gpr[rd], 0);
        tcg_gen_movi_i64(cpu_mmr[rd], 0);
    } else {
        TCGv_i64 t0 = tcg_temp_new();
        TCGv_i64 t1 = tcg_temp_new();
        uint64_t mask = (1ULL << 16) - 1;

        tcg_gen_andi_i64(t0, cpu_gpr[rt], mask);
        tcg_gen_movi_i64(t1, 0);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);

        tcg_gen_mov_i64(cpu_gpr[rd], t1);

        tcg_gen_andi_i64(t0, cpu_mmr[rt], mask);
        tcg_gen_movi_i64(t1, 0);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);
        tcg_gen_shli_i64(t0, t0, 16);
        tcg_gen_or_i64(t1, t0, t1);

        tcg_gen_mov_i64(cpu_mmr[rd], t1);

        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
}
/*
 *  PCPYLD rd, rs, rt
 *
 *  Parallel Copy Lower Doubleword
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---------+---------+-----------+
 *  |    MMI    |    rs   |    rt   |    rd   | PCPYLD  |    MMI2   |
 *  +-----------+---------+---------+---------+---------+-----------+
 */
static void gen_mmi_pcpyld(DisasContext *ctx)
{
    uint32_t rs, rt, rd;
    uint32_t opcode;

    opcode = ctx->opcode;

    rs = extract32(opcode, 21, 5);
    rt = extract32(opcode, 16, 5);
    rd = extract32(opcode, 11, 5);

    if (rd == 0) {
        /* nop */
    } else {
        if (rs == 0) {
            tcg_gen_movi_i64(cpu_mmr[rd], 0);
        } else {
            tcg_gen_mov_i64(cpu_mmr[rd], cpu_gpr[rs]);
        }
        if (rt == 0) {
            tcg_gen_movi_i64(cpu_gpr[rd], 0);
        } else {
            if (rd != rt) {
                tcg_gen_mov_i64(cpu_gpr[rd], cpu_gpr[rt]);
            }
        }
    }
}
/*
 *  PCPYUD rd, rs, rt
 *
 *  Parallel Copy Upper Doubleword
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---------+---------+-----------+
 *  |    MMI    |    rs   |    rt   |    rd   | PCPYUD  |    MMI3   |
 *  +-----------+---------+---------+---------+---------+-----------+
 */
static void gen_mmi_pcpyud(DisasContext *ctx)
{
    uint32_t rs, rt, rd;
    uint32_t opcode;

    opcode = ctx->opcode;

    rs = extract32(opcode, 21, 5);
    rt = extract32(opcode, 16, 5);
    rd = extract32(opcode, 11, 5);

    if (rd == 0) {
        /* nop */
    } else {
        if (rs == 0) {
            tcg_gen_movi_i64(cpu_gpr[rd], 0);
        } else {
            tcg_gen_mov_i64(cpu_gpr[rd], cpu_mmr[rs]);
        }
        if (rt == 0) {
            tcg_gen_movi_i64(cpu_mmr[rd], 0);
        } else {
            if (rd != rt) {
                tcg_gen_mov_i64(cpu_mmr[rd], cpu_mmr[rt]);
            }
        }
    }
}

#endif /* TARGET_MIPS64 */
#if !defined(TARGET_MIPS64)

/* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
#define MXU_APTN1_A     0
#define MXU_APTN1_S     1

/* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
#define MXU_APTN2_AA    0
#define MXU_APTN2_AS    1
#define MXU_APTN2_SA    2
#define MXU_APTN2_SS    3

/* MXU execute add/subtract 2-bit pattern 'eptn2' */
#define MXU_EPTN2_AA    0
#define MXU_EPTN2_AS    1
#define MXU_EPTN2_SA    2
#define MXU_EPTN2_SS    3

/* MXU operand getting pattern 'optn2' */
#define MXU_OPTN2_PTN0  0
#define MXU_OPTN2_PTN1  1
#define MXU_OPTN2_PTN2  2
#define MXU_OPTN2_PTN3  3
/* alternative naming scheme for 'optn2' */
#define MXU_OPTN2_WW    0
#define MXU_OPTN2_LW    1
#define MXU_OPTN2_HW    2
#define MXU_OPTN2_XW    3

/* MXU operand getting pattern 'optn3' */
#define MXU_OPTN3_PTN0  0
#define MXU_OPTN3_PTN1  1
#define MXU_OPTN3_PTN2  2
#define MXU_OPTN3_PTN3  3
#define MXU_OPTN3_PTN4  4
#define MXU_OPTN3_PTN5  5
#define MXU_OPTN3_PTN6  6
#define MXU_OPTN3_PTN7  7
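
/*
 * Added note: the operand-selection patterns above mirror fields of the MXU
 * opcodes. For example, in D16MUL/D16MAC below 'optn2' picks the 16-bit
 * halves that are multiplied: WW = high*high and low*low, LW = low*high and
 * low*low, HW = high*high and high*low, XW = low*high and high*low.
 */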
/*
 *  S32I2M XRa, rb - Register move from GRF to XRF
 */
static void gen_mxu_s32i2m(DisasContext *ctx)
{
    TCGv t0;
    uint32_t XRa, Rb;

    t0 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 5);
    Rb = extract32(ctx->opcode, 16, 5);

    gen_load_gpr(t0, Rb);
    if (XRa <= 15) {
        gen_store_mxu_gpr(t0, XRa);
    } else if (XRa == 16) {
        gen_store_mxu_cr(t0);
    }

    tcg_temp_free(t0);
}
/*
 *  S32M2I XRa, rb - Register move from XRF to GRF
 */
static void gen_mxu_s32m2i(DisasContext *ctx)
{
    TCGv t0;
    uint32_t XRa, Rb;

    t0 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 5);
    Rb = extract32(ctx->opcode, 16, 5);

    if (XRa <= 15) {
        gen_load_mxu_gpr(t0, XRa);
    } else if (XRa == 16) {
        gen_load_mxu_cr(t0);
    }

    gen_store_gpr(t0, Rb);

    tcg_temp_free(t0);
}
/*
 *  S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
 */
static void gen_mxu_s8ldd(DisasContext *ctx)
{
    TCGv t0, t1;
    uint32_t XRa, Rb, s8, optn3;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    s8 = extract32(ctx->opcode, 10, 8);
    optn3 = extract32(ctx->opcode, 18, 3);
    Rb = extract32(ctx->opcode, 21, 5);

    gen_load_gpr(t0, Rb);
    tcg_gen_addi_tl(t0, t0, (int8_t)s8);

    switch (optn3) {
    /* XRa[7:0] = tmp8 */
    case MXU_OPTN3_PTN0:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 0, 8);
        break;
    /* XRa[15:8] = tmp8 */
    case MXU_OPTN3_PTN1:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 8, 8);
        break;
    /* XRa[23:16] = tmp8 */
    case MXU_OPTN3_PTN2:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 16, 8);
        break;
    /* XRa[31:24] = tmp8 */
    case MXU_OPTN3_PTN3:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 24, 8);
        break;
    /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
    case MXU_OPTN3_PTN4:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);
        break;
    /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
    case MXU_OPTN3_PTN5:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_shli_tl(t1, t1, 8);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);
        break;
    /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
    case MXU_OPTN3_PTN6:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_SB);
        tcg_gen_mov_tl(t0, t1);
        tcg_gen_andi_tl(t0, t0, 0xFF00FFFF);
        tcg_gen_shli_tl(t1, t1, 16);
        tcg_gen_or_tl(t0, t0, t1);
        break;
    /* XRa = {tmp8, tmp8, tmp8, tmp8} */
    case MXU_OPTN3_PTN7:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_deposit_tl(t1, t1, t1, 8, 8);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);
        break;
    }

    gen_store_mxu_gpr(t0, XRa);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 *  D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
 */
static void gen_mxu_d16mul(DisasContext *ctx)
{
    TCGv t0, t1, t2, t3;
    uint32_t XRa, XRb, XRc, XRd, optn2;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    optn2 = extract32(ctx->opcode, 22, 2);

    gen_load_mxu_gpr(t1, XRb);
    tcg_gen_sextract_tl(t0, t1, 0, 16);
    tcg_gen_sextract_tl(t1, t1, 16, 16);
    gen_load_mxu_gpr(t3, XRc);
    tcg_gen_sextract_tl(t2, t3, 0, 16);
    tcg_gen_sextract_tl(t3, t3, 16, 16);

    switch (optn2) {
    case MXU_OPTN2_WW: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t0, t2);
        break;
    case MXU_OPTN2_LW: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t0, t2);
        break;
    case MXU_OPTN2_HW: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t1, t2);
        break;
    case MXU_OPTN2_XW: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t1, t2);
        break;
    }
    gen_store_mxu_gpr(t3, XRa);
    gen_store_mxu_gpr(t2, XRd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
}
/*
 *  D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
 *                                            and accumulate
 */
static void gen_mxu_d16mac(DisasContext *ctx)
{
    TCGv t0, t1, t2, t3;
    uint32_t XRa, XRb, XRc, XRd, optn2, aptn2;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    optn2 = extract32(ctx->opcode, 22, 2);
    aptn2 = extract32(ctx->opcode, 24, 2);

    gen_load_mxu_gpr(t1, XRb);
    tcg_gen_sextract_tl(t0, t1, 0, 16);
    tcg_gen_sextract_tl(t1, t1, 16, 16);

    gen_load_mxu_gpr(t3, XRc);
    tcg_gen_sextract_tl(t2, t3, 0, 16);
    tcg_gen_sextract_tl(t3, t3, 16, 16);

    switch (optn2) {
    case MXU_OPTN2_WW: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t0, t2);
        break;
    case MXU_OPTN2_LW: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t0, t2);
        break;
    case MXU_OPTN2_HW: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t1, t2);
        break;
    case MXU_OPTN2_XW: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t1, t2);
        break;
    }
    gen_load_mxu_gpr(t0, XRa);
    gen_load_mxu_gpr(t1, XRd);

    switch (aptn2) {
    case MXU_APTN2_AA:
        tcg_gen_add_tl(t3, t0, t3);
        tcg_gen_add_tl(t2, t1, t2);
        break;
    case MXU_APTN2_AS:
        tcg_gen_add_tl(t3, t0, t3);
        tcg_gen_sub_tl(t2, t1, t2);
        break;
    case MXU_APTN2_SA:
        tcg_gen_sub_tl(t3, t0, t3);
        tcg_gen_add_tl(t2, t1, t2);
        break;
    case MXU_APTN2_SS:
        tcg_gen_sub_tl(t3, t0, t3);
        tcg_gen_sub_tl(t2, t1, t2);
        break;
    }
    gen_store_mxu_gpr(t3, XRa);
    gen_store_mxu_gpr(t2, XRd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
}
/*
 *  Q8MUL   XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
 *  Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
 */
static void gen_mxu_q8mul_q8mulsu(DisasContext *ctx)
{
    TCGv t0, t1, t2, t3, t4, t5, t6, t7;
    uint32_t XRa, XRb, XRc, XRd, sel;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();
    t4 = tcg_temp_new();
    t5 = tcg_temp_new();
    t6 = tcg_temp_new();
    t7 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    sel = extract32(ctx->opcode, 22, 2);

    gen_load_mxu_gpr(t3, XRb);
    gen_load_mxu_gpr(t7, XRc);

    if (sel == 0x2) {
        /* Q8MULSU */
        tcg_gen_ext8s_tl(t0, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t1, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t2, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t3, t3);
    } else {
        /* Q8MUL */
        tcg_gen_ext8u_tl(t0, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t1, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t2, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t3, t3);
    }

    tcg_gen_ext8u_tl(t4, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t5, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t6, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t7, t7);

    tcg_gen_mul_tl(t0, t0, t4);
    tcg_gen_mul_tl(t1, t1, t5);
    tcg_gen_mul_tl(t2, t2, t6);
    tcg_gen_mul_tl(t3, t3, t7);

    tcg_gen_andi_tl(t0, t0, 0xFFFF);
    tcg_gen_andi_tl(t1, t1, 0xFFFF);
    tcg_gen_andi_tl(t2, t2, 0xFFFF);
    tcg_gen_andi_tl(t3, t3, 0xFFFF);

    tcg_gen_shli_tl(t1, t1, 16);
    tcg_gen_shli_tl(t3, t3, 16);

    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(t1, t2, t3);

    gen_store_mxu_gpr(t0, XRd);
    gen_store_mxu_gpr(t1, XRa);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    tcg_temp_free(t4);
    tcg_temp_free(t5);
    tcg_temp_free(t6);
    tcg_temp_free(t7);
}
/*
 *  S32LDD  XRa, Rb, S12 - Load a word from memory to XRF
 *  S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
 */
static void gen_mxu_s32ldd_s32lddr(DisasContext *ctx)
{
    TCGv t0, t1;
    uint32_t XRa, Rb, s12, sel;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    s12 = extract32(ctx->opcode, 10, 10);
    sel = extract32(ctx->opcode, 20, 1);
    Rb = extract32(ctx->opcode, 21, 5);

    gen_load_gpr(t0, Rb);

    tcg_gen_movi_tl(t1, s12);
    tcg_gen_shli_tl(t1, t1, 2);
    if (s12 & 0x200) {
        tcg_gen_ori_tl(t1, t1, 0xFFFFF000);
    }
    tcg_gen_add_tl(t1, t0, t1);
    tcg_gen_qemu_ld_tl(t1, t1, ctx->mem_idx, MO_SL);

    if (sel == 1) {
        /* S32LDDR */
        tcg_gen_bswap32_tl(t1, t1);
    }
    gen_store_mxu_gpr(t1, XRa);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 *                 MXU instruction category: logic
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *               S32NOR    S32AND    S32OR    S32XOR
 */

/*
 *  S32NOR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'nor' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32NOR(DisasContext *ctx)
{
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 1s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0xFFFFFFFF);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the negation of XRc */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the negation of XRb */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to the negation of XRb */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        tcg_gen_nor_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
    }
}
/*
 *  S32AND XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'and' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32AND(DisasContext *ctx)
{
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* one of operands zero register -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        tcg_gen_and_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
    }
}
/*
 *  S32OR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'or' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32OR(DisasContext *ctx)
{
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the content of XRc */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the content of XRb */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
    }
}
/*
 *  S32XOR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'xor' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32XOR(DisasContext *ctx)
{
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the content of XRc */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the content of XRb */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else {
        /* the most general case */
        tcg_gen_xor_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
    }
}
/*
 *                 MXU instruction category max/min
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *                     S32MAX     D16MAX     Q8MAX
 *                     S32MIN     D16MIN     Q8MIN
 */

/*
 *  S32MAX XRa, XRb, XRc
 *    Update XRa with the maximum of signed 32-bit integers contained
 *    in XRb and XRc.
 *
 *  S32MIN XRa, XRb, XRc
 *    Update XRa with the minimum of signed 32-bit integers contained
 *    in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32MAX_S32MIN(DisasContext *ctx)
{
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - find which one is not...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do max/min operation with one operand 0 */
        if (opc == OPC_MXU_S32MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], mxu_gpr[XRx - 1], 0);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], mxu_gpr[XRx - 1], 0);
        }
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        if (opc == OPC_MXU_S32MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1],
                             mxu_gpr[XRc - 1]);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1],
                             mxu_gpr[XRc - 1]);
        }
    }
}
/*
 *  D16MAX
 *    Update XRa with the 16-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *  D16MIN
 *    Update XRa with the 16-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_D16MAX_D16MIN(DisasContext *ctx)
{
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRc == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRa == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRc - 1], 0);
    } else if (unlikely((XRb == 0) || (XRa == 0))) {
        /* exactly one operand is zero register - find which one is not...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do half-word-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);

        /* the left half-word first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFFFF0000);
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* the right half-word */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0x0000FFFF);
        /* move half-words to the leftmost position */
        tcg_gen_shli_i32(t0, t0, 16);
        /* t0 will be max/min of t0 and t1 */
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(t0, t0, t1);
        } else {
            tcg_gen_smin_i32(t0, t0, t1);
        }
        /* return resulting half-words to its original position */
        tcg_gen_shri_i32(t0, t0, 16);
        /* finally update the destination */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();

        /* the left half-word first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFFFF0000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFF0000);
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* the right half-word */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x0000FFFF);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0x0000FFFF);
        /* move half-words to the leftmost position */
        tcg_gen_shli_i32(t0, t0, 16);
        tcg_gen_shli_i32(t1, t1, 16);
        /* t0 will be max/min of t0 and t1 */
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(t0, t0, t1);
        } else {
            tcg_gen_smin_i32(t0, t0, t1);
        }
        /* return resulting half-words to its original position */
        tcg_gen_shri_i32(t0, t0, 16);
        /* finally update the destination */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    }
}
/*
 *  Q8MAX
 *    Update XRa with the 8-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *  Q8MIN
 *    Update XRa with the 8-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_Q8MAX_Q8MIN(DisasContext *ctx)
{
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - make it be the first...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do byte-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);
        int32_t i;

        /* the leftmost byte (byte 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract the byte */
            tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF << (8 * i));
            /* move the byte to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();
        int32_t i;

        /* the leftmost bytes (bytes 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF000000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract corresponding bytes */
            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF << (8 * i));
            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF << (8 * i));
            /* move the bytes to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            tcg_gen_shli_i32(t1, t1, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    }
}
/*
 *                 MXU instruction category: align
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

/*
 *  S32ALNI XRc, XRb, XRa, optn3
 *    Arrange bytes from XRb and XRc according to one of five sets of
 *    rules determined by optn3, and place the result in XRa.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |optn3|0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32ALNI(DisasContext *ctx)
{
    uint32_t optn3, pad, XRc, XRb, XRa;

    optn3 = extract32(ctx->opcode, 23, 3);
    pad = extract32(ctx->opcode, 21, 2);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just appropriately shift XRc into XRa */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
            break;
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_shri_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1],
                             8 * (4 - optn3));
            break;
        case MXU_OPTN3_PTN4:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
            break;
        }
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just appropriately shift XRb into XRa */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
            break;
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_shri_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], 8 * optn3);
            break;
        case MXU_OPTN3_PTN4:
            tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
            break;
        }
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just rotation or moving from any of them */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
        case MXU_OPTN3_PTN4:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
            break;
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_rotli_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], 8 * optn3);
            break;
        }
    } else {
        /* the most general case */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            {
                /*         XRb             XRc      */
                /*  +---------------+                */
                /*  | A   B   C   D | E   F   G   H  */
                /*  +-------+-------+                */
                /*         XRa                       */
                tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
            }
            break;
        case MXU_OPTN3_PTN1:
            {
                /*         XRb             XRc      */
                /*      +-------------------+        */
                /*    A | B   C   D   E     | F  G  H */
                /*      +---------+---------+        */
                /*               XRa                  */
                TCGv_i32 t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new();

                tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x00FFFFFF);
                tcg_gen_shli_i32(t0, t0, 8);

                tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
                tcg_gen_shri_i32(t1, t1, 24);

                tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);

                tcg_temp_free(t1);
                tcg_temp_free(t0);
            }
            break;
        case MXU_OPTN3_PTN2:
            {
                /*         XRb             XRc      */
                /*          +-------------------+    */
                /*    A   B | C   D   E   F     | G  H */
                /*          +---------+---------+    */
                /*                   XRa              */
                TCGv_i32 t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new();

                tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x0000FFFF);
                tcg_gen_shli_i32(t0, t0, 16);

                tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFF0000);
                tcg_gen_shri_i32(t1, t1, 16);

                tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);

                tcg_temp_free(t1);
                tcg_temp_free(t0);
            }
            break;
        case MXU_OPTN3_PTN3:
            {
                /*         XRb             XRc      */
                /*              +-------------------+ */
                /*    A   B   C | D   E   F   G     | H */
                /*              +---------+---------+ */
                /*                       XRa           */
                TCGv_i32 t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new();

                tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x000000FF);
                tcg_gen_shli_i32(t0, t0, 24);

                tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFFFF00);
                tcg_gen_shri_i32(t1, t1, 8);

                tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);

                tcg_temp_free(t1);
                tcg_temp_free(t0);
            }
            break;
        case MXU_OPTN3_PTN4:
            {
                /*         XRb             XRc      */
                /*                  +---------------+ */
                /*    A   B   C   D | E   F   G   H | */
                /*                  +-------+-------+ */
                /*                         XRa        */
                tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
            }
            break;
        }
    }
}
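
/*
 * Added note: in the general case S32ALNI selects four bytes out of the
 * 64-bit concatenation {XRb, XRc}; for optn3 = 1..3 this is equivalent to
 *     XRa = (XRb << (8 * optn3)) | (XRc >> (32 - 8 * optn3));
 * while optn3 = 0 and 4 degenerate to plain copies of XRb and XRc.
 */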
/*
 *                   Decoding engine for MXU
 *                   =======================
 */

/*
 *
 * Decode MXU pool00
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool00(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32MAX:
    case OPC_MXU_S32MIN:
        gen_mxu_S32MAX_S32MIN(ctx);
        break;
    case OPC_MXU_D16MAX:
    case OPC_MXU_D16MIN:
        gen_mxu_D16MAX_D16MIN(ctx);
        break;
    case OPC_MXU_Q8MAX:
    case OPC_MXU_Q8MIN:
        gen_mxu_Q8MAX_Q8MIN(ctx);
        break;
    case OPC_MXU_Q8SLT:
        /* TODO: Implement emulation of Q8SLT instruction. */
        MIPS_INVAL("OPC_MXU_Q8SLT");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8SLTU:
        /* TODO: Implement emulation of Q8SLTU instruction. */
        MIPS_INVAL("OPC_MXU_Q8SLTU");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool01
 *
 *  S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL01|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  Q8ADD:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+-----+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |en2|0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL01|
 *  +-----------+---+-----+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool01(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32SLT:
        /* TODO: Implement emulation of S32SLT instruction. */
        MIPS_INVAL("OPC_MXU_S32SLT");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16SLT:
        /* TODO: Implement emulation of D16SLT instruction. */
        MIPS_INVAL("OPC_MXU_D16SLT");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16AVG:
        /* TODO: Implement emulation of D16AVG instruction. */
        MIPS_INVAL("OPC_MXU_D16AVG");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16AVGR:
        /* TODO: Implement emulation of D16AVGR instruction. */
        MIPS_INVAL("OPC_MXU_D16AVGR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8AVG:
        /* TODO: Implement emulation of Q8AVG instruction. */
        MIPS_INVAL("OPC_MXU_Q8AVG");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8AVGR:
        /* TODO: Implement emulation of Q8AVGR instruction. */
        MIPS_INVAL("OPC_MXU_Q8AVGR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8ADD:
        /* TODO: Implement emulation of Q8ADD instruction. */
        MIPS_INVAL("OPC_MXU_Q8ADD");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool02
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL02|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool02(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32CPS:
        /* TODO: Implement emulation of S32CPS instruction. */
        MIPS_INVAL("OPC_MXU_S32CPS");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16CPS:
        /* TODO: Implement emulation of D16CPS instruction. */
        MIPS_INVAL("OPC_MXU_D16CPS");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8ABD:
        /* TODO: Implement emulation of Q8ABD instruction. */
        MIPS_INVAL("OPC_MXU_Q8ABD");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16SAT:
        /* TODO: Implement emulation of Q16SAT instruction. */
        MIPS_INVAL("OPC_MXU_Q16SAT");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool03
 *
 *  D16MULF:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |x x|on2|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL03|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 *  D16MULE:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |x x|on2|  Xd   |  XRc  |  XRb  |  XRa  |MXU__POOL03|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool03(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 24, 2);

    switch (opcode) {
    case OPC_MXU_D16MULF:
        /* TODO: Implement emulation of D16MULF instruction. */
        MIPS_INVAL("OPC_MXU_D16MULF");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16MULE:
        /* TODO: Implement emulation of D16MULE instruction. */
        MIPS_INVAL("OPC_MXU_D16MULE");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool04
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  |  SPECIAL2 |    rb   |x|        s12        |  XRa  |MXU__POOL04|
 *  +-----------+---------+-+-------------------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool04(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32LDD:
    case OPC_MXU_S32LDDR:
        gen_mxu_s32ldd_s32lddr(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool05
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  |  SPECIAL2 |    rb   |x|        s12        |  XRa  |MXU__POOL05|
 *  +-----------+---------+-+-------------------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool05(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32STD:
        /* TODO: Implement emulation of S32STD instruction. */
        MIPS_INVAL("OPC_MXU_S32STD");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32STDR:
        /* TODO: Implement emulation of S32STDR instruction. */
        MIPS_INVAL("OPC_MXU_S32STDR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool06
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  |  SPECIAL2 |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL06|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool06(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32LDDV:
        /* TODO: Implement emulation of S32LDDV instruction. */
        MIPS_INVAL("OPC_MXU_S32LDDV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32LDDVR:
        /* TODO: Implement emulation of S32LDDVR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDDVR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool07
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  |  SPECIAL2 |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL07|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool07(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32STDV:
        /* TODO: Implement emulation of S32STDV instruction. */
        MIPS_INVAL("OPC_MXU_S32STDV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32STDVR:
        /* TODO: Implement emulation of S32STDVR instruction. */
        MIPS_INVAL("OPC_MXU_S32STDVR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool08
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  |  SPECIAL2 |    rb   |x|        s12        |  XRa  |MXU__POOL08|
 *  +-----------+---------+-+-------------------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool08(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32LDI:
        /* TODO: Implement emulation of S32LDI instruction. */
        MIPS_INVAL("OPC_MXU_S32LDI");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32LDIR:
        /* TODO: Implement emulation of S32LDIR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool09
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  |  SPECIAL2 |    rb   |x|        s12        |  XRa  |MXU__POOL09|
 *  +-----------+---------+-+-------------------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool09(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 5, 0);

    switch (opcode) {
    case OPC_MXU_S32SDI:
        /* TODO: Implement emulation of S32SDI instruction. */
        MIPS_INVAL("OPC_MXU_S32SDI");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32SDIR:
        /* TODO: Implement emulation of S32SDIR instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool10
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  |  SPECIAL2 |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL10|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool10(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 5, 0);

    switch (opcode) {
    case OPC_MXU_S32LDIV:
        /* TODO: Implement emulation of S32LDIV instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32LDIVR:
        /* TODO: Implement emulation of S32LDIVR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIVR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool11
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  |  SPECIAL2 |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL11|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool11(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32SDIV:
        /* TODO: Implement emulation of S32SDIV instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32SDIVR:
        /* TODO: Implement emulation of S32SDIVR instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIVR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool12
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |an2|x x|  Xd   |  XRc  |  XRb  |  XRa  |MXU__POOL12|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool12(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_D32ACC:
        /* TODO: Implement emulation of D32ACC instruction. */
        MIPS_INVAL("OPC_MXU_D32ACC");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D32ACCM:
        /* TODO: Implement emulation of D32ACCM instruction. */
        MIPS_INVAL("OPC_MXU_D32ACCM");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D32ASUM:
        /* TODO: Implement emulation of D32ASUM instruction. */
        MIPS_INVAL("OPC_MXU_D32ASUM");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool13
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |en2|x x|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL13|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool13(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q16ACC:
        /* TODO: Implement emulation of Q16ACC instruction. */
        MIPS_INVAL("OPC_MXU_Q16ACC");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16ACCM:
        /* TODO: Implement emulation of Q16ACCM instruction. */
        MIPS_INVAL("OPC_MXU_Q16ACCM");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16ASUM:
        /* TODO: Implement emulation of Q16ASUM instruction. */
        MIPS_INVAL("OPC_MXU_Q16ASUM");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 *
 * Decode MXU pool14
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0|x x|  XRd  |  XRc  |  XRb  |  XRa  |MXU__POOL14|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  |  SPECIAL2 |en2|x x|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL14|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool14(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8ADDE:
        /* TODO: Implement emulation of Q8ADDE instruction. */
        MIPS_INVAL("OPC_MXU_Q8ADDE");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D8SUM:
        /* TODO: Implement emulation of D8SUM instruction. */
        MIPS_INVAL("OPC_MXU_D8SUM");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D8SUMC:
        /* TODO: Implement emulation of D8SUMC instruction. */
        MIPS_INVAL("OPC_MXU_D8SUMC");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool15
 *
 *  S32MUL, S32MULU, S32EXTRV:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+---------+---+-------+-------+-----------+
 * |  SPECIAL2 |   rs    |   rt    |x x|  XRd  |  XRa  |MXU__POOL15|
 * +-----------+---------+---------+---+-------+-------+-----------+
 *
 *  S32EXTR:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+---------+---+-------+-------+-----------+
 * |  SPECIAL2 |   rb    |  sft5   |x x|  XRd  |  XRa  |MXU__POOL15|
 * +-----------+---------+---------+---+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool15(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 14, 2);

    switch (opcode) {
    case OPC_MXU_S32MUL:
        /* TODO: Implement emulation of S32MUL instruction. */
        MIPS_INVAL("OPC_MXU_S32MUL");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32MULU:
        /* TODO: Implement emulation of S32MULU instruction. */
        MIPS_INVAL("OPC_MXU_S32MULU");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32EXTR:
        /* TODO: Implement emulation of S32EXTR instruction. */
        MIPS_INVAL("OPC_MXU_S32EXTR");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32EXTRV:
        /* TODO: Implement emulation of S32EXTRV instruction. */
        MIPS_INVAL("OPC_MXU_S32EXTRV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool16
 *
 *  D32SARW:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 |   rb    |x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 * +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  S32ALN:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 |   rs    |x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 * +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  S32ALNI:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+-----+---+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 | s3  |0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 * +-----------+-----+---+-----+-------+-------+-------+-----------+
 *
 *  S32LUI:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+-----+---+-----+-------+---------------+-----------+
 * |  SPECIAL2 |optn3|0 0|x x x|  XRc  |      s8       |MXU__POOL16|
 * +-----------+-----+---+-----+-------+---------------+-----------+
 *
 *  S32NOR, S32AND, S32OR, S32XOR:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 * +-----------+---------+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool16(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_D32SARW:
        /* TODO: Implement emulation of D32SARW instruction. */
        MIPS_INVAL("OPC_MXU_D32SARW");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32ALN:
        /* TODO: Implement emulation of S32ALN instruction. */
        MIPS_INVAL("OPC_MXU_S32ALN");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32ALNI:
        gen_mxu_S32ALNI(ctx);
        break;
    case OPC_MXU_S32LUI:
        /* TODO: Implement emulation of S32LUI instruction. */
        MIPS_INVAL("OPC_MXU_S32LUI");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32NOR:
        gen_mxu_S32NOR(ctx);
        break;
    case OPC_MXU_S32AND:
        gen_mxu_S32AND(ctx);
        break;
    case OPC_MXU_S32OR:
        gen_mxu_S32OR(ctx);
        break;
    case OPC_MXU_S32XOR:
        gen_mxu_S32XOR(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool17
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+---------+---+---------+-----+-----------+
 * |  SPECIAL2 |   rs    |   rt    |0 0|   rd    |x x x|MXU__POOL15|
 * +-----------+---------+---------+---+---------+-----+-----------+
 *
 */
static void decode_opc_mxu__pool17(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 6, 2);

    switch (opcode) {
    case OPC_MXU_LXW:
        /* TODO: Implement emulation of LXW instruction. */
        MIPS_INVAL("OPC_MXU_LXW");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_LXH:
        /* TODO: Implement emulation of LXH instruction. */
        MIPS_INVAL("OPC_MXU_LXH");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_LXHU:
        /* TODO: Implement emulation of LXHU instruction. */
        MIPS_INVAL("OPC_MXU_LXHU");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_LXB:
        /* TODO: Implement emulation of LXB instruction. */
        MIPS_INVAL("OPC_MXU_LXB");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_LXBU:
        /* TODO: Implement emulation of LXBU instruction. */
        MIPS_INVAL("OPC_MXU_LXBU");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool18
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 |   rb    |x x x|  XRd  |  XRa  |0 0 0 0|MXU__POOL18|
 * +-----------+---------+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool18(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_D32SLLV:
        /* TODO: Implement emulation of D32SLLV instruction. */
        MIPS_INVAL("OPC_MXU_D32SLLV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D32SLRV:
        /* TODO: Implement emulation of D32SLRV instruction. */
        MIPS_INVAL("OPC_MXU_D32SLRV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D32SARV:
        /* TODO: Implement emulation of D32SARV instruction. */
        MIPS_INVAL("OPC_MXU_D32SARV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16SLLV:
        /* TODO: Implement emulation of Q16SLLV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SLLV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16SLRV:
        /* TODO: Implement emulation of Q16SLRV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SLRV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q16SARV:
        /* TODO: Implement emulation of Q16SARV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SARV");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool19
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---+---+-------+-------+-------+-------+-----------+
 * |  SPECIAL2 |0 0|x x|  XRd  |  XRc  |  XRb  |  XRa  |MXU__POOL19|
 * +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool19(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8MUL:
    case OPC_MXU_Q8MULSU:
        gen_mxu_q8mul_q8mulsu(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool20
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------+-----+-------+-------+-------+-----------+
 * |  SPECIAL2 |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL20|
 * +-----------+---------+-----+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool20(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_Q8MOVZ:
        /* TODO: Implement emulation of Q8MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_Q8MOVZ");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8MOVN:
        /* TODO: Implement emulation of Q8MOVN instruction. */
        MIPS_INVAL("OPC_MXU_Q8MOVN");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16MOVZ:
        /* TODO: Implement emulation of D16MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_D16MOVZ");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_D16MOVN:
        /* TODO: Implement emulation of D16MOVN instruction. */
        MIPS_INVAL("OPC_MXU_D16MOVN");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32MOVZ:
        /* TODO: Implement emulation of S32MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_S32MOVZ");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_S32MOVN:
        /* TODO: Implement emulation of S32MOVN instruction. */
        MIPS_INVAL("OPC_MXU_S32MOVN");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Decode MXU pool21
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---+---+-------+-------+-------+-------+-----------+
 * |  SPECIAL2 |an2|x x|  XRd  |  XRc  |  XRb  |  XRa  |MXU__POOL21|
 * +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 */
static void decode_opc_mxu__pool21(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8MAC:
        /* TODO: Implement emulation of Q8MAC instruction. */
        MIPS_INVAL("OPC_MXU_Q8MAC");
        generate_exception_end(ctx, EXCP_RI);
        break;
    case OPC_MXU_Q8MACSU:
        /* TODO: Implement emulation of Q8MACSU instruction. */
        MIPS_INVAL("OPC_MXU_Q8MACSU");
        generate_exception_end(ctx, EXCP_RI);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
/*
 * Main MXU decoding function
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 * +-----------+---------------------------------------+-----------+
 * |  SPECIAL2 |                                       |x x x x x x|
 * +-----------+---------------------------------------+-----------+
 *
 */
static void decode_opc_mxu(CPUMIPSState *env, DisasContext *ctx)
{
    /*
     * TODO: Investigate necessity of including handling of
     * CLZ, CLO, SDBB in this function, as they belong to
     * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
     */
    uint32_t opcode = extract32(ctx->opcode, 0, 6);

    if (opcode == OPC__MXU_MUL) {
        uint32_t rs, rt, rd, op1;

        rs = extract32(ctx->opcode, 21, 5);
        rt = extract32(ctx->opcode, 16, 5);
        rd = extract32(ctx->opcode, 11, 5);
        op1 = MASK_SPECIAL2(ctx->opcode);

        gen_arith(ctx, op1, rd, rs, rt);

        return;
    }

    if (opcode == OPC_MXU_S32M2I) {
        gen_mxu_s32m2i(ctx);
        return;
    }

    if (opcode == OPC_MXU_S32I2M) {
        gen_mxu_s32i2m(ctx);
        return;
    }

    {
        TCGv t_mxu_cr = tcg_temp_new();
        TCGLabel *l_exit = gen_new_label();

        gen_load_mxu_cr(t_mxu_cr);
        tcg_gen_andi_tl(t_mxu_cr, t_mxu_cr, MXU_CR_MXU_EN);
        tcg_gen_brcondi_tl(TCG_COND_NE, t_mxu_cr, MXU_CR_MXU_EN, l_exit);

        switch (opcode) {
        case OPC_MXU_S32MADD:
            /* TODO: Implement emulation of S32MADD instruction. */
            MIPS_INVAL("OPC_MXU_S32MADD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S32MADDU:
            /* TODO: Implement emulation of S32MADDU instruction. */
            MIPS_INVAL("OPC_MXU_S32MADDU");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL00:
            decode_opc_mxu__pool00(env, ctx);
            break;
        case OPC_MXU_S32MSUB:
            /* TODO: Implement emulation of S32MSUB instruction. */
            MIPS_INVAL("OPC_MXU_S32MSUB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S32MSUBU:
            /* TODO: Implement emulation of S32MSUBU instruction. */
            MIPS_INVAL("OPC_MXU_S32MSUBU");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL01:
            decode_opc_mxu__pool01(env, ctx);
            break;
        case OPC_MXU__POOL02:
            decode_opc_mxu__pool02(env, ctx);
            break;
        case OPC_MXU_D16MUL:
            gen_mxu_d16mul(ctx);
            break;
        case OPC_MXU__POOL03:
            decode_opc_mxu__pool03(env, ctx);
            break;
        case OPC_MXU_D16MAC:
            gen_mxu_d16mac(ctx);
            break;
        case OPC_MXU_D16MACF:
            /* TODO: Implement emulation of D16MACF instruction. */
            MIPS_INVAL("OPC_MXU_D16MACF");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D16MADL:
            /* TODO: Implement emulation of D16MADL instruction. */
            MIPS_INVAL("OPC_MXU_D16MADL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S16MAD:
            /* TODO: Implement emulation of S16MAD instruction. */
            MIPS_INVAL("OPC_MXU_S16MAD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_Q16ADD:
            /* TODO: Implement emulation of Q16ADD instruction. */
            MIPS_INVAL("OPC_MXU_Q16ADD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D16MACE:
            /* TODO: Implement emulation of D16MACE instruction. */
            MIPS_INVAL("OPC_MXU_D16MACE");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL04:
            decode_opc_mxu__pool04(env, ctx);
            break;
        case OPC_MXU__POOL05:
            decode_opc_mxu__pool05(env, ctx);
            break;
        case OPC_MXU__POOL06:
            decode_opc_mxu__pool06(env, ctx);
            break;
        case OPC_MXU__POOL07:
            decode_opc_mxu__pool07(env, ctx);
            break;
        case OPC_MXU__POOL08:
            decode_opc_mxu__pool08(env, ctx);
            break;
        case OPC_MXU__POOL09:
            decode_opc_mxu__pool09(env, ctx);
            break;
        case OPC_MXU__POOL10:
            decode_opc_mxu__pool10(env, ctx);
            break;
        case OPC_MXU__POOL11:
            decode_opc_mxu__pool11(env, ctx);
            break;
        case OPC_MXU_D32ADD:
            /* TODO: Implement emulation of D32ADD instruction. */
            MIPS_INVAL("OPC_MXU_D32ADD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL12:
            decode_opc_mxu__pool12(env, ctx);
            break;
        case OPC_MXU__POOL13:
            decode_opc_mxu__pool13(env, ctx);
            break;
        case OPC_MXU__POOL14:
            decode_opc_mxu__pool14(env, ctx);
            break;
        case OPC_MXU_Q8ACCE:
            /* TODO: Implement emulation of Q8ACCE instruction. */
            MIPS_INVAL("OPC_MXU_Q8ACCE");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S8LDD:
            gen_mxu_s8ldd(ctx);
            break;
        case OPC_MXU_S8STD:
            /* TODO: Implement emulation of S8STD instruction. */
            MIPS_INVAL("OPC_MXU_S8STD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S8LDI:
            /* TODO: Implement emulation of S8LDI instruction. */
            MIPS_INVAL("OPC_MXU_S8LDI");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S8SDI:
            /* TODO: Implement emulation of S8SDI instruction. */
            MIPS_INVAL("OPC_MXU_S8SDI");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL15:
            decode_opc_mxu__pool15(env, ctx);
            break;
        case OPC_MXU__POOL16:
            decode_opc_mxu__pool16(env, ctx);
            break;
        case OPC_MXU__POOL17:
            decode_opc_mxu__pool17(env, ctx);
            break;
        case OPC_MXU_S16LDD:
            /* TODO: Implement emulation of S16LDD instruction. */
            MIPS_INVAL("OPC_MXU_S16LDD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S16STD:
            /* TODO: Implement emulation of S16STD instruction. */
            MIPS_INVAL("OPC_MXU_S16STD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S16LDI:
            /* TODO: Implement emulation of S16LDI instruction. */
            MIPS_INVAL("OPC_MXU_S16LDI");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S16SDI:
            /* TODO: Implement emulation of S16SDI instruction. */
            MIPS_INVAL("OPC_MXU_S16SDI");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D32SLL:
            /* TODO: Implement emulation of D32SLL instruction. */
            MIPS_INVAL("OPC_MXU_D32SLL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D32SLR:
            /* TODO: Implement emulation of D32SLR instruction. */
            MIPS_INVAL("OPC_MXU_D32SLR");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D32SARL:
            /* TODO: Implement emulation of D32SARL instruction. */
            MIPS_INVAL("OPC_MXU_D32SARL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_D32SAR:
            /* TODO: Implement emulation of D32SAR instruction. */
            MIPS_INVAL("OPC_MXU_D32SAR");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_Q16SLL:
            /* TODO: Implement emulation of Q16SLL instruction. */
            MIPS_INVAL("OPC_MXU_Q16SLL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_Q16SLR:
            /* TODO: Implement emulation of Q16SLR instruction. */
            MIPS_INVAL("OPC_MXU_Q16SLR");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL18:
            decode_opc_mxu__pool18(env, ctx);
            break;
        case OPC_MXU_Q16SAR:
            /* TODO: Implement emulation of Q16SAR instruction. */
            MIPS_INVAL("OPC_MXU_Q16SAR");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU__POOL19:
            decode_opc_mxu__pool19(env, ctx);
            break;
        case OPC_MXU__POOL20:
            decode_opc_mxu__pool20(env, ctx);
            break;
        case OPC_MXU__POOL21:
            decode_opc_mxu__pool21(env, ctx);
            break;
        case OPC_MXU_Q16SCOP:
            /* TODO: Implement emulation of Q16SCOP instruction. */
            MIPS_INVAL("OPC_MXU_Q16SCOP");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_Q8MADL:
            /* TODO: Implement emulation of Q8MADL instruction. */
            MIPS_INVAL("OPC_MXU_Q8MADL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_S32SFL:
            /* TODO: Implement emulation of S32SFL instruction. */
            MIPS_INVAL("OPC_MXU_S32SFL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_MXU_Q8SAD:
            /* TODO: Implement emulation of Q8SAD instruction. */
            MIPS_INVAL("OPC_MXU_Q8SAD");
            generate_exception_end(ctx, EXCP_RI);
            break;
        default:
            MIPS_INVAL("decode_opc_mxu");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }

        gen_set_label(l_exit);
        tcg_temp_free(t_mxu_cr);
    }
}

#endif /* !defined(TARGET_MIPS64) */

static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD: /* Multiply and add/sub */
    case OPC_MADDU:
    case OPC_MSUB:
    case OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /*
             * XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default: /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case R6_OPC_PREF:
        if (rt >= 24) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        }
        /* Treat as NOP. */
        break;
    case R6_OPC_CACHE:
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        break;
    case R6_OPC_SC:
        gen_st_cond(ctx, rt, rs, imm, MO_TESL, false);
        break;
    case R6_OPC_LL:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_BSHFL:
        if (rd == 0) {
            /* Treat as NOP. */
            break;
        }
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN:
        case OPC_ALIGN_1:
        case OPC_ALIGN_2:
        case OPC_ALIGN_3:
            gen_align(ctx, 32, rd, rs, rt, sa & 3);
            break;
        case OPC_BITSWAP:
            gen_bitswap(ctx, op2, rd, rt);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case R6_OPC_SCD:
        gen_st_cond(ctx, rt, rs, imm, MO_TEQ, false);
        break;
    case R6_OPC_LLD:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_DBSHFL:
        check_mips_64(ctx);
        if (rd == 0) {
            /* Treat as NOP. */
            break;
        }
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN:
        case OPC_DALIGN_1:
        case OPC_DALIGN_2:
        case OPC_DALIGN_3:
        case OPC_DALIGN_4:
        case OPC_DALIGN_5:
        case OPC_DALIGN_6:
        case OPC_DALIGN_7:
            gen_align(ctx, 64, rd, rs, rt, sa & 7);
            break;
        case OPC_DBITSWAP:
            gen_bitswap(ctx, op2, rd, rt);
            break;
        }
        break;
#endif
    default: /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
27080 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
27085 rs
= (ctx
->opcode
>> 21) & 0x1f;
27086 rt
= (ctx
->opcode
>> 16) & 0x1f;
27087 rd
= (ctx
->opcode
>> 11) & 0x1f;
27089 op1
= MASK_SPECIAL3(ctx
->opcode
);
27092 case OPC_DIVU_G_2E
:
27094 case OPC_MODU_G_2E
:
27095 case OPC_MULT_G_2E
:
27096 case OPC_MULTU_G_2E
:
27098 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
27099 * the same mask and op1.
27101 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
27102 op2
= MASK_ADDUH_QB(ctx
->opcode
);
27105 case OPC_ADDUH_R_QB
:
27107 case OPC_ADDQH_R_PH
:
27109 case OPC_ADDQH_R_W
:
27111 case OPC_SUBUH_R_QB
:
27113 case OPC_SUBQH_R_PH
:
27115 case OPC_SUBQH_R_W
:
27116 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27121 case OPC_MULQ_RS_W
:
27122 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27125 MIPS_INVAL("MASK ADDUH.QB");
27126 generate_exception_end(ctx
, EXCP_RI
);
27129 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
27130 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27132 generate_exception_end(ctx
, EXCP_RI
);
27136 op2
= MASK_LX(ctx
->opcode
);
27138 #if defined(TARGET_MIPS64)
27144 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
27146 default: /* Invalid */
27147 MIPS_INVAL("MASK LX");
27148 generate_exception_end(ctx
, EXCP_RI
);
27152 case OPC_ABSQ_S_PH_DSP
:
27153 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
27155 case OPC_ABSQ_S_QB
:
27156 case OPC_ABSQ_S_PH
:
27158 case OPC_PRECEQ_W_PHL
:
27159 case OPC_PRECEQ_W_PHR
:
27160 case OPC_PRECEQU_PH_QBL
:
27161 case OPC_PRECEQU_PH_QBR
:
27162 case OPC_PRECEQU_PH_QBLA
:
27163 case OPC_PRECEQU_PH_QBRA
:
27164 case OPC_PRECEU_PH_QBL
:
27165 case OPC_PRECEU_PH_QBR
:
27166 case OPC_PRECEU_PH_QBLA
:
27167 case OPC_PRECEU_PH_QBRA
:
27168 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27175 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27178 MIPS_INVAL("MASK ABSQ_S.PH");
27179 generate_exception_end(ctx
, EXCP_RI
);
27183 case OPC_ADDU_QB_DSP
:
27184 op2
= MASK_ADDU_QB(ctx
->opcode
);
27187 case OPC_ADDQ_S_PH
:
27190 case OPC_ADDU_S_QB
:
27192 case OPC_ADDU_S_PH
:
27194 case OPC_SUBQ_S_PH
:
27197 case OPC_SUBU_S_QB
:
27199 case OPC_SUBU_S_PH
:
27203 case OPC_RADDU_W_QB
:
27204 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27206 case OPC_MULEU_S_PH_QBL
:
27207 case OPC_MULEU_S_PH_QBR
:
27208 case OPC_MULQ_RS_PH
:
27209 case OPC_MULEQ_S_W_PHL
:
27210 case OPC_MULEQ_S_W_PHR
:
27211 case OPC_MULQ_S_PH
:
27212 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27214 default: /* Invalid */
27215 MIPS_INVAL("MASK ADDU.QB");
27216 generate_exception_end(ctx
, EXCP_RI
);
27221 case OPC_CMPU_EQ_QB_DSP
:
27222 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
27224 case OPC_PRECR_SRA_PH_W
:
27225 case OPC_PRECR_SRA_R_PH_W
:
27226 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27228 case OPC_PRECR_QB_PH
:
27229 case OPC_PRECRQ_QB_PH
:
27230 case OPC_PRECRQ_PH_W
:
27231 case OPC_PRECRQ_RS_PH_W
:
27232 case OPC_PRECRQU_S_QB_PH
:
27233 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27235 case OPC_CMPU_EQ_QB
:
27236 case OPC_CMPU_LT_QB
:
27237 case OPC_CMPU_LE_QB
:
27238 case OPC_CMP_EQ_PH
:
27239 case OPC_CMP_LT_PH
:
27240 case OPC_CMP_LE_PH
:
27241 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27243 case OPC_CMPGU_EQ_QB
:
27244 case OPC_CMPGU_LT_QB
:
27245 case OPC_CMPGU_LE_QB
:
27246 case OPC_CMPGDU_EQ_QB
:
27247 case OPC_CMPGDU_LT_QB
:
27248 case OPC_CMPGDU_LE_QB
:
27251 case OPC_PACKRL_PH
:
27252 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27254 default: /* Invalid */
27255 MIPS_INVAL("MASK CMPU.EQ.QB");
27256 generate_exception_end(ctx
, EXCP_RI
);
27260 case OPC_SHLL_QB_DSP
:
27261 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27263 case OPC_DPA_W_PH_DSP
:
27264 op2
= MASK_DPA_W_PH(ctx
->opcode
);
27266 case OPC_DPAU_H_QBL
:
27267 case OPC_DPAU_H_QBR
:
27268 case OPC_DPSU_H_QBL
:
27269 case OPC_DPSU_H_QBR
:
27271 case OPC_DPAX_W_PH
:
27272 case OPC_DPAQ_S_W_PH
:
27273 case OPC_DPAQX_S_W_PH
:
27274 case OPC_DPAQX_SA_W_PH
:
27276 case OPC_DPSX_W_PH
:
27277 case OPC_DPSQ_S_W_PH
:
27278 case OPC_DPSQX_S_W_PH
:
27279 case OPC_DPSQX_SA_W_PH
:
27280 case OPC_MULSAQ_S_W_PH
:
27281 case OPC_DPAQ_SA_L_W
:
27282 case OPC_DPSQ_SA_L_W
:
27283 case OPC_MAQ_S_W_PHL
:
27284 case OPC_MAQ_S_W_PHR
:
27285 case OPC_MAQ_SA_W_PHL
:
27286 case OPC_MAQ_SA_W_PHR
:
27287 case OPC_MULSA_W_PH
:
27288 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27290 default: /* Invalid */
27291 MIPS_INVAL("MASK DPAW.PH");
27292 generate_exception_end(ctx
, EXCP_RI
);
27297 op2
= MASK_INSV(ctx
->opcode
);
27308 t0
= tcg_temp_new();
27309 t1
= tcg_temp_new();
27311 gen_load_gpr(t0
, rt
);
27312 gen_load_gpr(t1
, rs
);
27314 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27320 default: /* Invalid */
27321 MIPS_INVAL("MASK INSV");
27322 generate_exception_end(ctx
, EXCP_RI
);
27326 case OPC_APPEND_DSP
:
27327 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27329 case OPC_EXTR_W_DSP
:
27330 op2
= MASK_EXTR_W(ctx
->opcode
);
27334 case OPC_EXTR_RS_W
:
27336 case OPC_EXTRV_S_H
:
27338 case OPC_EXTRV_R_W
:
27339 case OPC_EXTRV_RS_W
:
27344 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27347 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27353 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27355 default: /* Invalid */
27356 MIPS_INVAL("MASK EXTR.W");
27357 generate_exception_end(ctx
, EXCP_RI
);
27361 #if defined(TARGET_MIPS64)
27362 case OPC_DDIV_G_2E
:
27363 case OPC_DDIVU_G_2E
:
27364 case OPC_DMULT_G_2E
:
27365 case OPC_DMULTU_G_2E
:
27366 case OPC_DMOD_G_2E
:
27367 case OPC_DMODU_G_2E
:
27368 check_insn(ctx
, INSN_LOONGSON2E
);
27369 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27371 case OPC_ABSQ_S_QH_DSP
:
27372 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27374 case OPC_PRECEQ_L_PWL
:
27375 case OPC_PRECEQ_L_PWR
:
27376 case OPC_PRECEQ_PW_QHL
:
27377 case OPC_PRECEQ_PW_QHR
:
27378 case OPC_PRECEQ_PW_QHLA
:
27379 case OPC_PRECEQ_PW_QHRA
:
27380 case OPC_PRECEQU_QH_OBL
:
27381 case OPC_PRECEQU_QH_OBR
:
27382 case OPC_PRECEQU_QH_OBLA
:
27383 case OPC_PRECEQU_QH_OBRA
:
27384 case OPC_PRECEU_QH_OBL
:
27385 case OPC_PRECEU_QH_OBR
:
27386 case OPC_PRECEU_QH_OBLA
:
27387 case OPC_PRECEU_QH_OBRA
:
27388 case OPC_ABSQ_S_OB
:
27389 case OPC_ABSQ_S_PW
:
27390 case OPC_ABSQ_S_QH
:
27391 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27399 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27401 default: /* Invalid */
27402 MIPS_INVAL("MASK ABSQ_S.QH");
27403 generate_exception_end(ctx
, EXCP_RI
);
27407 case OPC_ADDU_OB_DSP
:
27408 op2
= MASK_ADDU_OB(ctx
->opcode
);
27410 case OPC_RADDU_L_OB
:
27412 case OPC_SUBQ_S_PW
:
27414 case OPC_SUBQ_S_QH
:
27416 case OPC_SUBU_S_OB
:
27418 case OPC_SUBU_S_QH
:
27420 case OPC_SUBUH_R_OB
:
27422 case OPC_ADDQ_S_PW
:
27424 case OPC_ADDQ_S_QH
:
27426 case OPC_ADDU_S_OB
:
27428 case OPC_ADDU_S_QH
:
27430 case OPC_ADDUH_R_OB
:
27431 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27433 case OPC_MULEQ_S_PW_QHL
:
27434 case OPC_MULEQ_S_PW_QHR
:
27435 case OPC_MULEU_S_QH_OBL
:
27436 case OPC_MULEU_S_QH_OBR
:
27437 case OPC_MULQ_RS_QH
:
27438 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27440 default: /* Invalid */
27441 MIPS_INVAL("MASK ADDU.OB");
27442 generate_exception_end(ctx
, EXCP_RI
);
27446 case OPC_CMPU_EQ_OB_DSP
:
27447 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27449 case OPC_PRECR_SRA_QH_PW
:
27450 case OPC_PRECR_SRA_R_QH_PW
:
27451 /* Return value is rt. */
27452 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27454 case OPC_PRECR_OB_QH
:
27455 case OPC_PRECRQ_OB_QH
:
27456 case OPC_PRECRQ_PW_L
:
27457 case OPC_PRECRQ_QH_PW
:
27458 case OPC_PRECRQ_RS_QH_PW
:
27459 case OPC_PRECRQU_S_OB_QH
:
27460 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27462 case OPC_CMPU_EQ_OB
:
27463 case OPC_CMPU_LT_OB
:
27464 case OPC_CMPU_LE_OB
:
27465 case OPC_CMP_EQ_QH
:
27466 case OPC_CMP_LT_QH
:
27467 case OPC_CMP_LE_QH
:
27468 case OPC_CMP_EQ_PW
:
27469 case OPC_CMP_LT_PW
:
27470 case OPC_CMP_LE_PW
:
27471 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27473 case OPC_CMPGDU_EQ_OB
:
27474 case OPC_CMPGDU_LT_OB
:
27475 case OPC_CMPGDU_LE_OB
:
27476 case OPC_CMPGU_EQ_OB
:
27477 case OPC_CMPGU_LT_OB
:
27478 case OPC_CMPGU_LE_OB
:
27479 case OPC_PACKRL_PW
:
27483 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27485 default: /* Invalid */
27486 MIPS_INVAL("MASK CMPU_EQ.OB");
27487 generate_exception_end(ctx
, EXCP_RI
);
27491 case OPC_DAPPEND_DSP
:
27492 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27494 case OPC_DEXTR_W_DSP
:
27495 op2
= MASK_DEXTR_W(ctx
->opcode
);
27502 case OPC_DEXTR_R_L
:
27503 case OPC_DEXTR_RS_L
:
27505 case OPC_DEXTR_R_W
:
27506 case OPC_DEXTR_RS_W
:
27507 case OPC_DEXTR_S_H
:
27509 case OPC_DEXTRV_R_L
:
27510 case OPC_DEXTRV_RS_L
:
27511 case OPC_DEXTRV_S_H
:
27513 case OPC_DEXTRV_R_W
:
27514 case OPC_DEXTRV_RS_W
:
27515 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27520 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27522 default: /* Invalid */
27523 MIPS_INVAL("MASK EXTR.W");
27524 generate_exception_end(ctx
, EXCP_RI
);
27528 case OPC_DPAQ_W_QH_DSP
:
27529 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27531 case OPC_DPAU_H_OBL
:
27532 case OPC_DPAU_H_OBR
:
27533 case OPC_DPSU_H_OBL
:
27534 case OPC_DPSU_H_OBR
:
27536 case OPC_DPAQ_S_W_QH
:
27538 case OPC_DPSQ_S_W_QH
:
27539 case OPC_MULSAQ_S_W_QH
:
27540 case OPC_DPAQ_SA_L_PW
:
27541 case OPC_DPSQ_SA_L_PW
:
27542 case OPC_MULSAQ_S_L_PW
:
27543 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27545 case OPC_MAQ_S_W_QHLL
:
27546 case OPC_MAQ_S_W_QHLR
:
27547 case OPC_MAQ_S_W_QHRL
:
27548 case OPC_MAQ_S_W_QHRR
:
27549 case OPC_MAQ_SA_W_QHLL
:
27550 case OPC_MAQ_SA_W_QHLR
:
27551 case OPC_MAQ_SA_W_QHRL
:
27552 case OPC_MAQ_SA_W_QHRR
:
27553 case OPC_MAQ_S_L_PWL
:
27554 case OPC_MAQ_S_L_PWR
:
27559 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27561 default: /* Invalid */
27562 MIPS_INVAL("MASK DPAQ.W.QH");
27563 generate_exception_end(ctx
, EXCP_RI
);
27567 case OPC_DINSV_DSP
:
27568 op2
= MASK_INSV(ctx
->opcode
);
27579 t0
= tcg_temp_new();
27580 t1
= tcg_temp_new();
27582 gen_load_gpr(t0
, rt
);
27583 gen_load_gpr(t1
, rs
);
27585 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27591 default: /* Invalid */
27592 MIPS_INVAL("MASK DINSV");
27593 generate_exception_end(ctx
, EXCP_RI
);
27597 case OPC_SHLL_OB_DSP
:
27598 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27601 default: /* Invalid */
27602 MIPS_INVAL("special3_legacy");
27603 generate_exception_end(ctx
, EXCP_RI
);
#if defined(TARGET_MIPS64)

static void decode_mmi0(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI0(ctx->opcode);

    switch (opc) {
    case MMI_OPC_0_PADDW:     /* TODO: MMI_OPC_0_PADDW */
    case MMI_OPC_0_PSUBW:     /* TODO: MMI_OPC_0_PSUBW */
    case MMI_OPC_0_PCGTW:     /* TODO: MMI_OPC_0_PCGTW */
    case MMI_OPC_0_PMAXW:     /* TODO: MMI_OPC_0_PMAXW */
    case MMI_OPC_0_PADDH:     /* TODO: MMI_OPC_0_PADDH */
    case MMI_OPC_0_PSUBH:     /* TODO: MMI_OPC_0_PSUBH */
    case MMI_OPC_0_PCGTH:     /* TODO: MMI_OPC_0_PCGTH */
    case MMI_OPC_0_PMAXH:     /* TODO: MMI_OPC_0_PMAXH */
    case MMI_OPC_0_PADDB:     /* TODO: MMI_OPC_0_PADDB */
    case MMI_OPC_0_PSUBB:     /* TODO: MMI_OPC_0_PSUBB */
    case MMI_OPC_0_PCGTB:     /* TODO: MMI_OPC_0_PCGTB */
    case MMI_OPC_0_PADDSW:    /* TODO: MMI_OPC_0_PADDSW */
    case MMI_OPC_0_PSUBSW:    /* TODO: MMI_OPC_0_PSUBSW */
    case MMI_OPC_0_PEXTLW:    /* TODO: MMI_OPC_0_PEXTLW */
    case MMI_OPC_0_PPACW:     /* TODO: MMI_OPC_0_PPACW */
    case MMI_OPC_0_PADDSH:    /* TODO: MMI_OPC_0_PADDSH */
    case MMI_OPC_0_PSUBSH:    /* TODO: MMI_OPC_0_PSUBSH */
    case MMI_OPC_0_PEXTLH:    /* TODO: MMI_OPC_0_PEXTLH */
    case MMI_OPC_0_PPACH:     /* TODO: MMI_OPC_0_PPACH */
    case MMI_OPC_0_PADDSB:    /* TODO: MMI_OPC_0_PADDSB */
    case MMI_OPC_0_PSUBSB:    /* TODO: MMI_OPC_0_PSUBSB */
    case MMI_OPC_0_PEXTLB:    /* TODO: MMI_OPC_0_PEXTLB */
    case MMI_OPC_0_PPACB:     /* TODO: MMI_OPC_0_PPACB */
    case MMI_OPC_0_PEXT5:     /* TODO: MMI_OPC_0_PEXT5 */
    case MMI_OPC_0_PPAC5:     /* TODO: MMI_OPC_0_PPAC5 */
        generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_CLASS_MMI0 */
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI0");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_mmi1(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI1(ctx->opcode);

    switch (opc) {
    case MMI_OPC_1_PABSW:     /* TODO: MMI_OPC_1_PABSW */
    case MMI_OPC_1_PCEQW:     /* TODO: MMI_OPC_1_PCEQW */
    case MMI_OPC_1_PMINW:     /* TODO: MMI_OPC_1_PMINW */
    case MMI_OPC_1_PADSBH:    /* TODO: MMI_OPC_1_PADSBH */
    case MMI_OPC_1_PABSH:     /* TODO: MMI_OPC_1_PABSH */
    case MMI_OPC_1_PCEQH:     /* TODO: MMI_OPC_1_PCEQH */
    case MMI_OPC_1_PMINH:     /* TODO: MMI_OPC_1_PMINH */
    case MMI_OPC_1_PCEQB:     /* TODO: MMI_OPC_1_PCEQB */
    case MMI_OPC_1_PADDUW:    /* TODO: MMI_OPC_1_PADDUW */
    case MMI_OPC_1_PSUBUW:    /* TODO: MMI_OPC_1_PSUBUW */
    case MMI_OPC_1_PEXTUW:    /* TODO: MMI_OPC_1_PEXTUW */
    case MMI_OPC_1_PADDUH:    /* TODO: MMI_OPC_1_PADDUH */
    case MMI_OPC_1_PSUBUH:    /* TODO: MMI_OPC_1_PSUBUH */
    case MMI_OPC_1_PEXTUH:    /* TODO: MMI_OPC_1_PEXTUH */
    case MMI_OPC_1_PADDUB:    /* TODO: MMI_OPC_1_PADDUB */
    case MMI_OPC_1_PSUBUB:    /* TODO: MMI_OPC_1_PSUBUB */
    case MMI_OPC_1_PEXTUB:    /* TODO: MMI_OPC_1_PEXTUB */
    case MMI_OPC_1_QFSRV:     /* TODO: MMI_OPC_1_QFSRV */
        generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_CLASS_MMI1 */
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI1");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_mmi2(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI2(ctx->opcode);

    switch (opc) {
    case MMI_OPC_2_PMADDW:    /* TODO: MMI_OPC_2_PMADDW */
    case MMI_OPC_2_PSLLVW:    /* TODO: MMI_OPC_2_PSLLVW */
    case MMI_OPC_2_PSRLVW:    /* TODO: MMI_OPC_2_PSRLVW */
    case MMI_OPC_2_PMSUBW:    /* TODO: MMI_OPC_2_PMSUBW */
    case MMI_OPC_2_PMFHI:     /* TODO: MMI_OPC_2_PMFHI */
    case MMI_OPC_2_PMFLO:     /* TODO: MMI_OPC_2_PMFLO */
    case MMI_OPC_2_PINTH:     /* TODO: MMI_OPC_2_PINTH */
    case MMI_OPC_2_PMULTW:    /* TODO: MMI_OPC_2_PMULTW */
    case MMI_OPC_2_PDIVW:     /* TODO: MMI_OPC_2_PDIVW */
    case MMI_OPC_2_PMADDH:    /* TODO: MMI_OPC_2_PMADDH */
    case MMI_OPC_2_PHMADH:    /* TODO: MMI_OPC_2_PHMADH */
    case MMI_OPC_2_PAND:      /* TODO: MMI_OPC_2_PAND */
    case MMI_OPC_2_PXOR:      /* TODO: MMI_OPC_2_PXOR */
    case MMI_OPC_2_PMSUBH:    /* TODO: MMI_OPC_2_PMSUBH */
    case MMI_OPC_2_PHMSBH:    /* TODO: MMI_OPC_2_PHMSBH */
    case MMI_OPC_2_PEXEH:     /* TODO: MMI_OPC_2_PEXEH */
    case MMI_OPC_2_PREVH:     /* TODO: MMI_OPC_2_PREVH */
    case MMI_OPC_2_PMULTH:    /* TODO: MMI_OPC_2_PMULTH */
    case MMI_OPC_2_PDIVBW:    /* TODO: MMI_OPC_2_PDIVBW */
    case MMI_OPC_2_PEXEW:     /* TODO: MMI_OPC_2_PEXEW */
    case MMI_OPC_2_PROT3W:    /* TODO: MMI_OPC_2_PROT3W */
        generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_CLASS_MMI2 */
        break;
    case MMI_OPC_2_PCPYLD:
        gen_mmi_pcpyld(ctx);
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI2");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_mmi3(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI3(ctx->opcode);

    switch (opc) {
    case MMI_OPC_3_PMADDUW:    /* TODO: MMI_OPC_3_PMADDUW */
    case MMI_OPC_3_PSRAVW:     /* TODO: MMI_OPC_3_PSRAVW */
    case MMI_OPC_3_PMTHI:      /* TODO: MMI_OPC_3_PMTHI */
    case MMI_OPC_3_PMTLO:      /* TODO: MMI_OPC_3_PMTLO */
    case MMI_OPC_3_PINTEH:     /* TODO: MMI_OPC_3_PINTEH */
    case MMI_OPC_3_PMULTUW:    /* TODO: MMI_OPC_3_PMULTUW */
    case MMI_OPC_3_PDIVUW:     /* TODO: MMI_OPC_3_PDIVUW */
    case MMI_OPC_3_POR:        /* TODO: MMI_OPC_3_POR */
    case MMI_OPC_3_PNOR:       /* TODO: MMI_OPC_3_PNOR */
    case MMI_OPC_3_PEXCH:      /* TODO: MMI_OPC_3_PEXCH */
    case MMI_OPC_3_PEXCW:      /* TODO: MMI_OPC_3_PEXCW */
        generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_CLASS_MMI3 */
        break;
    case MMI_OPC_3_PCPYH:
        gen_mmi_pcpyh(ctx);
        break;
    case MMI_OPC_3_PCPYUD:
        gen_mmi_pcpyud(ctx);
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI3");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_mmi(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI(ctx->opcode);
    int rs = extract32(ctx->opcode, 21, 5);
    int rt = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);

    switch (opc) {
    case MMI_OPC_CLASS_MMI0:
        decode_mmi0(env, ctx);
        break;
    case MMI_OPC_CLASS_MMI1:
        decode_mmi1(env, ctx);
        break;
    case MMI_OPC_CLASS_MMI2:
        decode_mmi2(env, ctx);
        break;
    case MMI_OPC_CLASS_MMI3:
        decode_mmi3(env, ctx);
        break;
    case MMI_OPC_MULT1:
    case MMI_OPC_MULTU1:
    case MMI_OPC_MADD:
    case MMI_OPC_MADDU:
    case MMI_OPC_MADD1:
    case MMI_OPC_MADDU1:
        gen_mul_txx9(ctx, opc, rd, rs, rt);
        break;
    case MMI_OPC_DIV1:
    case MMI_OPC_DIVU1:
        gen_div1_tx79(ctx, opc, rs, rt);
        break;
    case MMI_OPC_MTLO1:
    case MMI_OPC_MTHI1:
        gen_HILO1_tx79(ctx, opc, rs);
        break;
    case MMI_OPC_MFLO1:
    case MMI_OPC_MFHI1:
        gen_HILO1_tx79(ctx, opc, rd);
        break;
    case MMI_OPC_PLZCW:       /* TODO: MMI_OPC_PLZCW */
    case MMI_OPC_PMFHL:       /* TODO: MMI_OPC_PMFHL */
    case MMI_OPC_PMTHL:       /* TODO: MMI_OPC_PMTHL */
    case MMI_OPC_PSLLH:       /* TODO: MMI_OPC_PSLLH */
    case MMI_OPC_PSRLH:       /* TODO: MMI_OPC_PSRLH */
    case MMI_OPC_PSRAH:       /* TODO: MMI_OPC_PSRAH */
    case MMI_OPC_PSLLW:       /* TODO: MMI_OPC_PSLLW */
    case MMI_OPC_PSRLW:       /* TODO: MMI_OPC_PSRLW */
    case MMI_OPC_PSRAW:       /* TODO: MMI_OPC_PSRAW */
        generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_CLASS_MMI */
        break;
    default:
        MIPS_INVAL("TX79 MMI class");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void gen_mmi_lq(CPUMIPSState *env, DisasContext *ctx)
{
    generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_LQ */
}

static void gen_mmi_sq(DisasContext *ctx, int base, int rt, int offset)
{
    generate_exception_end(ctx, EXCP_RI);    /* TODO: MMI_OPC_SQ */
}
/*
 * The TX79-specific instruction Store Quadword
 *
 * +--------+-------+-------+------------------------+
 * | 011111 |  base |   rt  |         offset         | SQ
 * +--------+-------+-------+------------------------+
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 * +--------+-------+-------+-------+-------+--------+
 * | 011111 | 00000 |   rt  |   rd  | 00000 | 111011 | RDHWR
 * +--------+-------+-------+-------+-------+--------+
 *
 * that is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
static void decode_mmi_sq(CPUMIPSState *env, DisasContext *ctx)
{
    int base = extract32(ctx->opcode, 21, 5);
    int rt = extract32(ctx->opcode, 16, 5);
    int offset = extract32(ctx->opcode, 0, 16);

#ifdef CONFIG_USER_ONLY
    uint32_t op1 = MASK_SPECIAL3(ctx->opcode);
    uint32_t op2 = extract32(ctx->opcode, 6, 5);

    if (base == 0 && op2 == 0 && op1 == OPC_RDHWR) {
        int rd = extract32(ctx->opcode, 11, 5);

        gen_rdhwr(ctx, rt, rd, 0);
        return;
    }
#endif

    gen_mmi_sq(ctx, base, rt, offset);
}
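
/*
 * For example, the canonical Linux TLS read "rdhwr $3, $29" encodes as
 * 0x7c03e83b: base (bits 25:21) and bits 10:6 are zero and the function
 * field is OPC_RDHWR, so user-mode QEMU routes it through gen_rdhwr()
 * above; any encoding with a non-zero base or non-zero bits 10:6 falls
 * through to the SQ path instead.
 */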
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = sextract32(ctx->opcode, 7, 9);

    op1 = MASK_SPECIAL3(ctx->opcode);

    /*
     * EVA loads and stores overlap Loongson 2E instructions decoded by
     * decode_opc_special3_legacy(), so be careful to allow their decoding
     * when EVA is absent.
     */
    switch (op1) {
    case OPC_LWLE:
    case OPC_LWRE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LBUE:
    case OPC_LHUE:
    case OPC_LBE:
    case OPC_LHE:
    case OPC_LLE:
    case OPC_LWE:
        check_cp0_enabled(ctx);
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_SWLE:
    case OPC_SWRE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SBE:
    case OPC_SHE:
    case OPC_SWE:
        check_cp0_enabled(ctx);
        gen_st(ctx, op1, rt, rs, imm);
        break;
    case OPC_SCE:
        check_cp0_enabled(ctx);
        gen_st_cond(ctx, rt, rs, imm, MO_TESL, true);
        break;
    case OPC_CACHEE:
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREFE:
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        break;
    case OPC_EXT:
    case OPC_INS:
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_BSHFL:
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN:
        case OPC_ALIGN_1:
        case OPC_ALIGN_2:
        case OPC_ALIGN_3:
        case OPC_BITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM:
    case OPC_DEXTU:
    case OPC_DEXT:
    case OPC_DINSM:
    case OPC_DINSU:
    case OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_DBSHFL:
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN:
        case OPC_DALIGN_1:
        case OPC_DALIGN_2:
        case OPC_DALIGN_3:
        case OPC_DALIGN_4:
        case OPC_DALIGN_5:
        case OPC_DALIGN_6:
        case OPC_DALIGN_7:
        case OPC_DBITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#endif
    case OPC_RDHWR:
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        break;
    case OPC_FORK:
        check_mt(ctx);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;
    case OPC_YIELD:
        check_mt(ctx);
        {
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, cpu_env, t0);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
        } else {
            decode_opc_special3_legacy(env, ctx);
        }
    }
}
28020 static inline int check_msa_access(DisasContext
*ctx
)
28022 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
28023 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
28024 generate_exception_end(ctx
, EXCP_RI
);
28028 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
28029 if (ctx
->insn_flags
& ASE_MSA
) {
28030 generate_exception_end(ctx
, EXCP_MSADIS
);
28033 generate_exception_end(ctx
, EXCP_RI
);
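
/*
 * check_msa_access() above boils down to: MSA requires the 64-bit FPU
 * register file, so an enabled FPU running with 32-bit registers raises RI;
 * a core that implements MSA but currently has it disabled raises the MSA
 * Disabled exception; a core without the MSA ASE raises RI. Only when all
 * checks pass does MSA decoding proceed.
 */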
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
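
/*
 * The constant pairs above implement the usual zero-lane detection trick,
 * applied to each 64-bit half of the 128-bit vector register:
 * (x - 0x01..01) & ~x & 0x80..80 is non-zero exactly when some lane of x is
 * zero. E.g. for DF_BYTE, a 0x00 byte yields (0x00 - 0x01) = 0xff, whose
 * 0x80 bit survives the AND with ~0x00, while for any non-zero byte either
 * the subtraction leaves bit 7 clear or the complement clears it.
 */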
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt << 1], msa_wr_d[(wt << 1) + 1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->base.pc_next + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
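
/*
 * gen_msa_branch() above only computes the branch condition (into bcond);
 * MIPS_HFLAG_BC and MIPS_HFLAG_BDS32 record that a conditional branch with
 * a 4-byte delay slot is pending, and the branch itself is emitted once the
 * delay-slot instruction has been translated, using the target kept in
 * ctx->btarget (offset scaled by 4, plus 4 for the delay slot).
 */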
28126 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
28128 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
28129 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
28130 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28131 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28133 TCGv_i32 twd
= tcg_const_i32(wd
);
28134 TCGv_i32 tws
= tcg_const_i32(ws
);
28135 TCGv_i32 ti8
= tcg_const_i32(i8
);
28137 switch (MASK_MSA_I8(ctx
->opcode
)) {
28139 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
28142 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
28145 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
28148 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
28151 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
28154 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
28157 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
28163 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
28164 if (df
== DF_DOUBLE
) {
28165 generate_exception_end(ctx
, EXCP_RI
);
28167 TCGv_i32 tdf
= tcg_const_i32(df
);
28168 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
28169 tcg_temp_free_i32(tdf
);
28174 MIPS_INVAL("MSA instruction");
28175 generate_exception_end(ctx
, EXCP_RI
);
28179 tcg_temp_free_i32(twd
);
28180 tcg_temp_free_i32(tws
);
28181 tcg_temp_free_i32(ti8
);
28184 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
28186 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28187 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28188 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
28189 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
28190 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28191 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28193 TCGv_i32 tdf
= tcg_const_i32(df
);
28194 TCGv_i32 twd
= tcg_const_i32(wd
);
28195 TCGv_i32 tws
= tcg_const_i32(ws
);
28196 TCGv_i32 timm
= tcg_temp_new_i32();
28197 tcg_gen_movi_i32(timm
, u5
);
28199 switch (MASK_MSA_I5(ctx
->opcode
)) {
28201 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28204 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28206 case OPC_MAXI_S_df
:
28207 tcg_gen_movi_i32(timm
, s5
);
28208 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28210 case OPC_MAXI_U_df
:
28211 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28213 case OPC_MINI_S_df
:
28214 tcg_gen_movi_i32(timm
, s5
);
28215 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28217 case OPC_MINI_U_df
:
28218 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28221 tcg_gen_movi_i32(timm
, s5
);
28222 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28224 case OPC_CLTI_S_df
:
28225 tcg_gen_movi_i32(timm
, s5
);
28226 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28228 case OPC_CLTI_U_df
:
28229 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28231 case OPC_CLEI_S_df
:
28232 tcg_gen_movi_i32(timm
, s5
);
28233 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28235 case OPC_CLEI_U_df
:
28236 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28240 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
28241 tcg_gen_movi_i32(timm
, s10
);
28242 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
28246 MIPS_INVAL("MSA instruction");
28247 generate_exception_end(ctx
, EXCP_RI
);
28251 tcg_temp_free_i32(tdf
);
28252 tcg_temp_free_i32(twd
);
28253 tcg_temp_free_i32(tws
);
28254 tcg_temp_free_i32(timm
);
28257 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
28259 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28260 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
28261 uint32_t df
= 0, m
= 0;
28262 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28263 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28270 if ((dfm
& 0x40) == 0x00) {
28273 } else if ((dfm
& 0x60) == 0x40) {
28276 } else if ((dfm
& 0x70) == 0x60) {
28279 } else if ((dfm
& 0x78) == 0x70) {
28283 generate_exception_end(ctx
, EXCP_RI
);
28287 tdf
= tcg_const_i32(df
);
28288 tm
= tcg_const_i32(m
);
28289 twd
= tcg_const_i32(wd
);
28290 tws
= tcg_const_i32(ws
);
28292 switch (MASK_MSA_BIT(ctx
->opcode
)) {
28294 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28297 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
28300 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28303 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28306 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
28309 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
28311 case OPC_BINSLI_df
:
28312 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28314 case OPC_BINSRI_df
:
28315 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28318 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
28321 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
28324 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
28327 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28330 MIPS_INVAL("MSA instruction");
28331 generate_exception_end(ctx
, EXCP_RI
);
28335 tcg_temp_free_i32(tdf
);
28336 tcg_temp_free_i32(tm
);
28337 tcg_temp_free_i32(twd
);
28338 tcg_temp_free_i32(tws
);
28341 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28343 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28344 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28345 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28346 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28347 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28349 TCGv_i32 tdf
= tcg_const_i32(df
);
28350 TCGv_i32 twd
= tcg_const_i32(wd
);
28351 TCGv_i32 tws
= tcg_const_i32(ws
);
28352 TCGv_i32 twt
= tcg_const_i32(wt
);
28354 switch (MASK_MSA_3R(ctx
->opcode
)) {
28356 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28359 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28362 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28365 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28367 case OPC_SUBS_S_df
:
28368 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28371 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28374 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28377 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28380 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28383 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28385 case OPC_ADDS_A_df
:
28386 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28388 case OPC_SUBS_U_df
:
28389 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28392 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28395 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28398 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28401 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28404 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28407 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28409 case OPC_ADDS_S_df
:
28410 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28412 case OPC_SUBSUS_U_df
:
28413 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28416 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28419 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
    case OPC_SRLR_df:
        gen_helper_msa_srlr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BCLR_df:
        gen_helper_msa_bclr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MAX_U_df:
        gen_helper_msa_max_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLT_U_df:
        gen_helper_msa_clt_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADDS_U_df:
        gen_helper_msa_adds_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBSUU_S_df:
        gen_helper_msa_subsuu_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_PCKOD_df:
        gen_helper_msa_pckod_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BSET_df:
        gen_helper_msa_bset_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_S_df:
        gen_helper_msa_min_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLE_S_df:
        gen_helper_msa_cle_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVE_S_df:
        gen_helper_msa_ave_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ASUB_S_df:
        gen_helper_msa_asub_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DIV_S_df:
        gen_helper_msa_div_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVL_df:
        gen_helper_msa_ilvl_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BNEG_df:
        gen_helper_msa_bneg_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_U_df:
        gen_helper_msa_min_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLE_U_df:
        gen_helper_msa_cle_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVE_U_df:
        gen_helper_msa_ave_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ASUB_U_df:
        gen_helper_msa_asub_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DIV_U_df:
        gen_helper_msa_div_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVR_df:
        gen_helper_msa_ilvr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BINSL_df:
        gen_helper_msa_binsl_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MAX_A_df:
        gen_helper_msa_max_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVER_S_df:
        gen_helper_msa_aver_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MOD_S_df:
        gen_helper_msa_mod_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVEV_df:
        gen_helper_msa_ilvev_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BINSR_df:
        gen_helper_msa_binsr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_A_df:
        gen_helper_msa_min_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVER_U_df:
        gen_helper_msa_aver_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MOD_U_df:
        gen_helper_msa_mod_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVOD_df:
        gen_helper_msa_ilvod_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DOTP_S_df:
    case OPC_DOTP_U_df:
    case OPC_DPADD_S_df:
    case OPC_DPADD_U_df:
    case OPC_DPSUB_S_df:
    case OPC_HADD_S_df:
    case OPC_DPSUB_U_df:
    case OPC_HADD_U_df:
    case OPC_HSUB_S_df:
    case OPC_HSUB_U_df:
        if (df == DF_BYTE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        switch (MASK_MSA_3R(ctx->opcode)) {
        case OPC_DOTP_S_df:
            gen_helper_msa_dotp_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DOTP_U_df:
            gen_helper_msa_dotp_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPADD_S_df:
            gen_helper_msa_dpadd_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPADD_U_df:
            gen_helper_msa_dpadd_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPSUB_S_df:
            gen_helper_msa_dpsub_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HADD_S_df:
            gen_helper_msa_hadd_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPSUB_U_df:
            gen_helper_msa_dpsub_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HADD_U_df:
            gen_helper_msa_hadd_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HSUB_S_df:
            gen_helper_msa_hsub_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HSUB_U_df:
            gen_helper_msa_hsub_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}

static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}

static void gen_msa_elm_df(CPUMIPSState *env, DisasContext *ctx, uint32_t df,
                           uint32_t n)
{
#define MASK_MSA_ELM(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tn = tcg_const_i32(n);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_ELM(ctx->opcode)) {
    case OPC_SLDI_df:
        gen_helper_msa_sldi_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_SPLATI_df:
        gen_helper_msa_splati_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_INSVE_df:
        gen_helper_msa_insve_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_COPY_S_df:
    case OPC_COPY_U_df:
    case OPC_INSERT_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }

        if ((MASK_MSA_ELM(ctx->opcode) == OPC_COPY_U_df) &&
            (df == DF_WORD)) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        switch (MASK_MSA_ELM(ctx->opcode)) {
        case OPC_COPY_S_df:
            if (likely(wd != 0)) {
                switch (df) {
                case DF_BYTE:
                    gen_helper_msa_copy_s_b(cpu_env, twd, tws, tn);
                    break;
                case DF_HALF:
                    gen_helper_msa_copy_s_h(cpu_env, twd, tws, tn);
                    break;
                case DF_WORD:
                    gen_helper_msa_copy_s_w(cpu_env, twd, tws, tn);
                    break;
#if defined(TARGET_MIPS64)
                case DF_DOUBLE:
                    gen_helper_msa_copy_s_d(cpu_env, twd, tws, tn);
                    break;
#endif
                }
            }
            break;
        case OPC_COPY_U_df:
            if (likely(wd != 0)) {
                switch (df) {
                case DF_BYTE:
                    gen_helper_msa_copy_u_b(cpu_env, twd, tws, tn);
                    break;
                case DF_HALF:
                    gen_helper_msa_copy_u_h(cpu_env, twd, tws, tn);
                    break;
#if defined(TARGET_MIPS64)
                case DF_WORD:
                    gen_helper_msa_copy_u_w(cpu_env, twd, tws, tn);
                    break;
#endif
                }
            }
            break;
        case OPC_INSERT_df:
            switch (df) {
            case DF_BYTE:
                gen_helper_msa_insert_b(cpu_env, twd, tws, tn);
                break;
            case DF_HALF:
                gen_helper_msa_insert_h(cpu_env, twd, tws, tn);
                break;
            case DF_WORD:
                gen_helper_msa_insert_w(cpu_env, twd, tws, tn);
                break;
#if defined(TARGET_MIPS64)
            case DF_DOUBLE:
                gen_helper_msa_insert_d(cpu_env, twd, tws, tn);
                break;
#endif
            }
            break;
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(tn);
    tcg_temp_free_i32(tdf);
}

static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;
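
    /*
     * The 6-bit df/n field uses a prefix encoding: the length of the leading
     * run of one bits selects the data format (no leading ones = byte, one =
     * half, two = word, three = double) and the remaining low bits give the
     * element index n.  The all-but-one pattern 0x3E selects the register
     * move/control group handled by gen_msa_elm_3e().
     */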
    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}

static void gen_msa_3rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_3RF(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t df = (ctx->opcode >> 21) & 0x1;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_temp_new_i32();

    /* adjust df value for floating-point instruction */
    tcg_gen_movi_i32(tdf, df + 2);
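    /*
     * In the 3RF group the single df bit selects between the word and double
     * floating-point formats, hence the bias of 2 (DF_WORD/DF_DOUBLE).  The
     * fixed-point Q-format cases below instead use df + 1 (DF_HALF/DF_WORD)
     * before calling their helpers.
     */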
    switch (MASK_MSA_3RF(ctx->opcode)) {
    case OPC_FCAF_df:
        gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FADD_df:
        gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUN_df:
        gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUB_df:
        gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCOR_df:
        gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCEQ_df:
        gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMUL_df:
        gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUNE_df:
        gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUEQ_df:
        gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FDIV_df:
        gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCNE_df:
        gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLT_df:
        gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMADD_df:
        gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MUL_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULT_df:
        gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMSUB_df:
        gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADD_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLE_df:
        gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUB_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULE_df:
        gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXP2_df:
        gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSAF_df:
        gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXDO_df:
        gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUN_df:
        gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSOR_df:
        gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSEQ_df:
        gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FTQ_df:
        gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUNE_df:
        gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUEQ_df:
        gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSNE_df:
        gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLT_df:
        gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_df:
        gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MULR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULT_df:
        gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_A_df:
        gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADDR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLE_df:
        gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_df:
        gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUBR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULE_df:
        gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_A_df:
        gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}

static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2R(op)     (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                             (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
    case OPC_FILL_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        break;
    case OPC_PCNT_df:
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLOC_df:
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLZC_df:
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}

static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2RF(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                             (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FSQRT_df:
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRCP_df:
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRINT_df:
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FLOG2_df:
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQL_df:
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQR_df:
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}

static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}

static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
{
    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
    case OPC_OR_V:
    case OPC_NOR_V:
    case OPC_XOR_V:
    case OPC_BMNZ_V:
    case OPC_BMZ_V:
    case OPC_BSEL_V:
        gen_msa_vec_v(env, ctx);
        break;
    case OPC_MSA_2R:
        gen_msa_2r(env, ctx);
        break;
    case OPC_MSA_2RF:
        gen_msa_2rf(env, ctx);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;

    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);
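            /*
             * The 10-bit signed offset s10 is expressed in vector elements,
             * so it is scaled by the element size (1 << df bytes) above
             * before being added to GPR rs.
             */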
            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->base.pc_next & 0x3) {
        env->CP0_BadVAddr = ctx->base.pc_next;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

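    /*
     * For branch-likely instructions the delay slot is only executed when
     * the branch is taken, so when bcond evaluates to zero the delay-slot
     * instruction must be skipped: the block below jumps straight past it
     * to the following instruction in a new TB.
     */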
    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->base.pc_next + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
#if defined(TARGET_MIPS64)
        if ((ctx->insn_flags & INSN_R5900) && (ctx->insn_flags & ASE_MMI)) {
            decode_mmi(env, ctx);
        } else
#endif
        if (ctx->insn_flags & ASE_MXU) {
            decode_opc_mxu(env, ctx);
        } else {
            decode_opc_special2_legacy(env, ctx);
        }
        break;
    case OPC_SPECIAL3:
#if defined(TARGET_MIPS64)
        if (ctx->insn_flags & INSN_R5900) {
            decode_mmi_sq(env, ctx);    /* MMI_OPC_SQ */
        } else {
            decode_opc_special3(env, ctx);
        }
#else
        decode_opc_special3(env, ctx);
#endif
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI: /* REGIMM traps */
        case OPC_TGEIU:
        case OPC_TLTI:
        case OPC_TLTIU:
        case OPC_TEQI:
        case OPC_TNEI:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /*
             * Break the TB to be able to sync copied instructions
             * immediately.
             */
            ctx->base.is_jmp = DISAS_STOP;
            break;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default:            /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0:
        case OPC_C0_1:
        case OPC_C0_2:
        case OPC_C0_3:
        case OPC_C0_4:
        case OPC_C0_5:
        case OPC_C0_6:
        case OPC_C0_7:
        case OPC_C0_8:
        case OPC_C0_9:
        case OPC_C0_A:
        case OPC_C0_B:
        case OPC_C0_C:
        case OPC_C0_D:
        case OPC_C0_E:
        case OPC_C0_F:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                uint32_t op2;
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_cp0_mt(ctx);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_cp0_mt(ctx);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_cp0_mt(ctx);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_cp0_mt(ctx);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /*
                     * Stop translation as we may have switched
                     * the execution mode.
                     */
                    ctx->base.is_jmp = DISAS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /*
                     * DISAS_STOP isn't sufficient, we need to ensure we break
                     * out of translated code to check for pending interrupts.
                     */
                    gen_save_pc(ctx->base.pc_next + 4);
                    ctx->base.is_jmp = DISAS_EXIT;
                    break;
                default:            /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            MIPS_INVAL("cp0");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J: /* Jump */
    case OPC_JAL:
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        }
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB:
    case OPC_LH:
    case OPC_LW:
    case OPC_LWPC:
    case OPC_LBU:
    case OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SB:
    case OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        }
        gen_st_cond(ctx, rt, rs, imm, MO_TESL, false);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->insn_flags & INSN_R5900) {
            /* Treat as NOP. */
        } else {
            check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
            /* Treat as NOP. */
        }
        break;
    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;

    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            /* fall through */
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_cop1x(ctx);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_cop1x(ctx);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_PS_FMT:
            check_ps(ctx);
            /* fall through */
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
            {
                int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
                check_cp1_enabled(ctx);
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    switch (r6_op) {
                    case R6_OPC_CMP_AF_S:
                    case R6_OPC_CMP_UN_S:
                    case R6_OPC_CMP_EQ_S:
                    case R6_OPC_CMP_UEQ_S:
                    case R6_OPC_CMP_LT_S:
                    case R6_OPC_CMP_ULT_S:
                    case R6_OPC_CMP_LE_S:
                    case R6_OPC_CMP_ULE_S:
                    case R6_OPC_CMP_SAF_S:
                    case R6_OPC_CMP_SUN_S:
                    case R6_OPC_CMP_SEQ_S:
                    case R6_OPC_CMP_SEUQ_S:
                    case R6_OPC_CMP_SLT_S:
                    case R6_OPC_CMP_SULT_S:
                    case R6_OPC_CMP_SLE_S:
                    case R6_OPC_CMP_SULE_S:
                    case R6_OPC_CMP_OR_S:
                    case R6_OPC_CMP_UNE_S:
                    case R6_OPC_CMP_NE_S:
                    case R6_OPC_CMP_SOR_S:
                    case R6_OPC_CMP_SUNE_S:
                    case R6_OPC_CMP_SNE_S:
                        gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                        break;
                    case R6_OPC_CMP_AF_D:
                    case R6_OPC_CMP_UN_D:
                    case R6_OPC_CMP_EQ_D:
                    case R6_OPC_CMP_UEQ_D:
                    case R6_OPC_CMP_LT_D:
                    case R6_OPC_CMP_ULT_D:
                    case R6_OPC_CMP_LE_D:
                    case R6_OPC_CMP_ULE_D:
                    case R6_OPC_CMP_SAF_D:
                    case R6_OPC_CMP_SUN_D:
                    case R6_OPC_CMP_SEQ_D:
                    case R6_OPC_CMP_SEUQ_D:
                    case R6_OPC_CMP_SLT_D:
                    case R6_OPC_CMP_SULT_D:
                    case R6_OPC_CMP_SLE_D:
                    case R6_OPC_CMP_SULE_D:
                    case R6_OPC_CMP_OR_D:
                    case R6_OPC_CMP_UNE_D:
                    case R6_OPC_CMP_NE_D:
                    case R6_OPC_CMP_SOR_D:
                    case R6_OPC_CMP_SUNE_D:
                    case R6_OPC_CMP_SNE_D:
                        gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                        break;
                    default:
                        gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                                   rt, rd, sa, (imm >> 8) & 0x7);
                        break;
                    }
                } else {
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                               (imm >> 8) & 0x7);
                }
            }
            break;
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;

    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;

#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LLD:
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        }
        /* fall through */
    case OPC_LDL:
    case OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL:
    case OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        }
        check_mips_64(ctx);
        gen_st_cond(ctx, rt, rs, imm, MO_TEQ, false);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        if (ctx->insn_flags & INSN_R5900) {
#if defined(TARGET_MIPS64)
            gen_mmi_lq(env, ctx);    /* MMI_OPC_LQ */
#endif
        } else {
            /* MDMX: Not implemented. */
            gen_msa(env, ctx);
        }
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->base.pc_next, rs);
        break;
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config2 = env->CP0_Config2;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;
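    /*
     * MIPS32/MIPS64 Release 6 requires the core to handle unaligned scalar
     * loads and stores, so R6 translations use MO_UNALN; earlier ISAs keep
     * MO_ALIGN and take the Address Error path on misaligned accesses.
     */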

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}

static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}

static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}

static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /*
     * The address covered by the breakpoint must be included in
     * [tb->pc, tb->pc + tb->size) in order for it to be properly
     * cleared -- thus we increment the PC here so that the logic
     * setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;
}

static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /*
             * Force to generate branch as there is neither delay nor
             * forbidden slot.
             */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /*
             * Force to generate branch as microMIPS R6 doesn't restrict
             * branches in the forbidden slot.
             */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /*
     * Execute a branch and its delay slot as a single instruction.
     * This is what GDB expects and is consistent with what the
     * hardware does (e.g. if a delay slot instruction faults, the
     * reported PC is the PC of the branch).
     */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}

static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx->base.is_jmp) {
        case DISAS_STOP:
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
            break;
        case DISAS_NEXT:
        case DISAS_TOO_MANY:
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            break;
        case DISAS_EXIT:
            tcg_gen_exit_tb(NULL, 0);
            break;
        case DISAS_NORETURN:
            break;
        default:
            g_assert_not_reached();
        }
    }
}

static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb, max_insns);
}

static void fpu_dump_state(CPUMIPSState *env, FILE * f, int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64) {                                                 \
            qemu_fprintf(f, "w:%08x d:%016" PRIx64                      \
                         " fd:%13g fs:%13g psu: %13g\n",                \
                         (fp)->w[FP_ENDIAN_IDX], (fp)->d,               \
                         (double)(fp)->fd,                              \
                         (double)(fp)->fs[FP_ENDIAN_IDX],               \
                         (double)(fp)->fs[!FP_ENDIAN_IDX]);             \
        } else {                                                        \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            qemu_fprintf(f, "w:%08x d:%016" PRIx64                      \
                         " fd:%13g fs:%13g psu:%13g\n",                 \
                         tmp.w[FP_ENDIAN_IDX], tmp.d,                   \
                         (double)tmp.fd,                                \
                         (double)tmp.fs[FP_ENDIAN_IDX],                 \
                         (double)tmp.fs[!FP_ENDIAN_IDX]);               \
        }                                                               \
    } while (0)

    qemu_fprintf(f,
                 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                 get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        qemu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}

void mips_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    qemu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                 " LO=0x" TARGET_FMT_lx " ds %04x "
                 TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                 env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            qemu_fprintf(f, "GPR%02d:", i);
        }
        qemu_fprintf(f, " %s " TARGET_FMT_lx,
                     regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            qemu_fprintf(f, "\n");
        }
    }

    qemu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x"
                 TARGET_FMT_lx "\n",
                 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    qemu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                 PRIx64 "\n",
                 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
    qemu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                 env->CP0_Config2, env->CP0_Config3);
    qemu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                 env->CP0_Config4, env->CP0_Config5);
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, flags);
    }
}

void mips_tcg_init(void)
{
    int i;

    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /*
         * The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers.
         */
        fpu_f64[i] = msa_wr_d[i * 2];
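        /*
         * Note that only the even 64-bit halves (wr.d[0]) alias the scalar
         * FPU registers; the odd halves (wr.d[1]) allocated below are
         * MSA-only state.
         */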
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState,
                                              active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
    cpu_lladdr = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, lladdr),
                                    "lladdr");
    cpu_llval = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, llval),
                                   "llval");

#if defined(TARGET_MIPS64)
    for (i = 1; i < 32; i++) {
        cpu_mmr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUMIPSState,
                                                     active_tc.mmr[i]),
                                            regnames[i]);
    }
#endif

#if !defined(TARGET_MIPS64)
    for (i = 0; i < NUMBER_OF_MXU_REGISTERS - 1; i++) {
        mxu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.mxu_gpr[i]),
                                        mxuregnames[i]);
    }

    mxu_CR = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.mxu_cr),
                                mxuregnames[NUMBER_OF_MXU_REGISTERS - 1]);
#endif
}

#include "translate_init.inc.c"

void cpu_mips_realize_env(CPUMIPSState *env)
{
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}

bool cpu_supports_cps_smp(const char *cpu_type)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}

bool cpu_supports_isa(const char *cpu_type, uint64_t isa)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->insn_flags & isa) != 0;
}

void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));

    vp->env.exception_base = address;
}

void cpu_state_reset(CPUMIPSState *env)
{
    CPUState *cs = env_cpu(env);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /*
     * Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
     * hardware registers.
     */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /*
         * If the exception was raised from a delay slot,
         * come back to the jump.
         */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= (int32_t)0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /*
     * Vectored interrupts not implemented, timer on int 7,
     * no performance counters.
     */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 = (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 = (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                       (2 << CP0SC_C);
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 = (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                       (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
#endif /* !CONFIG_USER_ONLY */
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if (env->insn_flags & ISA_MIPS32R6) {
        env->CP0_PWSize = 0x40;
#if defined(TARGET_MIPS64)
        env->CP0_PWField = 0x0C30C302;
#else
        env->CP0_PWField = 0x02;
#endif
    }

    if (env->CP0_Config3 & (1 << CP0C3_ISA) & (1 << (CP0C3_ISA + 1))) {
        /* microMIPS on reset when Config3.ISA is 3 */
        env->hflags |= MIPS_HFLAG_M16;
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}

void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
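    /*
     * data[] mirrors the values recorded by tcg_gen_insn_start() in
     * mips_tr_insn_start(): data[0] is the PC, data[1] the branch-related
     * hflags, and data[2] the branch target.
     */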
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}