/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"
#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
472 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* SHLL.OB selector: SPECIAL3 opcode bits plus bits 10..6 of the insn word. */
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
/*
 * Coprocessor 0 selector: major opcode bits plus the rs field
 * (bits 25..21).  The whole expansion is parenthesized so the macro
 * remains safe inside larger expressions (the original unguarded `|`
 * could bind wrongly against operators such as == or ?:).
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
906 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
907 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
908 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
909 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
910 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
911 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
912 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
913 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
914 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
915 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
916 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
917 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
918 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
919 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * MFMC0 selector: CP0 selector bits plus the low 16 bits of the insn.
 * Expansion fully parenthesized so the macro is safe in wider
 * expressions (the bare trailing `| (op & 0xFFFF)` was not).
 */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
926 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
927 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
928 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
929 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
930 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
931 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
932 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
933 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
936 /* Coprocessor 0 (with rs == C0) */
/*
 * C0 selector (rs == C0): CP0 selector bits plus the function field
 * (bits 5..0).  Fully parenthesized for safe use in larger expressions.
 */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
940 OPC_TLBR
= 0x01 | OPC_C0
,
941 OPC_TLBWI
= 0x02 | OPC_C0
,
942 OPC_TLBINV
= 0x03 | OPC_C0
,
943 OPC_TLBINVF
= 0x04 | OPC_C0
,
944 OPC_TLBWR
= 0x06 | OPC_C0
,
945 OPC_TLBP
= 0x08 | OPC_C0
,
946 OPC_RFE
= 0x10 | OPC_C0
,
947 OPC_ERET
= 0x18 | OPC_C0
,
948 OPC_DERET
= 0x1F | OPC_C0
,
949 OPC_WAIT
= 0x20 | OPC_C0
,
952 /* Coprocessor 1 (rs field) */
/*
 * Coprocessor 1 selector: major opcode bits plus the rs field
 * (bits 25..21).  Fully parenthesized for safe use in larger
 * expressions.
 */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
955 /* Values for the fmt field in FP instructions */
957 /* 0 - 15 are reserved */
958 FMT_S
= 16, /* single fp */
959 FMT_D
= 17, /* double fp */
960 FMT_E
= 18, /* extended fp */
961 FMT_Q
= 19, /* quad fp */
962 FMT_W
= 20, /* 32-bit fixed */
963 FMT_L
= 21, /* 64-bit fixed */
964 FMT_PS
= 22, /* paired single fp */
965 /* 23 - 31 are reserved */
969 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
970 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
971 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
972 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
973 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
974 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
975 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
976 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
977 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
978 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
979 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
980 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
981 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
982 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
983 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
984 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
985 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
986 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
987 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
988 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
989 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
991 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
992 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
993 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
994 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
995 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
996 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
997 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
998 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * CP1 function selector: CP1 selector bits plus the function field
 * (bits 5..0).  Fully parenthesized for safe use in larger expressions.
 */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * BC1 selector: CP1 selector bits plus the nd/tf condition bits
 * (bits 17..16).  Fully parenthesized for safe use in larger
 * expressions.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1005 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1006 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1007 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1008 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1012 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1013 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1017 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1018 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * Coprocessor 2 selector: major opcode bits plus the rs field
 * (bits 25..21).  Fully parenthesized for safe use in larger
 * expressions.
 */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1024 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1025 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1026 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1027 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1028 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1029 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1030 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1031 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1032 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1033 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1034 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* Loongson MMI selector: major opcode, rs field (25..21) and function (5..0). */
1037 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1040 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1041 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1042 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1043 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1044 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1045 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1046 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1047 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1050 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1051 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1052 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1053 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1054 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1055 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1056 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1059 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1060 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1061 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1062 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1063 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1064 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1065 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1068 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1069 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1070 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1071 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1072 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1073 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1074 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1077 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1078 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1079 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1080 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1081 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1083 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1084 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1085 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1086 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1087 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1088 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1090 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1091 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1092 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1093 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1094 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1095 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1097 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1098 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1099 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1100 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1101 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1102 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1104 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1105 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1106 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1107 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1108 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1109 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1111 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1112 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1113 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1114 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1115 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1116 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1118 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1119 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1120 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1121 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1122 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1123 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1125 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1126 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1127 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1128 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1129 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1130 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/*
 * Coprocessor 3 selector: major opcode bits plus the function field
 * (bits 5..0).  Fully parenthesized for safe use in larger
 * expressions.
 */
#define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1137 OPC_LWXC1
= 0x00 | OPC_CP3
,
1138 OPC_LDXC1
= 0x01 | OPC_CP3
,
1139 OPC_LUXC1
= 0x05 | OPC_CP3
,
1140 OPC_SWXC1
= 0x08 | OPC_CP3
,
1141 OPC_SDXC1
= 0x09 | OPC_CP3
,
1142 OPC_SUXC1
= 0x0D | OPC_CP3
,
1143 OPC_PREFX
= 0x0F | OPC_CP3
,
1144 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1145 OPC_MADD_S
= 0x20 | OPC_CP3
,
1146 OPC_MADD_D
= 0x21 | OPC_CP3
,
1147 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1148 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1149 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1150 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1151 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1152 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1153 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1154 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1155 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1156 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* MSA minor-opcode selector: major opcode bits plus the minor field (5..0). */
1160 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1162 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1163 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1164 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1165 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1166 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1167 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1168 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1169 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1170 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1171 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1172 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1173 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1174 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1175 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1176 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1177 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1178 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1179 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1180 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1181 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1182 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1184 /* MI10 instruction */
1185 OPC_LD_B
= (0x20) | OPC_MSA
,
1186 OPC_LD_H
= (0x21) | OPC_MSA
,
1187 OPC_LD_W
= (0x22) | OPC_MSA
,
1188 OPC_LD_D
= (0x23) | OPC_MSA
,
1189 OPC_ST_B
= (0x24) | OPC_MSA
,
1190 OPC_ST_H
= (0x25) | OPC_MSA
,
1191 OPC_ST_W
= (0x26) | OPC_MSA
,
1192 OPC_ST_D
= (0x27) | OPC_MSA
,
1196 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1197 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1198 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1199 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1200 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1201 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1202 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1203 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1204 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1205 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1206 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1207 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1208 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1210 /* I8 instruction */
1211 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1212 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1213 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1214 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1215 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1216 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1217 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1218 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1219 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1220 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1222 /* VEC/2R/2RF instruction */
1223 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1224 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1225 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1226 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1227 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1228 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1229 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1231 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1232 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1234 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1235 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1236 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1237 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1238 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1240 /* 2RF instruction df(bit 16) = _w, _d */
1241 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1242 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1243 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1244 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1245 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1246 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1247 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1248 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1249 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1250 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1251 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1252 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1253 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1254 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1255 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1256 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1258 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1259 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1260 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1261 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1262 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1263 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1264 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1265 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1266 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1267 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1268 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1269 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1270 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1272 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1274 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1275 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1278 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1279 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1281 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1282 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1283 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1285 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1290 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1292 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1293 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1294 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1295 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1296 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1297 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1298 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1299 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1300 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1301 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1310 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1312 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1313 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1314 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1315 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1316 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1317 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1318 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1319 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1320 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1321 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1323 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1324 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1325 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1326 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1327 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1328 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1329 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1330 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1331 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1332 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 /* 3RF instruction _df(bit 21) = _w, _d */
1335 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1336 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1337 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1338 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1339 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1340 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1341 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1342 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1343 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1346 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1357 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1358 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1362 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1367 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1370 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1373 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1377 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1378 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1379 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1380 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1381 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1382 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1383 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1384 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1385 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1386 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1387 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1388 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1389 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1394 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1395 * ============================================
1397 * MXU (full name: MIPS eXtension/enhanced Unit) is an SIMD extension of MIPS32
1398 * instructions set. It is designed to fit the needs of signal, graphical and
1399 * video processing applications. MXU instruction set is used in Xburst family
1400 * of microprocessors by Ingenic.
1402 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1403 * the control register.
1405 * The notation used in MXU assembler mnemonics:
1407 * XRa, XRb, XRc, XRd - MXU registers
1408 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1409 * s12 - a subfield of an instruction code
1410 * strd2 - a subfield of an instruction code
1411 * eptn2 - a subfield of an instruction code
1412 * eptn3 - a subfield of an instruction code
1413 * optn2 - a subfield of an instruction code
1414 * optn3 - a subfield of an instruction code
1415 * sft4 - a subfield of an instruction code
1417 * Load/Store instructions Multiplication instructions
1418 * ----------------------- ---------------------------
1420 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1421 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1422 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1423 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1424 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1425 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1426 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1427 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1428 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1429 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1430 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1431 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1432 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1433 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1434 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1435 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1436 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1437 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1438 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1439 * S16SDI XRa, Rb, s10, eptn2
1440 * S8LDD XRa, Rb, s8, eptn3
1441 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1442 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1443 * S8SDI XRa, Rb, s8, eptn3
1444 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1445 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1446 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1447 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1448 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1449 * S32CPS XRa, XRb, XRc
1450 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1451 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1452 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1453 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1454 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1455 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1456 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1457 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1458 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1459 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1460 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1461 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1462 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1463 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1464 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1465 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1466 * Q8SLT XRa, XRb, XRc
1467 * Q8SLTU XRa, XRb, XRc
1468 * Q8MOVZ XRa, XRb, XRc Shift instructions
1469 * Q8MOVN XRa, XRb, XRc ------------------
1471 * D32SLL XRa, XRb, XRc, XRd, sft4
1472 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1473 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1474 * D32SARL XRa, XRb, XRc, sft4
1475 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1476 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1477 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1478 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1479 * Q16SLL XRa, XRb, XRc, XRd, sft4
1480 * Q16SLR XRa, XRb, XRc, XRd, sft4
1481 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1482 * ------------------------- Q16SLLV XRa, XRb, Rb
1483 * Q16SLRV XRa, XRb, Rb
1484 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1485 * S32ALN XRa, XRb, XRc, Rb
1486 * S32ALNI XRa, XRb, XRc, s3
1487 * S32LUI XRa, s8, optn3 Move instructions
1488 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1489 * S32EXTRV XRa, XRb, Rs, Rt
1490 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1491 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1497 * ┌─ 000000 ─ OPC_MXU_S32MADD
1498 * ├─ 000001 ─ OPC_MXU_S32MADDU
1499 * ├─ 000010 ─ <not assigned>
1501 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1502 * │ ├─ 001 ─ OPC_MXU_S32MIN
1503 * │ ├─ 010 ─ OPC_MXU_D16MAX
1504 * │ ├─ 011 ─ OPC_MXU_D16MIN
1505 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1506 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1507 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1508 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1509 * ├─ 000100 ─ OPC_MXU_S32MSUB
1510 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1511 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1512 * │ ├─ 001 ─ OPC_MXU_D16SLT
1513 * │ ├─ 010 ─ OPC_MXU_D16AVG
1514 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1515 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1516 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1517 * │ └─ 111 ─ OPC_MXU_Q8ADD
1520 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1521 * │ ├─ 010 ─ OPC_MXU_D16CPS
1522 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1523 * │ └─ 110 ─ OPC_MXU_Q16SAT
1524 * ├─ 001000 ─ OPC_MXU_D16MUL
1526 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1527 * │ └─ 01 ─ OPC_MXU_D16MULE
1528 * ├─ 001010 ─ OPC_MXU_D16MAC
1529 * ├─ 001011 ─ OPC_MXU_D16MACF
1530 * ├─ 001100 ─ OPC_MXU_D16MADL
1532 * ├─ 001101 ─ OPC_MXU__POOL04 ─┬─ 00 ─ OPC_MXU_S16MAD
1533 * │ └─ 01 ─ OPC_MXU_S16MAD_1
1534 * ├─ 001110 ─ OPC_MXU_Q16ADD
1535 * ├─ 001111 ─ OPC_MXU_D16MACE
1537 * ├─ 010000 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32LDD
1538 * │ └─ 1 ─ OPC_MXU_S32LDDR
1541 * ├─ 010001 ─ OPC_MXU__POOL06 ─┬─ 0 ─ OPC_MXU_S32STD
1542 * │ └─ 1 ─ OPC_MXU_S32STDR
1545 * ├─ 010010 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1546 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1549 * ├─ 010011 ─ OPC_MXU__POOL08 ─┬─ 0000 ─ OPC_MXU_S32STDV
1550 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1553 * ├─ 010100 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32LDI
1554 * │ └─ 1 ─ OPC_MXU_S32LDIR
1557 * ├─ 010101 ─ OPC_MXU__POOL10 ─┬─ 0 ─ OPC_MXU_S32SDI
1558 * │ └─ 1 ─ OPC_MXU_S32SDIR
1561 * ├─ 010110 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1562 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1565 * ├─ 010111 ─ OPC_MXU__POOL12 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1566 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1567 * ├─ 011000 ─ OPC_MXU_D32ADD
1569 * MXU ├─ 011001 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_D32ACC
1570 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1571 * │ └─ 10 ─ OPC_MXU_D32ASUM
1572 * ├─ 011010 ─ <not assigned>
1574 * ├─ 011011 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q16ACC
1575 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1576 * │ └─ 10 ─ OPC_MXU_D16ASUM
1579 * ├─ 011100 ─ OPC_MXU__POOL15 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1580 * │ ├─ 01 ─ OPC_MXU_D8SUM
1581 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1582 * ├─ 011110 ─ <not assigned>
1583 * ├─ 011111 ─ <not assigned>
1584 * ├─ 100000 ─ <not assigned>
1585 * ├─ 100001 ─ <not assigned>
1586 * ├─ 100010 ─ OPC_MXU_S8LDD
1587 * ├─ 100011 ─ OPC_MXU_S8STD
1588 * ├─ 100100 ─ OPC_MXU_S8LDI
1589 * ├─ 100101 ─ OPC_MXU_S8SDI
1591 * ├─ 100110 ─ OPC_MXU__POOL16 ─┬─ 00 ─ OPC_MXU_S32MUL
1592 * │ ├─ 01 ─ OPC_MXU_S32MULU
1593 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1594 * │ └─ 11 ─ OPC_MXU_S32EXTRV
1597 * ├─ 100111 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_D32SARW
1598 * │ ├─ 001 ─ OPC_MXU_S32ALN
1599 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1600 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1601 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1602 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1603 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1604 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1605 * ├─ 101000 ─ <not assigned>
1606 * ├─ 101001 ─ <not assigned>
1607 * ├─ 101010 ─ <not assigned>
1608 * ├─ 101011 ─ <not assigned>
1609 * ├─ 101100 ─ <not assigned>
1610 * ├─ 101101 ─ <not assigned>
1611 * ├─ 101110 ─ OPC_MXU_S32M2I
1612 * ├─ 101111 ─ OPC_MXU_S32I2M
1613 * ├─ 110000 ─ OPC_MXU_D32SLL
1614 * ├─ 110001 ─ OPC_MXU_D32SLR
1615 * ├─ 110010 ─ OPC_MXU_D32SARL
1616 * ├─ 110011 ─ OPC_MXU_D32SAR
1617 * ├─ 110100 ─ OPC_MXU_Q16SLL
1618 * ├─ 110101 ─ OPC_MXU_Q16SLR 20..18
1619 * ├─ 110110 ─ OPC_MXU__POOL18 ─┬─ 000 ─ OPC_MXU_D32SLLV
1620 * │ ├─ 001 ─ OPC_MXU_D32SLRV
1621 * │ ├─ 010 ─ OPC_MXU_D32SARV
1622 * │ ├─ 011 ─ OPC_MXU_Q16SLLV
1623 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1624 * │ └─ 101 ─ OPC_MXU_Q16SARV
1625 * ├─ 110111 ─ OPC_MXU_Q16SAR
1627 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1628 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1631 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1632 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1633 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1634 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1635 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1636 * │ └─ 101 ─ OPC_MXU_S32MOVN
1639 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1640 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1641 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1642 * ├─ 111100 ─ OPC_MXU_Q8MADL
1643 * ├─ 111101 ─ OPC_MXU_S32SFL
1644 * ├─ 111110 ─ OPC_MXU_Q8SAD
1645 * └─ 111111 ─ <not assigned>
1650 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1651 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
/*
 * MXU major minor-opcode values (bits 5..0 of the instruction word).
 * OPC_MXU__POOLxx entries select further sub-decoding pools.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    /* not assigned 0x02 */
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU__POOL04  = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL05  = 0x10,
    OPC_MXU__POOL06  = 0x11,
    OPC_MXU__POOL07  = 0x12,
    OPC_MXU__POOL08  = 0x13,
    OPC_MXU__POOL09  = 0x14,
    OPC_MXU__POOL10  = 0x15,
    OPC_MXU__POOL11  = 0x16,
    OPC_MXU__POOL12  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL13  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL14  = 0x1B,
    OPC_MXU__POOL15  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL16  = 0x26,
    OPC_MXU__POOL17  = 0x27,
    /* NOTE(review): 0x28 slot reconstructed as LXB per the opcode tree
     * comment above — confirm against the MXU programming manual. */
    OPC_MXU_LXB      = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
};
/* MXU pool 00 minor opcodes */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU pool 01 minor opcodes */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU pool 02 minor opcodes */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU pool 03 minor opcodes */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU pool 04 minor opcodes */
enum {
    OPC_MXU_S16MAD   = 0x00,
    OPC_MXU_S16MAD_1 = 0x01,
};

/* MXU pool 05 minor opcodes */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU pool 06 minor opcodes */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU pool 07 minor opcodes */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU pool 08 minor opcodes */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU pool 09 minor opcodes */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU pool 10 minor opcodes */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU pool 11 minor opcodes */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU pool 12 minor opcodes */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU pool 13 minor opcodes */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU pool 14 minor opcodes */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU pool 15 minor opcodes */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU pool 16 minor opcodes */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU pool 17 minor opcodes */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32NOR   = 0x03,
    OPC_MXU_S32AND   = 0x04,
    OPC_MXU_S32OR    = 0x05,
    OPC_MXU_S32XOR   = 0x06,
    OPC_MXU_S32LUI   = 0x07,
};

/* MXU pool 18 minor opcodes */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU pool 19 minor opcodes */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU pool 20 minor opcodes */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/* MXU pool 21 minor opcodes */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
1931 * Overview of the TX79-specific instruction set
1932 * =============================================
1934 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1935 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1936 * instructions and certain multimedia instructions (MMIs). These MMIs
1937 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1938 * or sixteen 8-bit paths.
1942 * The Toshiba TX System RISC TX79 Core Architecture manual,
1943 * https://wiki.qemu.org/File:C790.pdf
1945 * Three-Operand Multiply and Multiply-Add (4 instructions)
1946 * --------------------------------------------------------
1947 * MADD [rd,] rs, rt Multiply/Add
1948 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1949 * MULT [rd,] rs, rt Multiply (3-operand)
1950 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1952 * Multiply Instructions for Pipeline 1 (10 instructions)
1953 * ------------------------------------------------------
1954 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1955 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1956 * DIV1 rs, rt Divide Pipeline 1
1957 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1958 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1959 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1960 * MFHI1 rd Move From HI1 Register
1961 * MFLO1 rd Move From LO1 Register
1962 * MTHI1 rs Move To HI1 Register
1963 * MTLO1 rs Move To LO1 Register
1965 * Arithmetic (19 instructions)
1966 * ----------------------------
1967 * PADDB rd, rs, rt Parallel Add Byte
1968 * PSUBB rd, rs, rt Parallel Subtract Byte
1969 * PADDH rd, rs, rt Parallel Add Halfword
1970 * PSUBH rd, rs, rt Parallel Subtract Halfword
1971 * PADDW rd, rs, rt Parallel Add Word
1972 * PSUBW rd, rs, rt Parallel Subtract Word
1973 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1974 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1975 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1976 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1977 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1978 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1979 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1980 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1981 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1982 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1983 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1984 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1985 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1987 * Min/Max (4 instructions)
1988 * ------------------------
1989 * PMAXH rd, rs, rt Parallel Maximum Halfword
1990 * PMINH rd, rs, rt Parallel Minimum Halfword
1991 * PMAXW rd, rs, rt Parallel Maximum Word
1992 * PMINW rd, rs, rt Parallel Minimum Word
1994 * Absolute (2 instructions)
1995 * -------------------------
1996 * PABSH rd, rt Parallel Absolute Halfword
1997 * PABSW rd, rt Parallel Absolute Word
1999 * Logical (4 instructions)
2000 * ------------------------
2001 * PAND rd, rs, rt Parallel AND
2002 * POR rd, rs, rt Parallel OR
2003 * PXOR rd, rs, rt Parallel XOR
2004 * PNOR rd, rs, rt Parallel NOR
2006 * Shift (9 instructions)
2007 * ----------------------
2008 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2009 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2010 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2011 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2012 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2013 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2014 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2015 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2016 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2018 * Compare (6 instructions)
2019 * ------------------------
2020 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2021 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2022 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2023 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2024 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2025 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2027 * LZC (1 instruction)
2028 * -------------------
2029 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2031 * Quadword Load and Store (2 instructions)
2032 * ----------------------------------------
2033 * LQ rt, offset(base) Load Quadword
2034 * SQ rt, offset(base) Store Quadword
2036 * Multiply and Divide (19 instructions)
2037 * -------------------------------------
2038 * PMULTW rd, rs, rt Parallel Multiply Word
2039 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2040 * PDIVW rs, rt Parallel Divide Word
2041 * PDIVUW rs, rt Parallel Divide Unsigned Word
2042 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2043 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2044 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2045 * PMULTH rd, rs, rt Parallel Multiply Halfword
2046 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2047 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2048 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2049 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2050 * PDIVBW rs, rt Parallel Divide Broadcast Word
2051 * PMFHI rd Parallel Move From HI Register
2052 * PMFLO rd Parallel Move From LO Register
2053 * PMTHI rs Parallel Move To HI Register
2054 * PMTLO rs Parallel Move To LO Register
2055 * PMFHL rd Parallel Move From HI/LO Register
2056 * PMTHL rs Parallel Move To HI/LO Register
2058 * Pack/Extend (11 instructions)
2059 * -----------------------------
2060 * PPAC5 rd, rt Parallel Pack to 5 bits
2061 * PPACB rd, rs, rt Parallel Pack to Byte
2062 * PPACH rd, rs, rt Parallel Pack to Halfword
2063 * PPACW rd, rs, rt Parallel Pack to Word
2064 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2065 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2066 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2067 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2068 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2069 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2070 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2072 * Others (16 instructions)
2073 * ------------------------
2074 * PCPYH rd, rt Parallel Copy Halfword
2075 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2076 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2077 * PREVH rd, rt Parallel Reverse Halfword
2078 * PINTH rd, rs, rt Parallel Interleave Halfword
2079 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2080 * PEXEH rd, rt Parallel Exchange Even Halfword
2081 * PEXCH rd, rt Parallel Exchange Center Halfword
2082 * PEXEW rd, rt Parallel Exchange Even Word
2083 * PEXCW rd, rt Parallel Exchange Center Word
2084 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2085 * MFSA rd Move from Shift Amount Register
2086 * MTSA rs Move to Shift Amount Register
2087 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2088 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2089 * PROT3W rd, rt Parallel Rotate 3 Words
2091 * The TX79-specific Multimedia Instruction encodings
2092 * ==================================================
2094 * TX79 Multimedia Instruction encoding table keys:
2096 * * This code is reserved for future use. An attempt to execute it
2097 * causes a Reserved Instruction exception.
2098 * % This code indicates an instruction class. The instruction word
2099 * must be further decoded by examining additional tables that show
2100 * the values for other instruction fields.
2101 * # This code is reserved for the unsupported instructions DMULT,
2102 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2103 * to execute it causes a Reserved Instruction exception.
2105 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2108 * +--------+----------------------------------------+
2110 * +--------+----------------------------------------+
2112 * opcode bits 28..26
2113 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2114 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2115 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2116 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2117 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2118 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2119 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2120 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2121 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2122 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2123 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/* TX79 major opcodes occupying otherwise-unused MIPS opcode slots. */
enum {
    TX79_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    TX79_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    TX79_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};

/*
 * TX79 Multimedia Instructions with opcode field = MMI:
 *
 *  31    26                                        5      0
 * +--------+-------------------------------+--------+
 * |   MMI  |                               |function|
 * +--------+-------------------------------+--------+
 *
 *          function bits 2..0
 *     bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *     5..3 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |  MADD | MADDU |   *   |   *   | PLZCW |   *   |   *   |   *
 *    1 001 | MMI0% | MMI2% |   *   |   *   |   *   |   *   |   *   |   *
 *    2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 |   *   |   *   |   *   |   *
 *    3 011 | MULT1 | MULTU1|  DIV1 | DIVU1 |   *   |   *   |   *   |   *
 *    4 100 | MADD1 | MADDU1|   *   |   *   |   *   |   *   |   *   |   *
 *    5 101 | MMI1% | MMI3% |   *   |   *   |   *   |   *   |   *   |   *
 *    6 110 | PMFHL | PMTHL |   *   |   *   | PSLLH |   *   | PSRLH | PSRAH
 *    7 111 |   *   |   *   |   *   |   *   | PSLLW |   *   | PSRLW | PSRAW
 */

#define MASK_TX79_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))

enum {
    TX79_MMI_MADD       = 0x00 | TX79_CLASS_MMI, /* Same as OPC_MADD */
    TX79_MMI_MADDU      = 0x01 | TX79_CLASS_MMI, /* Same as OPC_MADDU */
    TX79_MMI_PLZCW      = 0x04 | TX79_CLASS_MMI,
    TX79_MMI_CLASS_MMI0 = 0x08 | TX79_CLASS_MMI,
    TX79_MMI_CLASS_MMI2 = 0x09 | TX79_CLASS_MMI,
    TX79_MMI_MFHI1      = 0x10 | TX79_CLASS_MMI, /* Same minor as OPC_MFHI */
    TX79_MMI_MTHI1      = 0x11 | TX79_CLASS_MMI, /* Same minor as OPC_MTHI */
    TX79_MMI_MFLO1      = 0x12 | TX79_CLASS_MMI, /* Same minor as OPC_MFLO */
    TX79_MMI_MTLO1      = 0x13 | TX79_CLASS_MMI, /* Same minor as OPC_MTLO */
    TX79_MMI_MULT1      = 0x18 | TX79_CLASS_MMI, /* Same minor as OPC_MULT */
    TX79_MMI_MULTU1     = 0x19 | TX79_CLASS_MMI, /* Same minor as OPC_MULTU */
    TX79_MMI_DIV1       = 0x1A | TX79_CLASS_MMI, /* Same minor as OPC_DIV */
    TX79_MMI_DIVU1      = 0x1B | TX79_CLASS_MMI, /* Same minor as OPC_DIVU */
    TX79_MMI_MADD1      = 0x20 | TX79_CLASS_MMI,
    TX79_MMI_MADDU1     = 0x21 | TX79_CLASS_MMI,
    TX79_MMI_CLASS_MMI1 = 0x28 | TX79_CLASS_MMI,
    TX79_MMI_CLASS_MMI3 = 0x29 | TX79_CLASS_MMI,
    TX79_MMI_PMFHL      = 0x30 | TX79_CLASS_MMI,
    TX79_MMI_PMTHL      = 0x31 | TX79_CLASS_MMI,
    TX79_MMI_PSLLH      = 0x34 | TX79_CLASS_MMI,
    TX79_MMI_PSRLH      = 0x36 | TX79_CLASS_MMI,
    TX79_MMI_PSRAH      = 0x37 | TX79_CLASS_MMI,
    TX79_MMI_PSLLW      = 0x3C | TX79_CLASS_MMI,
    TX79_MMI_PSRLW      = 0x3E | TX79_CLASS_MMI,
    TX79_MMI_PSRAW      = 0x3F | TX79_CLASS_MMI,
};
2184 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI0:
2187 * +--------+----------------------+--------+--------+
2188 * | MMI | |function| MMI0 |
2189 * +--------+----------------------+--------+--------+
2191 * function bits 7..6
2192 * bits | 0 | 1 | 2 | 3
2193 * 10..8 | 00 | 01 | 10 | 11
2194 * -------+-------+-------+-------+-------
2195 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2196 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2197 * 2 010 | PADDB | PSUBB | PCGTB | *
2198 * 3 011 | * | * | * | *
2199 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2200 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2201 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2202 * 7 111 | * | * | PEXT5 | PPAC5
2205 #define MASK_TX79_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2207 TX79_MMI0_PADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI0
,
2208 TX79_MMI0_PSUBW
= (0x01 << 6) | TX79_MMI_CLASS_MMI0
,
2209 TX79_MMI0_PCGTW
= (0x02 << 6) | TX79_MMI_CLASS_MMI0
,
2210 TX79_MMI0_PMAXW
= (0x03 << 6) | TX79_MMI_CLASS_MMI0
,
2211 TX79_MMI0_PADDH
= (0x04 << 6) | TX79_MMI_CLASS_MMI0
,
2212 TX79_MMI0_PSUBH
= (0x05 << 6) | TX79_MMI_CLASS_MMI0
,
2213 TX79_MMI0_PCGTH
= (0x06 << 6) | TX79_MMI_CLASS_MMI0
,
2214 TX79_MMI0_PMAXH
= (0x07 << 6) | TX79_MMI_CLASS_MMI0
,
2215 TX79_MMI0_PADDB
= (0x08 << 6) | TX79_MMI_CLASS_MMI0
,
2216 TX79_MMI0_PSUBB
= (0x09 << 6) | TX79_MMI_CLASS_MMI0
,
2217 TX79_MMI0_PCGTB
= (0x0A << 6) | TX79_MMI_CLASS_MMI0
,
2218 TX79_MMI0_PADDSW
= (0x10 << 6) | TX79_MMI_CLASS_MMI0
,
2219 TX79_MMI0_PSUBSW
= (0x11 << 6) | TX79_MMI_CLASS_MMI0
,
2220 TX79_MMI0_PEXTLW
= (0x12 << 6) | TX79_MMI_CLASS_MMI0
,
2221 TX79_MMI0_PPACW
= (0x13 << 6) | TX79_MMI_CLASS_MMI0
,
2222 TX79_MMI0_PADDSH
= (0x14 << 6) | TX79_MMI_CLASS_MMI0
,
2223 TX79_MMI0_PSUBSH
= (0x15 << 6) | TX79_MMI_CLASS_MMI0
,
2224 TX79_MMI0_PEXTLH
= (0x16 << 6) | TX79_MMI_CLASS_MMI0
,
2225 TX79_MMI0_PPACH
= (0x17 << 6) | TX79_MMI_CLASS_MMI0
,
2226 TX79_MMI0_PADDSB
= (0x18 << 6) | TX79_MMI_CLASS_MMI0
,
2227 TX79_MMI0_PSUBSB
= (0x19 << 6) | TX79_MMI_CLASS_MMI0
,
2228 TX79_MMI0_PEXTLB
= (0x1A << 6) | TX79_MMI_CLASS_MMI0
,
2229 TX79_MMI0_PPACB
= (0x1B << 6) | TX79_MMI_CLASS_MMI0
,
2230 TX79_MMI0_PEXT5
= (0x1E << 6) | TX79_MMI_CLASS_MMI0
,
2231 TX79_MMI0_PPAC5
= (0x1F << 6) | TX79_MMI_CLASS_MMI0
,
2235 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI1:
2238 * +--------+----------------------+--------+--------+
2239 * | MMI | |function| MMI1 |
2240 * +--------+----------------------+--------+--------+
2242 * function bits 7..6
2243 * bits | 0 | 1 | 2 | 3
2244 * 10..8 | 00 | 01 | 10 | 11
2245 * -------+-------+-------+-------+-------
2246 * 0 000 | * | PABSW | PCEQW | PMINW
2247 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2248 * 2 010 | * | * | PCEQB | *
2249 * 3 011 | * | * | * | *
2250 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2251 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2252 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2253 * 7 111 | * | * | * | *
2256 #define MASK_TX79_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2258 TX79_MMI1_PABSW
= (0x01 << 6) | TX79_MMI_CLASS_MMI1
,
2259 TX79_MMI1_PCEQW
= (0x02 << 6) | TX79_MMI_CLASS_MMI1
,
2260 TX79_MMI1_PMINW
= (0x03 << 6) | TX79_MMI_CLASS_MMI1
,
2261 TX79_MMI1_PADSBH
= (0x04 << 6) | TX79_MMI_CLASS_MMI1
,
2262 TX79_MMI1_PABSH
= (0x05 << 6) | TX79_MMI_CLASS_MMI1
,
2263 TX79_MMI1_PCEQH
= (0x06 << 6) | TX79_MMI_CLASS_MMI1
,
2264 TX79_MMI1_PMINH
= (0x07 << 6) | TX79_MMI_CLASS_MMI1
,
2265 TX79_MMI1_PCEQB
= (0x0A << 6) | TX79_MMI_CLASS_MMI1
,
2266 TX79_MMI1_PADDUW
= (0x10 << 6) | TX79_MMI_CLASS_MMI1
,
2267 TX79_MMI1_PSUBUW
= (0x11 << 6) | TX79_MMI_CLASS_MMI1
,
2268 TX79_MMI1_PEXTUW
= (0x12 << 6) | TX79_MMI_CLASS_MMI1
,
2269 TX79_MMI1_PADDUH
= (0x14 << 6) | TX79_MMI_CLASS_MMI1
,
2270 TX79_MMI1_PSUBUH
= (0x15 << 6) | TX79_MMI_CLASS_MMI1
,
2271 TX79_MMI1_PEXTUH
= (0x16 << 6) | TX79_MMI_CLASS_MMI1
,
2272 TX79_MMI1_PADDUB
= (0x18 << 6) | TX79_MMI_CLASS_MMI1
,
2273 TX79_MMI1_PSUBUB
= (0x19 << 6) | TX79_MMI_CLASS_MMI1
,
2274 TX79_MMI1_PEXTUB
= (0x1A << 6) | TX79_MMI_CLASS_MMI1
,
2275 TX79_MMI1_QFSRV
= (0x1B << 6) | TX79_MMI_CLASS_MMI1
,
2279 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI2:
2282 * +--------+----------------------+--------+--------+
2283 * | MMI | |function| MMI2 |
2284 * +--------+----------------------+--------+--------+
2286 * function bits 7..6
2287 * bits | 0 | 1 | 2 | 3
2288 * 10..8 | 00 | 01 | 10 | 11
2289 * -------+-------+-------+-------+-------
2290 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2291 * 1 001 | PMSUBW| * | * | *
2292 * 2 010 | PMFHI | PMFLO | PINTH | *
2293 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2294 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2295 * 5 101 | PMSUBH| PHMSBH| * | *
2296 * 6 110 | * | * | PEXEH | PREVH
2297 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2300 #define MASK_TX79_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2302 TX79_MMI2_PMADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI2
,
2303 TX79_MMI2_PSLLVW
= (0x02 << 6) | TX79_MMI_CLASS_MMI2
,
2304 TX79_MMI2_PSRLVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI2
,
2305 TX79_MMI2_PMSUBW
= (0x04 << 6) | TX79_MMI_CLASS_MMI2
,
2306 TX79_MMI2_PMFHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI2
,
2307 TX79_MMI2_PMFLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI2
,
2308 TX79_MMI2_PINTH
= (0x0A << 6) | TX79_MMI_CLASS_MMI2
,
2309 TX79_MMI2_PMULTW
= (0x0C << 6) | TX79_MMI_CLASS_MMI2
,
2310 TX79_MMI2_PDIVW
= (0x0D << 6) | TX79_MMI_CLASS_MMI2
,
2311 TX79_MMI2_PCPYLD
= (0x0E << 6) | TX79_MMI_CLASS_MMI2
,
2312 TX79_MMI2_PMADDH
= (0x10 << 6) | TX79_MMI_CLASS_MMI2
,
2313 TX79_MMI2_PHMADH
= (0x11 << 6) | TX79_MMI_CLASS_MMI2
,
2314 TX79_MMI2_PAND
= (0x12 << 6) | TX79_MMI_CLASS_MMI2
,
2315 TX79_MMI2_PXOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI2
,
2316 TX79_MMI2_PMSUBH
= (0x14 << 6) | TX79_MMI_CLASS_MMI2
,
2317 TX79_MMI2_PHMSBH
= (0x15 << 6) | TX79_MMI_CLASS_MMI2
,
2318 TX79_MMI2_PEXEH
= (0x1A << 6) | TX79_MMI_CLASS_MMI2
,
2319 TX79_MMI2_PREVH
= (0x1B << 6) | TX79_MMI_CLASS_MMI2
,
2320 TX79_MMI2_PMULTH
= (0x1C << 6) | TX79_MMI_CLASS_MMI2
,
2321 TX79_MMI2_PDIVBW
= (0x1D << 6) | TX79_MMI_CLASS_MMI2
,
2322 TX79_MMI2_PEXEW
= (0x1E << 6) | TX79_MMI_CLASS_MMI2
,
2323 TX79_MMI2_PROT3W
= (0x1F << 6) | TX79_MMI_CLASS_MMI2
,
2327 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI3:
2330 * +--------+----------------------+--------+--------+
2331 * | MMI | |function| MMI3 |
2332 * +--------+----------------------+--------+--------+
2334 * function bits 7..6
2335 * bits | 0 | 1 | 2 | 3
2336 * 10..8 | 00 | 01 | 10 | 11
2337 * -------+-------+-------+-------+-------
2338 * 0 000 |PMADDUW| * | * | PSRAVW
2339 * 1 001 | * | * | * | *
2340 * 2 010 | PMTHI | PMTLO | PINTEH| *
2341 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2342 * 4 100 | * | * | POR | PNOR
2343 * 5 101 | * | * | * | *
2344 * 6 110 | * | * | PEXCH | PCPYH
2345 * 7 111 | * | * | PEXCW | *
2348 #define MASK_TX79_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2350 TX79_MMI3_PMADDUW
= (0x00 << 6) | TX79_MMI_CLASS_MMI3
,
2351 TX79_MMI3_PSRAVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI3
,
2352 TX79_MMI3_PMTHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI3
,
2353 TX79_MMI3_PMTLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI3
,
2354 TX79_MMI3_PINTEH
= (0x0A << 6) | TX79_MMI_CLASS_MMI3
,
2355 TX79_MMI3_PMULTUW
= (0x0C << 6) | TX79_MMI_CLASS_MMI3
,
2356 TX79_MMI3_PDIVUW
= (0x0D << 6) | TX79_MMI_CLASS_MMI3
,
2357 TX79_MMI3_PCPYUD
= (0x0E << 6) | TX79_MMI_CLASS_MMI3
,
2358 TX79_MMI3_POR
= (0x12 << 6) | TX79_MMI_CLASS_MMI3
,
2359 TX79_MMI3_PNOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI3
,
2360 TX79_MMI3_PEXCH
= (0x1A << 6) | TX79_MMI_CLASS_MMI3
,
2361 TX79_MMI3_PCPYH
= (0x1B << 6) | TX79_MMI_CLASS_MMI3
,
2362 TX79_MMI3_PEXCW
= (0x1E << 6) | TX79_MMI_CLASS_MMI3
,
2365 /* global register indices */
2366 static TCGv cpu_gpr
[32], cpu_PC
;
2367 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2368 static TCGv cpu_dspctrl
, btarget
, bcond
;
2369 static TCGv_i32 hflags
;
2370 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2371 static TCGv_i64 fpu_f64
[32];
2372 static TCGv_i64 msa_wr_d
[64];
2374 #include "exec/gen-icount.h"
/*
 * Wrappers calling a gen_helper_<name> with cpu_env plus trailing
 * constant-i32 arguments.  Naming: <n>e<m>i = n TCG return values,
 * 'e' for the implicit cpu_env, m immediates converted to TCGv_i32
 * temporaries that are freed after the call.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2418 typedef struct DisasContext
{
2419 DisasContextBase base
;
2420 target_ulong saved_pc
;
2421 target_ulong page_start
;
2423 uint64_t insn_flags
;
2424 int32_t CP0_Config1
;
2425 int32_t CP0_Config2
;
2426 int32_t CP0_Config3
;
2427 int32_t CP0_Config5
;
2428 /* Routine used to access memory */
2430 TCGMemOp default_tcg_memop_mask
;
2431 uint32_t hflags
, saved_hflags
;
2432 target_ulong btarget
;
2443 int CP0_LLAddr_shift
;
/* Translator exit reasons beyond the generic DisasJumpType values. */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
/* Register names used when logging/disassembling. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Debug logging helpers; compiled out unless MIPS_DEBUG_DISAS is set. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid/unhandled opcode with its major/minor fields. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2514 /* General purpose registers moves. */
2515 static inline void gen_load_gpr (TCGv t
, int reg
)
2518 tcg_gen_movi_tl(t
, 0);
2520 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2523 static inline void gen_store_gpr (TCGv t
, int reg
)
2526 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2529 /* Moves to/from shadow registers. */
2530 static inline void gen_load_srsgpr (int from
, int to
)
2532 TCGv t0
= tcg_temp_new();
2535 tcg_gen_movi_tl(t0
, 0);
2537 TCGv_i32 t2
= tcg_temp_new_i32();
2538 TCGv_ptr addr
= tcg_temp_new_ptr();
2540 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2541 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2542 tcg_gen_andi_i32(t2
, t2
, 0xf);
2543 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2544 tcg_gen_ext_i32_ptr(addr
, t2
);
2545 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2547 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2548 tcg_temp_free_ptr(addr
);
2549 tcg_temp_free_i32(t2
);
2551 gen_store_gpr(t0
, to
);
2555 static inline void gen_store_srsgpr (int from
, int to
)
2558 TCGv t0
= tcg_temp_new();
2559 TCGv_i32 t2
= tcg_temp_new_i32();
2560 TCGv_ptr addr
= tcg_temp_new_ptr();
2562 gen_load_gpr(t0
, from
);
2563 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2564 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2565 tcg_gen_andi_i32(t2
, t2
, 0xf);
2566 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2567 tcg_gen_ext_i32_ptr(addr
, t2
);
2568 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2570 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2571 tcg_temp_free_ptr(addr
);
2572 tcg_temp_free_i32(t2
);
2578 static inline void gen_save_pc(target_ulong pc
)
2580 tcg_gen_movi_tl(cpu_PC
, pc
);
2583 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2585 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2586 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2587 gen_save_pc(ctx
->base
.pc_next
);
2588 ctx
->saved_pc
= ctx
->base
.pc_next
;
2590 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2591 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2592 ctx
->saved_hflags
= ctx
->hflags
;
2593 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2599 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2605 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2607 ctx
->saved_hflags
= ctx
->hflags
;
2608 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2614 ctx
->btarget
= env
->btarget
;
2619 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2621 TCGv_i32 texcp
= tcg_const_i32(excp
);
2622 TCGv_i32 terr
= tcg_const_i32(err
);
2623 save_cpu_state(ctx
, 1);
2624 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2625 tcg_temp_free_i32(terr
);
2626 tcg_temp_free_i32(texcp
);
2627 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2630 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2632 gen_helper_0e0i(raise_exception
, excp
);
2635 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2637 generate_exception_err(ctx
, excp
, 0);
2640 /* Floating point register moves. */
2641 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2643 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2644 generate_exception(ctx
, EXCP_RI
);
2646 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2649 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2652 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2653 generate_exception(ctx
, EXCP_RI
);
2655 t64
= tcg_temp_new_i64();
2656 tcg_gen_extu_i32_i64(t64
, t
);
2657 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2658 tcg_temp_free_i64(t64
);
2661 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2663 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2664 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2666 gen_load_fpr32(ctx
, t
, reg
| 1);
2670 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2672 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2673 TCGv_i64 t64
= tcg_temp_new_i64();
2674 tcg_gen_extu_i32_i64(t64
, t
);
2675 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2676 tcg_temp_free_i64(t64
);
2678 gen_store_fpr32(ctx
, t
, reg
| 1);
2682 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2684 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2685 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2687 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2691 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2693 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2694 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2697 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2698 t0
= tcg_temp_new_i64();
2699 tcg_gen_shri_i64(t0
, t
, 32);
2700 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2701 tcg_temp_free_i64(t0
);
/*
 * Map FPU condition code 'cc' to its bit index in the FCSR:
 * cc 0 is FCSR bit 23, cc 1..7 are FCSR bits 25..31.
 * NOTE(review): function body was lost in extraction and has been
 * reconstructed from the documented FCSR layout — verify against upstream.
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2713 /* Addresses computation */
2714 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2716 tcg_gen_add_tl(ret
, arg0
, arg1
);
2718 #if defined(TARGET_MIPS64)
2719 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2720 tcg_gen_ext32s_i64(ret
, ret
);
2725 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2728 tcg_gen_addi_tl(ret
, base
, ofs
);
2730 #if defined(TARGET_MIPS64)
2731 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2732 tcg_gen_ext32s_i64(ret
, ret
);
2737 /* Addresses computation (translation time) */
2738 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2741 target_long sum
= base
+ offset
;
2743 #if defined(TARGET_MIPS64)
2744 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2751 /* Sign-extract the low 32-bits to a target_long. */
2752 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2754 #if defined(TARGET_MIPS64)
2755 tcg_gen_ext32s_i64(ret
, arg
);
2757 tcg_gen_extrl_i64_i32(ret
, arg
);
2761 /* Sign-extract the high 32-bits to a target_long. */
2762 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2764 #if defined(TARGET_MIPS64)
2765 tcg_gen_sari_i64(ret
, arg
, 32);
2767 tcg_gen_extrh_i64_i32(ret
, arg
);
2771 static inline void check_cp0_enabled(DisasContext
*ctx
)
2773 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2774 generate_exception_err(ctx
, EXCP_CpU
, 0);
2777 static inline void check_cp1_enabled(DisasContext
*ctx
)
2779 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2780 generate_exception_err(ctx
, EXCP_CpU
, 1);
2783 /* Verify that the processor is running with COP1X instructions enabled.
2784 This is associated with the nabla symbol in the MIPS32 and MIPS64
2787 static inline void check_cop1x(DisasContext
*ctx
)
2789 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2790 generate_exception_end(ctx
, EXCP_RI
);
2793 /* Verify that the processor is running with 64-bit floating-point
2794 operations enabled. */
2796 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2798 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2799 generate_exception_end(ctx
, EXCP_RI
);
2803 * Verify if floating point register is valid; an operation is not defined
2804 * if bit 0 of any register specification is set and the FR bit in the
2805 * Status register equals zero, since the register numbers specify an
2806 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2807 * in the Status register equals one, both even and odd register numbers
2808 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2810 * Multiple 64 bit wide registers can be checked by calling
2811 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2813 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2815 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2816 generate_exception_end(ctx
, EXCP_RI
);
2819 /* Verify that the processor is running with DSP instructions enabled.
2820 This is enabled by CP0 Status register MX(24) bit.
2823 static inline void check_dsp(DisasContext
*ctx
)
2825 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2826 if (ctx
->insn_flags
& ASE_DSP
) {
2827 generate_exception_end(ctx
, EXCP_DSPDIS
);
2829 generate_exception_end(ctx
, EXCP_RI
);
2834 static inline void check_dsp_r2(DisasContext
*ctx
)
2836 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2837 if (ctx
->insn_flags
& ASE_DSP
) {
2838 generate_exception_end(ctx
, EXCP_DSPDIS
);
2840 generate_exception_end(ctx
, EXCP_RI
);
2845 static inline void check_dsp_r3(DisasContext
*ctx
)
2847 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2848 if (ctx
->insn_flags
& ASE_DSP
) {
2849 generate_exception_end(ctx
, EXCP_DSPDIS
);
2851 generate_exception_end(ctx
, EXCP_RI
);
2856 /* This code generates a "reserved instruction" exception if the
2857 CPU does not support the instruction set corresponding to flags. */
2858 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2860 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2861 generate_exception_end(ctx
, EXCP_RI
);
2865 /* This code generates a "reserved instruction" exception if the
2866 CPU has corresponding flag set which indicates that the instruction
2867 has been removed. */
2868 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2870 if (unlikely(ctx
->insn_flags
& flags
)) {
2871 generate_exception_end(ctx
, EXCP_RI
);
2875 /* This code generates a "reserved instruction" exception if the
2876 CPU does not support 64-bit paired-single (PS) floating point data type */
2877 static inline void check_ps(DisasContext
*ctx
)
2879 if (unlikely(!ctx
->ps
)) {
2880 generate_exception(ctx
, EXCP_RI
);
2882 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
2895 #ifndef CONFIG_USER_ONLY
2896 static inline void check_mvh(DisasContext
*ctx
)
2898 if (unlikely(!ctx
->mvh
)) {
2899 generate_exception(ctx
, EXCP_RI
);
2905 * This code generates a "reserved instruction" exception if the
2906 * Config5 XNP bit is set.
2908 static inline void check_xnp(DisasContext
*ctx
)
2910 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2911 generate_exception_end(ctx
, EXCP_RI
);
2915 #ifndef CONFIG_USER_ONLY
2917 * This code generates a "reserved instruction" exception if the
2918 * Config3 PW bit is NOT set.
2920 static inline void check_pw(DisasContext
*ctx
)
2922 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2923 generate_exception_end(ctx
, EXCP_RI
);
2929 * This code generates a "reserved instruction" exception if the
2930 * Config3 MT bit is NOT set.
2932 static inline void check_mt(DisasContext
*ctx
)
2934 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2935 generate_exception_end(ctx
, EXCP_RI
);
2939 #ifndef CONFIG_USER_ONLY
2941 * This code generates a "coprocessor unusable" exception if CP0 is not
2942 * available, and, if that is not the case, generates a "reserved instruction"
2943 * exception if the Config5 MT bit is NOT set. This is needed for availability
2944 * control of some of MT ASE instructions.
2946 static inline void check_cp0_mt(DisasContext
*ctx
)
2948 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2949 generate_exception_err(ctx
, EXCP_CpU
, 0);
2951 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2952 generate_exception_err(ctx
, EXCP_RI
, 0);
2959 * This code generates a "reserved instruction" exception if the
2960 * Config5 NMS bit is set.
2962 static inline void check_nms(DisasContext
*ctx
)
2964 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2965 generate_exception_end(ctx
, EXCP_RI
);
2970 /* Define small wrappers for gen_load_fpr* so that we have a uniform
2971 calling interface for 32 and 64-bit FPRs. No sense in changing
2972 all callers for gen_load_fpr32 when we need the CTX parameter for
2974 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
2975 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
2976 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
2977 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
2978 int ft, int fs, int cc) \
2980 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
2981 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
2990 check_cp1_registers(ctx, fs | ft); \
2998 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
2999 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3001 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3002 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3003 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3004 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3005 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3006 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3007 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3008 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3009 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3010 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3011 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3012 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3013 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3014 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3015 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3016 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3019 tcg_temp_free_i##bits (fp0); \
3020 tcg_temp_free_i##bits (fp1); \
3023 FOP_CONDS(, 0, d
, FMT_D
, 64)
3024 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3025 FOP_CONDS(, 0, s
, FMT_S
, 32)
3026 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3027 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3028 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3031 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3032 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3033 int ft, int fs, int fd) \
3035 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3036 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3037 if (ifmt == FMT_D) { \
3038 check_cp1_registers(ctx, fs | ft | fd); \
3040 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3041 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3044 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3047 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3050 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3053 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3056 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3059 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3062 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3065 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3068 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3071 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3074 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3077 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3080 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3083 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3086 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3089 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3092 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3095 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3098 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3101 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3104 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3107 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3113 tcg_temp_free_i ## bits (fp0); \
3114 tcg_temp_free_i ## bits (fp1); \
3117 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3118 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3120 #undef gen_ldcmp_fpr32
3121 #undef gen_ldcmp_fpr64
3123 /* load/store instructions. */
3124 #ifdef CONFIG_USER_ONLY
3125 #define OP_LD_ATOMIC(insn,fname) \
3126 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3127 DisasContext *ctx) \
3129 TCGv t0 = tcg_temp_new(); \
3130 tcg_gen_mov_tl(t0, arg1); \
3131 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3132 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3133 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3134 tcg_temp_free(t0); \
3137 #define OP_LD_ATOMIC(insn,fname) \
3138 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3139 DisasContext *ctx) \
3141 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3144 OP_LD_ATOMIC(ll
,ld32s
);
3145 #if defined(TARGET_MIPS64)
3146 OP_LD_ATOMIC(lld
,ld64
);
3150 #ifdef CONFIG_USER_ONLY
3151 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3152 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3153 DisasContext *ctx) \
3155 TCGv t0 = tcg_temp_new(); \
3156 TCGLabel *l1 = gen_new_label(); \
3157 TCGLabel *l2 = gen_new_label(); \
3159 tcg_gen_andi_tl(t0, arg2, almask); \
3160 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3161 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3162 generate_exception(ctx, EXCP_AdES); \
3163 gen_set_label(l1); \
3164 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3165 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3166 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3167 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3168 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3169 generate_exception_end(ctx, EXCP_SC); \
3170 gen_set_label(l2); \
3171 tcg_gen_movi_tl(t0, 0); \
3172 gen_store_gpr(t0, rt); \
3173 tcg_temp_free(t0); \
3176 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3177 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3178 DisasContext *ctx) \
3180 TCGv t0 = tcg_temp_new(); \
3181 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3182 gen_store_gpr(t0, rt); \
3183 tcg_temp_free(t0); \
3186 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3187 #if defined(TARGET_MIPS64)
3188 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3192 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3193 int base
, int offset
)
3196 tcg_gen_movi_tl(addr
, offset
);
3197 } else if (offset
== 0) {
3198 gen_load_gpr(addr
, base
);
3200 tcg_gen_movi_tl(addr
, offset
);
3201 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3205 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3207 target_ulong pc
= ctx
->base
.pc_next
;
3209 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3210 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3215 pc
&= ~(target_ulong
)3;
3220 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3221 int rt
, int base
, int offset
)
3224 int mem_idx
= ctx
->mem_idx
;
3226 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3227 /* Loongson CPU uses a load to zero register for prefetch.
3228 We emulate it as a NOP. On other CPU we must perform the
3229 actual memory access. */
3233 t0
= tcg_temp_new();
3234 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3237 #if defined(TARGET_MIPS64)
3239 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3240 ctx
->default_tcg_memop_mask
);
3241 gen_store_gpr(t0
, rt
);
3244 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3245 ctx
->default_tcg_memop_mask
);
3246 gen_store_gpr(t0
, rt
);
3250 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3251 gen_store_gpr(t0
, rt
);
3254 t1
= tcg_temp_new();
3255 /* Do a byte access to possibly trigger a page
3256 fault with the unaligned address. */
3257 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3258 tcg_gen_andi_tl(t1
, t0
, 7);
3259 #ifndef TARGET_WORDS_BIGENDIAN
3260 tcg_gen_xori_tl(t1
, t1
, 7);
3262 tcg_gen_shli_tl(t1
, t1
, 3);
3263 tcg_gen_andi_tl(t0
, t0
, ~7);
3264 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3265 tcg_gen_shl_tl(t0
, t0
, t1
);
3266 t2
= tcg_const_tl(-1);
3267 tcg_gen_shl_tl(t2
, t2
, t1
);
3268 gen_load_gpr(t1
, rt
);
3269 tcg_gen_andc_tl(t1
, t1
, t2
);
3271 tcg_gen_or_tl(t0
, t0
, t1
);
3273 gen_store_gpr(t0
, rt
);
3276 t1
= tcg_temp_new();
3277 /* Do a byte access to possibly trigger a page
3278 fault with the unaligned address. */
3279 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3280 tcg_gen_andi_tl(t1
, t0
, 7);
3281 #ifdef TARGET_WORDS_BIGENDIAN
3282 tcg_gen_xori_tl(t1
, t1
, 7);
3284 tcg_gen_shli_tl(t1
, t1
, 3);
3285 tcg_gen_andi_tl(t0
, t0
, ~7);
3286 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3287 tcg_gen_shr_tl(t0
, t0
, t1
);
3288 tcg_gen_xori_tl(t1
, t1
, 63);
3289 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3290 tcg_gen_shl_tl(t2
, t2
, t1
);
3291 gen_load_gpr(t1
, rt
);
3292 tcg_gen_and_tl(t1
, t1
, t2
);
3294 tcg_gen_or_tl(t0
, t0
, t1
);
3296 gen_store_gpr(t0
, rt
);
3299 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3300 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3302 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3303 gen_store_gpr(t0
, rt
);
3307 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3308 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3310 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3311 gen_store_gpr(t0
, rt
);
3314 mem_idx
= MIPS_HFLAG_UM
;
3317 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3318 ctx
->default_tcg_memop_mask
);
3319 gen_store_gpr(t0
, rt
);
3322 mem_idx
= MIPS_HFLAG_UM
;
3325 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3326 ctx
->default_tcg_memop_mask
);
3327 gen_store_gpr(t0
, rt
);
3330 mem_idx
= MIPS_HFLAG_UM
;
3333 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3334 ctx
->default_tcg_memop_mask
);
3335 gen_store_gpr(t0
, rt
);
3338 mem_idx
= MIPS_HFLAG_UM
;
3341 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3342 gen_store_gpr(t0
, rt
);
3345 mem_idx
= MIPS_HFLAG_UM
;
3348 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3349 gen_store_gpr(t0
, rt
);
3352 mem_idx
= MIPS_HFLAG_UM
;
3355 t1
= tcg_temp_new();
3356 /* Do a byte access to possibly trigger a page
3357 fault with the unaligned address. */
3358 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3359 tcg_gen_andi_tl(t1
, t0
, 3);
3360 #ifndef TARGET_WORDS_BIGENDIAN
3361 tcg_gen_xori_tl(t1
, t1
, 3);
3363 tcg_gen_shli_tl(t1
, t1
, 3);
3364 tcg_gen_andi_tl(t0
, t0
, ~3);
3365 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3366 tcg_gen_shl_tl(t0
, t0
, t1
);
3367 t2
= tcg_const_tl(-1);
3368 tcg_gen_shl_tl(t2
, t2
, t1
);
3369 gen_load_gpr(t1
, rt
);
3370 tcg_gen_andc_tl(t1
, t1
, t2
);
3372 tcg_gen_or_tl(t0
, t0
, t1
);
3374 tcg_gen_ext32s_tl(t0
, t0
);
3375 gen_store_gpr(t0
, rt
);
3378 mem_idx
= MIPS_HFLAG_UM
;
3381 t1
= tcg_temp_new();
3382 /* Do a byte access to possibly trigger a page
3383 fault with the unaligned address. */
3384 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3385 tcg_gen_andi_tl(t1
, t0
, 3);
3386 #ifdef TARGET_WORDS_BIGENDIAN
3387 tcg_gen_xori_tl(t1
, t1
, 3);
3389 tcg_gen_shli_tl(t1
, t1
, 3);
3390 tcg_gen_andi_tl(t0
, t0
, ~3);
3391 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3392 tcg_gen_shr_tl(t0
, t0
, t1
);
3393 tcg_gen_xori_tl(t1
, t1
, 31);
3394 t2
= tcg_const_tl(0xfffffffeull
);
3395 tcg_gen_shl_tl(t2
, t2
, t1
);
3396 gen_load_gpr(t1
, rt
);
3397 tcg_gen_and_tl(t1
, t1
, t2
);
3399 tcg_gen_or_tl(t0
, t0
, t1
);
3401 tcg_gen_ext32s_tl(t0
, t0
);
3402 gen_store_gpr(t0
, rt
);
3405 mem_idx
= MIPS_HFLAG_UM
;
3409 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3410 gen_store_gpr(t0
, rt
);
3416 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3417 uint32_t reg1
, uint32_t reg2
)
3419 TCGv taddr
= tcg_temp_new();
3420 TCGv_i64 tval
= tcg_temp_new_i64();
3421 TCGv tmp1
= tcg_temp_new();
3422 TCGv tmp2
= tcg_temp_new();
3424 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3425 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3426 #ifdef TARGET_WORDS_BIGENDIAN
3427 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3429 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3431 gen_store_gpr(tmp1
, reg1
);
3432 tcg_temp_free(tmp1
);
3433 gen_store_gpr(tmp2
, reg2
);
3434 tcg_temp_free(tmp2
);
3435 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3436 tcg_temp_free_i64(tval
);
3437 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3438 tcg_temp_free(taddr
);
3442 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3443 int base
, int offset
)
3445 TCGv t0
= tcg_temp_new();
3446 TCGv t1
= tcg_temp_new();
3447 int mem_idx
= ctx
->mem_idx
;
3449 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3450 gen_load_gpr(t1
, rt
);
3452 #if defined(TARGET_MIPS64)
3454 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3455 ctx
->default_tcg_memop_mask
);
3458 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3461 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3465 mem_idx
= MIPS_HFLAG_UM
;
3468 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3469 ctx
->default_tcg_memop_mask
);
3472 mem_idx
= MIPS_HFLAG_UM
;
3475 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3476 ctx
->default_tcg_memop_mask
);
3479 mem_idx
= MIPS_HFLAG_UM
;
3482 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3485 mem_idx
= MIPS_HFLAG_UM
;
3488 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3491 mem_idx
= MIPS_HFLAG_UM
;
3494 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3502 /* Store conditional */
3503 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3504 int base
, int16_t offset
)
3507 int mem_idx
= ctx
->mem_idx
;
3509 #ifdef CONFIG_USER_ONLY
3510 t0
= tcg_temp_local_new();
3511 t1
= tcg_temp_local_new();
3513 t0
= tcg_temp_new();
3514 t1
= tcg_temp_new();
3516 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3517 gen_load_gpr(t1
, rt
);
3519 #if defined(TARGET_MIPS64)
3522 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3526 mem_idx
= MIPS_HFLAG_UM
;
3530 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3537 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3538 uint32_t reg1
, uint32_t reg2
)
3540 TCGv taddr
= tcg_temp_local_new();
3541 TCGv lladdr
= tcg_temp_local_new();
3542 TCGv_i64 tval
= tcg_temp_new_i64();
3543 TCGv_i64 llval
= tcg_temp_new_i64();
3544 TCGv_i64 val
= tcg_temp_new_i64();
3545 TCGv tmp1
= tcg_temp_new();
3546 TCGv tmp2
= tcg_temp_new();
3547 TCGLabel
*lab_fail
= gen_new_label();
3548 TCGLabel
*lab_done
= gen_new_label();
3550 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3552 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3553 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3555 gen_load_gpr(tmp1
, reg1
);
3556 gen_load_gpr(tmp2
, reg2
);
3558 #ifdef TARGET_WORDS_BIGENDIAN
3559 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3561 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3564 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3565 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3566 ctx
->mem_idx
, MO_64
);
3568 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3570 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3572 gen_set_label(lab_fail
);
3575 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3577 gen_set_label(lab_done
);
3578 tcg_gen_movi_tl(lladdr
, -1);
3579 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3582 /* Load and store */
3583 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3586 /* Don't do NOP if destination is zero: we must perform the actual
3591 TCGv_i32 fp0
= tcg_temp_new_i32();
3592 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3593 ctx
->default_tcg_memop_mask
);
3594 gen_store_fpr32(ctx
, fp0
, ft
);
3595 tcg_temp_free_i32(fp0
);
3600 TCGv_i32 fp0
= tcg_temp_new_i32();
3601 gen_load_fpr32(ctx
, fp0
, ft
);
3602 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3603 ctx
->default_tcg_memop_mask
);
3604 tcg_temp_free_i32(fp0
);
3609 TCGv_i64 fp0
= tcg_temp_new_i64();
3610 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3611 ctx
->default_tcg_memop_mask
);
3612 gen_store_fpr64(ctx
, fp0
, ft
);
3613 tcg_temp_free_i64(fp0
);
3618 TCGv_i64 fp0
= tcg_temp_new_i64();
3619 gen_load_fpr64(ctx
, fp0
, ft
);
3620 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3621 ctx
->default_tcg_memop_mask
);
3622 tcg_temp_free_i64(fp0
);
3626 MIPS_INVAL("flt_ldst");
3627 generate_exception_end(ctx
, EXCP_RI
);
3632 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3633 int rs
, int16_t imm
)
3635 TCGv t0
= tcg_temp_new();
3637 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3638 check_cp1_enabled(ctx
);
3642 check_insn(ctx
, ISA_MIPS2
);
3645 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3646 gen_flt_ldst(ctx
, op
, rt
, t0
);
3649 generate_exception_err(ctx
, EXCP_CpU
, 1);
3654 /* Arithmetic with immediate operand */
3655 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3656 int rt
, int rs
, int imm
)
3658 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3660 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3661 /* If no destination, treat it as a NOP.
3662 For addi, we must generate the overflow exception when needed. */
3668 TCGv t0
= tcg_temp_local_new();
3669 TCGv t1
= tcg_temp_new();
3670 TCGv t2
= tcg_temp_new();
3671 TCGLabel
*l1
= gen_new_label();
3673 gen_load_gpr(t1
, rs
);
3674 tcg_gen_addi_tl(t0
, t1
, uimm
);
3675 tcg_gen_ext32s_tl(t0
, t0
);
3677 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3678 tcg_gen_xori_tl(t2
, t0
, uimm
);
3679 tcg_gen_and_tl(t1
, t1
, t2
);
3681 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3683 /* operands of same sign, result different sign */
3684 generate_exception(ctx
, EXCP_OVERFLOW
);
3686 tcg_gen_ext32s_tl(t0
, t0
);
3687 gen_store_gpr(t0
, rt
);
3693 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3694 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3696 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3699 #if defined(TARGET_MIPS64)
3702 TCGv t0
= tcg_temp_local_new();
3703 TCGv t1
= tcg_temp_new();
3704 TCGv t2
= tcg_temp_new();
3705 TCGLabel
*l1
= gen_new_label();
3707 gen_load_gpr(t1
, rs
);
3708 tcg_gen_addi_tl(t0
, t1
, uimm
);
3710 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3711 tcg_gen_xori_tl(t2
, t0
, uimm
);
3712 tcg_gen_and_tl(t1
, t1
, t2
);
3714 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3716 /* operands of same sign, result different sign */
3717 generate_exception(ctx
, EXCP_OVERFLOW
);
3719 gen_store_gpr(t0
, rt
);
3725 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3727 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3734 /* Logic with immediate operand */
3735 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3736 int rt
, int rs
, int16_t imm
)
3741 /* If no destination, treat it as a NOP. */
3744 uimm
= (uint16_t)imm
;
3747 if (likely(rs
!= 0))
3748 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3750 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3754 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3756 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3759 if (likely(rs
!= 0))
3760 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3762 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3765 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3767 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3768 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3770 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3779 /* Set on less than with immediate operand */
3780 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3781 int rt
, int rs
, int16_t imm
)
3783 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3787 /* If no destination, treat it as a NOP. */
3790 t0
= tcg_temp_new();
3791 gen_load_gpr(t0
, rs
);
3794 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3797 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3803 /* Shifts with immediate operand */
3804 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3805 int rt
, int rs
, int16_t imm
)
3807 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3811 /* If no destination, treat it as a NOP. */
3815 t0
= tcg_temp_new();
3816 gen_load_gpr(t0
, rs
);
3819 tcg_gen_shli_tl(t0
, t0
, uimm
);
3820 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3823 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3827 tcg_gen_ext32u_tl(t0
, t0
);
3828 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3830 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3835 TCGv_i32 t1
= tcg_temp_new_i32();
3837 tcg_gen_trunc_tl_i32(t1
, t0
);
3838 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3839 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3840 tcg_temp_free_i32(t1
);
3842 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3845 #if defined(TARGET_MIPS64)
3847 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3850 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3853 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3857 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3859 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3863 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3866 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3869 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3872 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3880 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3881 int rd
, int rs
, int rt
)
3883 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3884 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3885 /* If no destination, treat it as a NOP.
3886 For add & sub, we must generate the overflow exception when needed. */
3893 TCGv t0
= tcg_temp_local_new();
3894 TCGv t1
= tcg_temp_new();
3895 TCGv t2
= tcg_temp_new();
3896 TCGLabel
*l1
= gen_new_label();
3898 gen_load_gpr(t1
, rs
);
3899 gen_load_gpr(t2
, rt
);
3900 tcg_gen_add_tl(t0
, t1
, t2
);
3901 tcg_gen_ext32s_tl(t0
, t0
);
3902 tcg_gen_xor_tl(t1
, t1
, t2
);
3903 tcg_gen_xor_tl(t2
, t0
, t2
);
3904 tcg_gen_andc_tl(t1
, t2
, t1
);
3906 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3908 /* operands of same sign, result different sign */
3909 generate_exception(ctx
, EXCP_OVERFLOW
);
3911 gen_store_gpr(t0
, rd
);
3916 if (rs
!= 0 && rt
!= 0) {
3917 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3918 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3919 } else if (rs
== 0 && rt
!= 0) {
3920 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3921 } else if (rs
!= 0 && rt
== 0) {
3922 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3924 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3929 TCGv t0
= tcg_temp_local_new();
3930 TCGv t1
= tcg_temp_new();
3931 TCGv t2
= tcg_temp_new();
3932 TCGLabel
*l1
= gen_new_label();
3934 gen_load_gpr(t1
, rs
);
3935 gen_load_gpr(t2
, rt
);
3936 tcg_gen_sub_tl(t0
, t1
, t2
);
3937 tcg_gen_ext32s_tl(t0
, t0
);
3938 tcg_gen_xor_tl(t2
, t1
, t2
);
3939 tcg_gen_xor_tl(t1
, t0
, t1
);
3940 tcg_gen_and_tl(t1
, t1
, t2
);
3942 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3944 /* operands of different sign, first operand and result different sign */
3945 generate_exception(ctx
, EXCP_OVERFLOW
);
3947 gen_store_gpr(t0
, rd
);
3952 if (rs
!= 0 && rt
!= 0) {
3953 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3954 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3955 } else if (rs
== 0 && rt
!= 0) {
3956 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3957 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3958 } else if (rs
!= 0 && rt
== 0) {
3959 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3961 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3964 #if defined(TARGET_MIPS64)
3967 TCGv t0
= tcg_temp_local_new();
3968 TCGv t1
= tcg_temp_new();
3969 TCGv t2
= tcg_temp_new();
3970 TCGLabel
*l1
= gen_new_label();
3972 gen_load_gpr(t1
, rs
);
3973 gen_load_gpr(t2
, rt
);
3974 tcg_gen_add_tl(t0
, t1
, t2
);
3975 tcg_gen_xor_tl(t1
, t1
, t2
);
3976 tcg_gen_xor_tl(t2
, t0
, t2
);
3977 tcg_gen_andc_tl(t1
, t2
, t1
);
3979 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3981 /* operands of same sign, result different sign */
3982 generate_exception(ctx
, EXCP_OVERFLOW
);
3984 gen_store_gpr(t0
, rd
);
3989 if (rs
!= 0 && rt
!= 0) {
3990 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3991 } else if (rs
== 0 && rt
!= 0) {
3992 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3993 } else if (rs
!= 0 && rt
== 0) {
3994 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3996 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4001 TCGv t0
= tcg_temp_local_new();
4002 TCGv t1
= tcg_temp_new();
4003 TCGv t2
= tcg_temp_new();
4004 TCGLabel
*l1
= gen_new_label();
4006 gen_load_gpr(t1
, rs
);
4007 gen_load_gpr(t2
, rt
);
4008 tcg_gen_sub_tl(t0
, t1
, t2
);
4009 tcg_gen_xor_tl(t2
, t1
, t2
);
4010 tcg_gen_xor_tl(t1
, t0
, t1
);
4011 tcg_gen_and_tl(t1
, t1
, t2
);
4013 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4015 /* operands of different sign, first operand and result different sign */
4016 generate_exception(ctx
, EXCP_OVERFLOW
);
4018 gen_store_gpr(t0
, rd
);
4023 if (rs
!= 0 && rt
!= 0) {
4024 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4025 } else if (rs
== 0 && rt
!= 0) {
4026 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4027 } else if (rs
!= 0 && rt
== 0) {
4028 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4030 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4035 if (likely(rs
!= 0 && rt
!= 0)) {
4036 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4037 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4039 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4045 /* Conditional move */
4046 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4047 int rd
, int rs
, int rt
)
4052 /* If no destination, treat it as a NOP. */
4056 t0
= tcg_temp_new();
4057 gen_load_gpr(t0
, rt
);
4058 t1
= tcg_const_tl(0);
4059 t2
= tcg_temp_new();
4060 gen_load_gpr(t2
, rs
);
4063 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4066 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4069 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4072 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4081 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4082 int rd
, int rs
, int rt
)
4085 /* If no destination, treat it as a NOP. */
4091 if (likely(rs
!= 0 && rt
!= 0)) {
4092 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4094 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4098 if (rs
!= 0 && rt
!= 0) {
4099 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4100 } else if (rs
== 0 && rt
!= 0) {
4101 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4102 } else if (rs
!= 0 && rt
== 0) {
4103 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4105 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4109 if (likely(rs
!= 0 && rt
!= 0)) {
4110 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4111 } else if (rs
== 0 && rt
!= 0) {
4112 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4113 } else if (rs
!= 0 && rt
== 0) {
4114 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4116 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4120 if (likely(rs
!= 0 && rt
!= 0)) {
4121 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4122 } else if (rs
== 0 && rt
!= 0) {
4123 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4124 } else if (rs
!= 0 && rt
== 0) {
4125 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4127 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4133 /* Set on lower than */
4134 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4135 int rd
, int rs
, int rt
)
4140 /* If no destination, treat it as a NOP. */
4144 t0
= tcg_temp_new();
4145 t1
= tcg_temp_new();
4146 gen_load_gpr(t0
, rs
);
4147 gen_load_gpr(t1
, rt
);
4150 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4153 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4161 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4162 int rd
, int rs
, int rt
)
4167 /* If no destination, treat it as a NOP.
4168 For add & sub, we must generate the overflow exception when needed. */
4172 t0
= tcg_temp_new();
4173 t1
= tcg_temp_new();
4174 gen_load_gpr(t0
, rs
);
4175 gen_load_gpr(t1
, rt
);
4178 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4179 tcg_gen_shl_tl(t0
, t1
, t0
);
4180 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4183 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4184 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4187 tcg_gen_ext32u_tl(t1
, t1
);
4188 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4189 tcg_gen_shr_tl(t0
, t1
, t0
);
4190 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4194 TCGv_i32 t2
= tcg_temp_new_i32();
4195 TCGv_i32 t3
= tcg_temp_new_i32();
4197 tcg_gen_trunc_tl_i32(t2
, t0
);
4198 tcg_gen_trunc_tl_i32(t3
, t1
);
4199 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4200 tcg_gen_rotr_i32(t2
, t3
, t2
);
4201 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4202 tcg_temp_free_i32(t2
);
4203 tcg_temp_free_i32(t3
);
4206 #if defined(TARGET_MIPS64)
4208 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4209 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4212 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4213 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4216 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4217 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4220 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4221 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4229 /* Arithmetic on HI/LO registers */
4230 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4232 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== TX79_MMI_MFHI1
||
4233 opc
== OPC_MFLO
|| opc
== TX79_MMI_MFLO1
)) {
4239 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4246 case TX79_MMI_MFHI1
:
4247 #if defined(TARGET_MIPS64)
4249 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4253 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4257 case TX79_MMI_MFLO1
:
4258 #if defined(TARGET_MIPS64)
4260 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4264 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4268 case TX79_MMI_MTHI1
:
4270 #if defined(TARGET_MIPS64)
4272 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4276 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4279 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4283 case TX79_MMI_MTLO1
:
4285 #if defined(TARGET_MIPS64)
4287 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4291 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4294 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4300 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4303 TCGv t0
= tcg_const_tl(addr
);
4304 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4305 gen_store_gpr(t0
, reg
);
4309 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4315 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4318 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4319 addr
= addr_add(ctx
, pc
, offset
);
4320 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4324 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4325 addr
= addr_add(ctx
, pc
, offset
);
4326 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4328 #if defined(TARGET_MIPS64)
4331 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4332 addr
= addr_add(ctx
, pc
, offset
);
4333 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4337 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4340 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4341 addr
= addr_add(ctx
, pc
, offset
);
4342 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4347 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4348 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4349 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4352 #if defined(TARGET_MIPS64)
4353 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4354 case R6_OPC_LDPC
+ (1 << 16):
4355 case R6_OPC_LDPC
+ (2 << 16):
4356 case R6_OPC_LDPC
+ (3 << 16):
4358 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4359 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4360 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4364 MIPS_INVAL("OPC_PCREL");
4365 generate_exception_end(ctx
, EXCP_RI
);
4372 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4381 t0
= tcg_temp_new();
4382 t1
= tcg_temp_new();
4384 gen_load_gpr(t0
, rs
);
4385 gen_load_gpr(t1
, rt
);
4390 TCGv t2
= tcg_temp_new();
4391 TCGv t3
= tcg_temp_new();
4392 tcg_gen_ext32s_tl(t0
, t0
);
4393 tcg_gen_ext32s_tl(t1
, t1
);
4394 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4395 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4396 tcg_gen_and_tl(t2
, t2
, t3
);
4397 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4398 tcg_gen_or_tl(t2
, t2
, t3
);
4399 tcg_gen_movi_tl(t3
, 0);
4400 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4401 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4402 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4409 TCGv t2
= tcg_temp_new();
4410 TCGv t3
= tcg_temp_new();
4411 tcg_gen_ext32s_tl(t0
, t0
);
4412 tcg_gen_ext32s_tl(t1
, t1
);
4413 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4414 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4415 tcg_gen_and_tl(t2
, t2
, t3
);
4416 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4417 tcg_gen_or_tl(t2
, t2
, t3
);
4418 tcg_gen_movi_tl(t3
, 0);
4419 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4420 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4421 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4428 TCGv t2
= tcg_const_tl(0);
4429 TCGv t3
= tcg_const_tl(1);
4430 tcg_gen_ext32u_tl(t0
, t0
);
4431 tcg_gen_ext32u_tl(t1
, t1
);
4432 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4433 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4434 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4441 TCGv t2
= tcg_const_tl(0);
4442 TCGv t3
= tcg_const_tl(1);
4443 tcg_gen_ext32u_tl(t0
, t0
);
4444 tcg_gen_ext32u_tl(t1
, t1
);
4445 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4446 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4447 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4454 TCGv_i32 t2
= tcg_temp_new_i32();
4455 TCGv_i32 t3
= tcg_temp_new_i32();
4456 tcg_gen_trunc_tl_i32(t2
, t0
);
4457 tcg_gen_trunc_tl_i32(t3
, t1
);
4458 tcg_gen_mul_i32(t2
, t2
, t3
);
4459 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4460 tcg_temp_free_i32(t2
);
4461 tcg_temp_free_i32(t3
);
4466 TCGv_i32 t2
= tcg_temp_new_i32();
4467 TCGv_i32 t3
= tcg_temp_new_i32();
4468 tcg_gen_trunc_tl_i32(t2
, t0
);
4469 tcg_gen_trunc_tl_i32(t3
, t1
);
4470 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4471 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4472 tcg_temp_free_i32(t2
);
4473 tcg_temp_free_i32(t3
);
4478 TCGv_i32 t2
= tcg_temp_new_i32();
4479 TCGv_i32 t3
= tcg_temp_new_i32();
4480 tcg_gen_trunc_tl_i32(t2
, t0
);
4481 tcg_gen_trunc_tl_i32(t3
, t1
);
4482 tcg_gen_mul_i32(t2
, t2
, t3
);
4483 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4484 tcg_temp_free_i32(t2
);
4485 tcg_temp_free_i32(t3
);
4490 TCGv_i32 t2
= tcg_temp_new_i32();
4491 TCGv_i32 t3
= tcg_temp_new_i32();
4492 tcg_gen_trunc_tl_i32(t2
, t0
);
4493 tcg_gen_trunc_tl_i32(t3
, t1
);
4494 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4495 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4496 tcg_temp_free_i32(t2
);
4497 tcg_temp_free_i32(t3
);
4500 #if defined(TARGET_MIPS64)
4503 TCGv t2
= tcg_temp_new();
4504 TCGv t3
= tcg_temp_new();
4505 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4506 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4507 tcg_gen_and_tl(t2
, t2
, t3
);
4508 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4509 tcg_gen_or_tl(t2
, t2
, t3
);
4510 tcg_gen_movi_tl(t3
, 0);
4511 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4512 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4519 TCGv t2
= tcg_temp_new();
4520 TCGv t3
= tcg_temp_new();
4521 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4522 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4523 tcg_gen_and_tl(t2
, t2
, t3
);
4524 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4525 tcg_gen_or_tl(t2
, t2
, t3
);
4526 tcg_gen_movi_tl(t3
, 0);
4527 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4528 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4535 TCGv t2
= tcg_const_tl(0);
4536 TCGv t3
= tcg_const_tl(1);
4537 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4538 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4545 TCGv t2
= tcg_const_tl(0);
4546 TCGv t3
= tcg_const_tl(1);
4547 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4548 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4554 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4558 TCGv t2
= tcg_temp_new();
4559 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4564 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4568 TCGv t2
= tcg_temp_new();
4569 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4575 MIPS_INVAL("r6 mul/div");
4576 generate_exception_end(ctx
, EXCP_RI
);
4584 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4585 int acc
, int rs
, int rt
)
4589 t0
= tcg_temp_new();
4590 t1
= tcg_temp_new();
4592 gen_load_gpr(t0
, rs
);
4593 gen_load_gpr(t1
, rt
);
4596 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4605 TCGv t2
= tcg_temp_new();
4606 TCGv t3
= tcg_temp_new();
4607 tcg_gen_ext32s_tl(t0
, t0
);
4608 tcg_gen_ext32s_tl(t1
, t1
);
4609 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4610 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4611 tcg_gen_and_tl(t2
, t2
, t3
);
4612 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4613 tcg_gen_or_tl(t2
, t2
, t3
);
4614 tcg_gen_movi_tl(t3
, 0);
4615 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4616 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4617 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4618 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4619 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4625 case TX79_MMI_DIVU1
:
4627 TCGv t2
= tcg_const_tl(0);
4628 TCGv t3
= tcg_const_tl(1);
4629 tcg_gen_ext32u_tl(t0
, t0
);
4630 tcg_gen_ext32u_tl(t1
, t1
);
4631 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4632 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4633 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4634 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4635 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4642 TCGv_i32 t2
= tcg_temp_new_i32();
4643 TCGv_i32 t3
= tcg_temp_new_i32();
4644 tcg_gen_trunc_tl_i32(t2
, t0
);
4645 tcg_gen_trunc_tl_i32(t3
, t1
);
4646 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4647 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4648 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4649 tcg_temp_free_i32(t2
);
4650 tcg_temp_free_i32(t3
);
4655 TCGv_i32 t2
= tcg_temp_new_i32();
4656 TCGv_i32 t3
= tcg_temp_new_i32();
4657 tcg_gen_trunc_tl_i32(t2
, t0
);
4658 tcg_gen_trunc_tl_i32(t3
, t1
);
4659 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4660 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4661 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4662 tcg_temp_free_i32(t2
);
4663 tcg_temp_free_i32(t3
);
4666 #if defined(TARGET_MIPS64)
4669 TCGv t2
= tcg_temp_new();
4670 TCGv t3
= tcg_temp_new();
4671 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4672 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4673 tcg_gen_and_tl(t2
, t2
, t3
);
4674 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4675 tcg_gen_or_tl(t2
, t2
, t3
);
4676 tcg_gen_movi_tl(t3
, 0);
4677 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4678 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4679 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4686 TCGv t2
= tcg_const_tl(0);
4687 TCGv t3
= tcg_const_tl(1);
4688 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4689 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4690 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4696 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4699 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4704 TCGv_i64 t2
= tcg_temp_new_i64();
4705 TCGv_i64 t3
= tcg_temp_new_i64();
4707 tcg_gen_ext_tl_i64(t2
, t0
);
4708 tcg_gen_ext_tl_i64(t3
, t1
);
4709 tcg_gen_mul_i64(t2
, t2
, t3
);
4710 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4711 tcg_gen_add_i64(t2
, t2
, t3
);
4712 tcg_temp_free_i64(t3
);
4713 gen_move_low32(cpu_LO
[acc
], t2
);
4714 gen_move_high32(cpu_HI
[acc
], t2
);
4715 tcg_temp_free_i64(t2
);
4720 TCGv_i64 t2
= tcg_temp_new_i64();
4721 TCGv_i64 t3
= tcg_temp_new_i64();
4723 tcg_gen_ext32u_tl(t0
, t0
);
4724 tcg_gen_ext32u_tl(t1
, t1
);
4725 tcg_gen_extu_tl_i64(t2
, t0
);
4726 tcg_gen_extu_tl_i64(t3
, t1
);
4727 tcg_gen_mul_i64(t2
, t2
, t3
);
4728 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4729 tcg_gen_add_i64(t2
, t2
, t3
);
4730 tcg_temp_free_i64(t3
);
4731 gen_move_low32(cpu_LO
[acc
], t2
);
4732 gen_move_high32(cpu_HI
[acc
], t2
);
4733 tcg_temp_free_i64(t2
);
4738 TCGv_i64 t2
= tcg_temp_new_i64();
4739 TCGv_i64 t3
= tcg_temp_new_i64();
4741 tcg_gen_ext_tl_i64(t2
, t0
);
4742 tcg_gen_ext_tl_i64(t3
, t1
);
4743 tcg_gen_mul_i64(t2
, t2
, t3
);
4744 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4745 tcg_gen_sub_i64(t2
, t3
, t2
);
4746 tcg_temp_free_i64(t3
);
4747 gen_move_low32(cpu_LO
[acc
], t2
);
4748 gen_move_high32(cpu_HI
[acc
], t2
);
4749 tcg_temp_free_i64(t2
);
4754 TCGv_i64 t2
= tcg_temp_new_i64();
4755 TCGv_i64 t3
= tcg_temp_new_i64();
4757 tcg_gen_ext32u_tl(t0
, t0
);
4758 tcg_gen_ext32u_tl(t1
, t1
);
4759 tcg_gen_extu_tl_i64(t2
, t0
);
4760 tcg_gen_extu_tl_i64(t3
, t1
);
4761 tcg_gen_mul_i64(t2
, t2
, t3
);
4762 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4763 tcg_gen_sub_i64(t2
, t3
, t2
);
4764 tcg_temp_free_i64(t3
);
4765 gen_move_low32(cpu_LO
[acc
], t2
);
4766 gen_move_high32(cpu_HI
[acc
], t2
);
4767 tcg_temp_free_i64(t2
);
4771 MIPS_INVAL("mul/div");
4772 generate_exception_end(ctx
, EXCP_RI
);
4781 * These MULT and MULTU instructions implemented in for example the
4782 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4783 * architectures are special three-operand variants with the syntax
4785 * MULT[U][1] rd, rs, rt
4789 * (rd, LO, HI) <- rs * rt
4791 * where the low-order 32-bits of the result is placed into both the
4792 * GPR rd and the special register LO. The high-order 32-bits of the
4793 * result is placed into the special register HI.
4795 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4796 * which is the zero register that always reads as 0.
4798 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4799 int rd
, int rs
, int rt
)
4801 TCGv t0
= tcg_temp_new();
4802 TCGv t1
= tcg_temp_new();
4805 gen_load_gpr(t0
, rs
);
4806 gen_load_gpr(t1
, rt
);
4809 case TX79_MMI_MULT1
:
4814 TCGv_i32 t2
= tcg_temp_new_i32();
4815 TCGv_i32 t3
= tcg_temp_new_i32();
4816 tcg_gen_trunc_tl_i32(t2
, t0
);
4817 tcg_gen_trunc_tl_i32(t3
, t1
);
4818 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4820 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4822 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4823 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4824 tcg_temp_free_i32(t2
);
4825 tcg_temp_free_i32(t3
);
4828 case TX79_MMI_MULTU1
:
4833 TCGv_i32 t2
= tcg_temp_new_i32();
4834 TCGv_i32 t3
= tcg_temp_new_i32();
4835 tcg_gen_trunc_tl_i32(t2
, t0
);
4836 tcg_gen_trunc_tl_i32(t3
, t1
);
4837 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4839 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4841 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4842 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4843 tcg_temp_free_i32(t2
);
4844 tcg_temp_free_i32(t3
);
4848 MIPS_INVAL("mul TXx9");
4849 generate_exception_end(ctx
, EXCP_RI
);
4858 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4859 int rd
, int rs
, int rt
)
4861 TCGv t0
= tcg_temp_new();
4862 TCGv t1
= tcg_temp_new();
4864 gen_load_gpr(t0
, rs
);
4865 gen_load_gpr(t1
, rt
);
4868 case OPC_VR54XX_MULS
:
4869 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4871 case OPC_VR54XX_MULSU
:
4872 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4874 case OPC_VR54XX_MACC
:
4875 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4877 case OPC_VR54XX_MACCU
:
4878 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4880 case OPC_VR54XX_MSAC
:
4881 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4883 case OPC_VR54XX_MSACU
:
4884 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4886 case OPC_VR54XX_MULHI
:
4887 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4889 case OPC_VR54XX_MULHIU
:
4890 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4892 case OPC_VR54XX_MULSHI
:
4893 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4895 case OPC_VR54XX_MULSHIU
:
4896 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4898 case OPC_VR54XX_MACCHI
:
4899 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4901 case OPC_VR54XX_MACCHIU
:
4902 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4904 case OPC_VR54XX_MSACHI
:
4905 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4907 case OPC_VR54XX_MSACHIU
:
4908 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4911 MIPS_INVAL("mul vr54xx");
4912 generate_exception_end(ctx
, EXCP_RI
);
4915 gen_store_gpr(t0
, rd
);
4922 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4932 gen_load_gpr(t0
, rs
);
4937 #if defined(TARGET_MIPS64)
4941 tcg_gen_not_tl(t0
, t0
);
4950 tcg_gen_ext32u_tl(t0
, t0
);
4951 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4952 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4954 #if defined(TARGET_MIPS64)
4959 tcg_gen_clzi_i64(t0
, t0
, 64);
4965 /* Godson integer instructions */
4966 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
4967 int rd
, int rs
, int rt
)
4979 case OPC_MULTU_G_2E
:
4980 case OPC_MULTU_G_2F
:
4981 #if defined(TARGET_MIPS64)
4982 case OPC_DMULT_G_2E
:
4983 case OPC_DMULT_G_2F
:
4984 case OPC_DMULTU_G_2E
:
4985 case OPC_DMULTU_G_2F
:
4987 t0
= tcg_temp_new();
4988 t1
= tcg_temp_new();
4991 t0
= tcg_temp_local_new();
4992 t1
= tcg_temp_local_new();
4996 gen_load_gpr(t0
, rs
);
4997 gen_load_gpr(t1
, rt
);
5002 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5003 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5005 case OPC_MULTU_G_2E
:
5006 case OPC_MULTU_G_2F
:
5007 tcg_gen_ext32u_tl(t0
, t0
);
5008 tcg_gen_ext32u_tl(t1
, t1
);
5009 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5010 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5015 TCGLabel
*l1
= gen_new_label();
5016 TCGLabel
*l2
= gen_new_label();
5017 TCGLabel
*l3
= gen_new_label();
5018 tcg_gen_ext32s_tl(t0
, t0
);
5019 tcg_gen_ext32s_tl(t1
, t1
);
5020 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5021 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5024 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5025 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5026 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5029 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5030 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5037 TCGLabel
*l1
= gen_new_label();
5038 TCGLabel
*l2
= gen_new_label();
5039 tcg_gen_ext32u_tl(t0
, t0
);
5040 tcg_gen_ext32u_tl(t1
, t1
);
5041 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5042 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5045 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5046 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5053 TCGLabel
*l1
= gen_new_label();
5054 TCGLabel
*l2
= gen_new_label();
5055 TCGLabel
*l3
= gen_new_label();
5056 tcg_gen_ext32u_tl(t0
, t0
);
5057 tcg_gen_ext32u_tl(t1
, t1
);
5058 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5059 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5060 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5062 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5065 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5066 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5073 TCGLabel
*l1
= gen_new_label();
5074 TCGLabel
*l2
= gen_new_label();
5075 tcg_gen_ext32u_tl(t0
, t0
);
5076 tcg_gen_ext32u_tl(t1
, t1
);
5077 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5078 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5081 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5082 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5086 #if defined(TARGET_MIPS64)
5087 case OPC_DMULT_G_2E
:
5088 case OPC_DMULT_G_2F
:
5089 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5091 case OPC_DMULTU_G_2E
:
5092 case OPC_DMULTU_G_2F
:
5093 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5098 TCGLabel
*l1
= gen_new_label();
5099 TCGLabel
*l2
= gen_new_label();
5100 TCGLabel
*l3
= gen_new_label();
5101 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5102 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5105 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5106 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5107 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5110 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5114 case OPC_DDIVU_G_2E
:
5115 case OPC_DDIVU_G_2F
:
5117 TCGLabel
*l1
= gen_new_label();
5118 TCGLabel
*l2
= gen_new_label();
5119 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5120 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5123 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5130 TCGLabel
*l1
= gen_new_label();
5131 TCGLabel
*l2
= gen_new_label();
5132 TCGLabel
*l3
= gen_new_label();
5133 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5134 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5135 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5137 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5140 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5144 case OPC_DMODU_G_2E
:
5145 case OPC_DMODU_G_2F
:
5147 TCGLabel
*l1
= gen_new_label();
5148 TCGLabel
*l2
= gen_new_label();
5149 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5150 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5153 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5164 /* Loongson multimedia instructions */
5165 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5167 uint32_t opc
, shift_max
;
5170 opc
= MASK_LMI(ctx
->opcode
);
5176 t0
= tcg_temp_local_new_i64();
5177 t1
= tcg_temp_local_new_i64();
5180 t0
= tcg_temp_new_i64();
5181 t1
= tcg_temp_new_i64();
5185 check_cp1_enabled(ctx
);
5186 gen_load_fpr64(ctx
, t0
, rs
);
5187 gen_load_fpr64(ctx
, t1
, rt
);
5189 #define LMI_HELPER(UP, LO) \
5190 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5191 #define LMI_HELPER_1(UP, LO) \
5192 case OPC_##UP: gen_helper_##LO(t0, t0); break
5193 #define LMI_DIRECT(UP, LO, OP) \
5194 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5197 LMI_HELPER(PADDSH
, paddsh
);
5198 LMI_HELPER(PADDUSH
, paddush
);
5199 LMI_HELPER(PADDH
, paddh
);
5200 LMI_HELPER(PADDW
, paddw
);
5201 LMI_HELPER(PADDSB
, paddsb
);
5202 LMI_HELPER(PADDUSB
, paddusb
);
5203 LMI_HELPER(PADDB
, paddb
);
5205 LMI_HELPER(PSUBSH
, psubsh
);
5206 LMI_HELPER(PSUBUSH
, psubush
);
5207 LMI_HELPER(PSUBH
, psubh
);
5208 LMI_HELPER(PSUBW
, psubw
);
5209 LMI_HELPER(PSUBSB
, psubsb
);
5210 LMI_HELPER(PSUBUSB
, psubusb
);
5211 LMI_HELPER(PSUBB
, psubb
);
5213 LMI_HELPER(PSHUFH
, pshufh
);
5214 LMI_HELPER(PACKSSWH
, packsswh
);
5215 LMI_HELPER(PACKSSHB
, packsshb
);
5216 LMI_HELPER(PACKUSHB
, packushb
);
5218 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5219 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5220 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5221 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5222 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5223 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5225 LMI_HELPER(PAVGH
, pavgh
);
5226 LMI_HELPER(PAVGB
, pavgb
);
5227 LMI_HELPER(PMAXSH
, pmaxsh
);
5228 LMI_HELPER(PMINSH
, pminsh
);
5229 LMI_HELPER(PMAXUB
, pmaxub
);
5230 LMI_HELPER(PMINUB
, pminub
);
5232 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5233 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5234 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5235 LMI_HELPER(PCMPGTH
, pcmpgth
);
5236 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5237 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5239 LMI_HELPER(PSLLW
, psllw
);
5240 LMI_HELPER(PSLLH
, psllh
);
5241 LMI_HELPER(PSRLW
, psrlw
);
5242 LMI_HELPER(PSRLH
, psrlh
);
5243 LMI_HELPER(PSRAW
, psraw
);
5244 LMI_HELPER(PSRAH
, psrah
);
5246 LMI_HELPER(PMULLH
, pmullh
);
5247 LMI_HELPER(PMULHH
, pmulhh
);
5248 LMI_HELPER(PMULHUH
, pmulhuh
);
5249 LMI_HELPER(PMADDHW
, pmaddhw
);
5251 LMI_HELPER(PASUBUB
, pasubub
);
5252 LMI_HELPER_1(BIADD
, biadd
);
5253 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5255 LMI_DIRECT(PADDD
, paddd
, add
);
5256 LMI_DIRECT(PSUBD
, psubd
, sub
);
5257 LMI_DIRECT(XOR_CP2
, xor, xor);
5258 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5259 LMI_DIRECT(AND_CP2
, and, and);
5260 LMI_DIRECT(OR_CP2
, or, or);
5263 tcg_gen_andc_i64(t0
, t1
, t0
);
5267 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5270 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5273 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5276 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5280 tcg_gen_andi_i64(t1
, t1
, 3);
5281 tcg_gen_shli_i64(t1
, t1
, 4);
5282 tcg_gen_shr_i64(t0
, t0
, t1
);
5283 tcg_gen_ext16u_i64(t0
, t0
);
5287 tcg_gen_add_i64(t0
, t0
, t1
);
5288 tcg_gen_ext32s_i64(t0
, t0
);
5291 tcg_gen_sub_i64(t0
, t0
, t1
);
5292 tcg_gen_ext32s_i64(t0
, t0
);
5314 /* Make sure shift count isn't TCG undefined behaviour. */
5315 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5320 tcg_gen_shl_i64(t0
, t0
, t1
);
5324 /* Since SRA is UndefinedResult without sign-extended inputs,
5325 we can treat SRA and DSRA the same. */
5326 tcg_gen_sar_i64(t0
, t0
, t1
);
5329 /* We want to shift in zeros for SRL; zero-extend first. */
5330 tcg_gen_ext32u_i64(t0
, t0
);
5333 tcg_gen_shr_i64(t0
, t0
, t1
);
5337 if (shift_max
== 32) {
5338 tcg_gen_ext32s_i64(t0
, t0
);
5341 /* Shifts larger than MAX produce zero. */
5342 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5343 tcg_gen_neg_i64(t1
, t1
);
5344 tcg_gen_and_i64(t0
, t0
, t1
);
5350 TCGv_i64 t2
= tcg_temp_new_i64();
5351 TCGLabel
*lab
= gen_new_label();
5353 tcg_gen_mov_i64(t2
, t0
);
5354 tcg_gen_add_i64(t0
, t1
, t2
);
5355 if (opc
== OPC_ADD_CP2
) {
5356 tcg_gen_ext32s_i64(t0
, t0
);
5358 tcg_gen_xor_i64(t1
, t1
, t2
);
5359 tcg_gen_xor_i64(t2
, t2
, t0
);
5360 tcg_gen_andc_i64(t1
, t2
, t1
);
5361 tcg_temp_free_i64(t2
);
5362 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5363 generate_exception(ctx
, EXCP_OVERFLOW
);
5371 TCGv_i64 t2
= tcg_temp_new_i64();
5372 TCGLabel
*lab
= gen_new_label();
5374 tcg_gen_mov_i64(t2
, t0
);
5375 tcg_gen_sub_i64(t0
, t1
, t2
);
5376 if (opc
== OPC_SUB_CP2
) {
5377 tcg_gen_ext32s_i64(t0
, t0
);
5379 tcg_gen_xor_i64(t1
, t1
, t2
);
5380 tcg_gen_xor_i64(t2
, t2
, t0
);
5381 tcg_gen_and_i64(t1
, t1
, t2
);
5382 tcg_temp_free_i64(t2
);
5383 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5384 generate_exception(ctx
, EXCP_OVERFLOW
);
5390 tcg_gen_ext32u_i64(t0
, t0
);
5391 tcg_gen_ext32u_i64(t1
, t1
);
5392 tcg_gen_mul_i64(t0
, t0
, t1
);
5401 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5402 FD field is the CC field? */
5404 MIPS_INVAL("loongson_cp2");
5405 generate_exception_end(ctx
, EXCP_RI
);
5412 gen_store_fpr64(ctx
, t0
, rd
);
5414 tcg_temp_free_i64(t0
);
5415 tcg_temp_free_i64(t1
);
5419 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5420 int rs
, int rt
, int16_t imm
)
5423 TCGv t0
= tcg_temp_new();
5424 TCGv t1
= tcg_temp_new();
5427 /* Load needed operands */
5435 /* Compare two registers */
5437 gen_load_gpr(t0
, rs
);
5438 gen_load_gpr(t1
, rt
);
5448 /* Compare register to immediate */
5449 if (rs
!= 0 || imm
!= 0) {
5450 gen_load_gpr(t0
, rs
);
5451 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5458 case OPC_TEQ
: /* rs == rs */
5459 case OPC_TEQI
: /* r0 == 0 */
5460 case OPC_TGE
: /* rs >= rs */
5461 case OPC_TGEI
: /* r0 >= 0 */
5462 case OPC_TGEU
: /* rs >= rs unsigned */
5463 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5465 generate_exception_end(ctx
, EXCP_TRAP
);
5467 case OPC_TLT
: /* rs < rs */
5468 case OPC_TLTI
: /* r0 < 0 */
5469 case OPC_TLTU
: /* rs < rs unsigned */
5470 case OPC_TLTIU
: /* r0 < 0 unsigned */
5471 case OPC_TNE
: /* rs != rs */
5472 case OPC_TNEI
: /* r0 != 0 */
5473 /* Never trap: treat as NOP. */
5477 TCGLabel
*l1
= gen_new_label();
5482 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5486 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5490 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5494 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5498 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5502 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5505 generate_exception(ctx
, EXCP_TRAP
);
5512 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5514 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5518 #ifndef CONFIG_USER_ONLY
5519 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5525 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5527 if (use_goto_tb(ctx
, dest
)) {
5530 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5533 if (ctx
->base
.singlestep_enabled
) {
5534 save_cpu_state(ctx
, 0);
5535 gen_helper_raise_exception_debug(cpu_env
);
5537 tcg_gen_lookup_and_goto_ptr();
5541 /* Branches (before delay slot) */
5542 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5544 int rs
, int rt
, int32_t offset
,
5547 target_ulong btgt
= -1;
5549 int bcond_compute
= 0;
5550 TCGv t0
= tcg_temp_new();
5551 TCGv t1
= tcg_temp_new();
5553 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5554 #ifdef MIPS_DEBUG_DISAS
5555 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5556 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5558 generate_exception_end(ctx
, EXCP_RI
);
5562 /* Load needed operands */
5568 /* Compare two registers */
5570 gen_load_gpr(t0
, rs
);
5571 gen_load_gpr(t1
, rt
);
5574 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5588 /* Compare to zero */
5590 gen_load_gpr(t0
, rs
);
5593 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5596 #if defined(TARGET_MIPS64)
5598 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5600 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5603 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5608 /* Jump to immediate */
5609 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5614 /* Jump to register */
5615 if (offset
!= 0 && offset
!= 16) {
5616 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5617 others are reserved. */
5618 MIPS_INVAL("jump hint");
5619 generate_exception_end(ctx
, EXCP_RI
);
5622 gen_load_gpr(btarget
, rs
);
5625 MIPS_INVAL("branch/jump");
5626 generate_exception_end(ctx
, EXCP_RI
);
5629 if (bcond_compute
== 0) {
5630 /* No condition to be computed */
5632 case OPC_BEQ
: /* rx == rx */
5633 case OPC_BEQL
: /* rx == rx likely */
5634 case OPC_BGEZ
: /* 0 >= 0 */
5635 case OPC_BGEZL
: /* 0 >= 0 likely */
5636 case OPC_BLEZ
: /* 0 <= 0 */
5637 case OPC_BLEZL
: /* 0 <= 0 likely */
5639 ctx
->hflags
|= MIPS_HFLAG_B
;
5641 case OPC_BGEZAL
: /* 0 >= 0 */
5642 case OPC_BGEZALL
: /* 0 >= 0 likely */
5643 /* Always take and link */
5645 ctx
->hflags
|= MIPS_HFLAG_B
;
5647 case OPC_BNE
: /* rx != rx */
5648 case OPC_BGTZ
: /* 0 > 0 */
5649 case OPC_BLTZ
: /* 0 < 0 */
5652 case OPC_BLTZAL
: /* 0 < 0 */
5653 /* Handle as an unconditional branch to get correct delay
5656 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5657 ctx
->hflags
|= MIPS_HFLAG_B
;
5659 case OPC_BLTZALL
: /* 0 < 0 likely */
5660 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5661 /* Skip the instruction in the delay slot */
5662 ctx
->base
.pc_next
+= 4;
5664 case OPC_BNEL
: /* rx != rx likely */
5665 case OPC_BGTZL
: /* 0 > 0 likely */
5666 case OPC_BLTZL
: /* 0 < 0 likely */
5667 /* Skip the instruction in the delay slot */
5668 ctx
->base
.pc_next
+= 4;
5671 ctx
->hflags
|= MIPS_HFLAG_B
;
5674 ctx
->hflags
|= MIPS_HFLAG_BX
;
5678 ctx
->hflags
|= MIPS_HFLAG_B
;
5681 ctx
->hflags
|= MIPS_HFLAG_BR
;
5685 ctx
->hflags
|= MIPS_HFLAG_BR
;
5688 MIPS_INVAL("branch/jump");
5689 generate_exception_end(ctx
, EXCP_RI
);
5695 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5698 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5701 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5704 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5707 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5710 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5713 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5717 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5721 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5724 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5727 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5730 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5733 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5736 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5739 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5741 #if defined(TARGET_MIPS64)
5743 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5747 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5750 ctx
->hflags
|= MIPS_HFLAG_BC
;
5753 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5756 ctx
->hflags
|= MIPS_HFLAG_BL
;
5759 MIPS_INVAL("conditional branch/jump");
5760 generate_exception_end(ctx
, EXCP_RI
);
5765 ctx
->btarget
= btgt
;
5767 switch (delayslot_size
) {
5769 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5772 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5777 int post_delay
= insn_bytes
+ delayslot_size
;
5778 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5780 tcg_gen_movi_tl(cpu_gpr
[blink
],
5781 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5785 if (insn_bytes
== 2)
5786 ctx
->hflags
|= MIPS_HFLAG_B16
;
5792 /* nanoMIPS Branches */
5793 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5795 int rs
, int rt
, int32_t offset
)
5797 target_ulong btgt
= -1;
5798 int bcond_compute
= 0;
5799 TCGv t0
= tcg_temp_new();
5800 TCGv t1
= tcg_temp_new();
5802 /* Load needed operands */
5806 /* Compare two registers */
5808 gen_load_gpr(t0
, rs
);
5809 gen_load_gpr(t1
, rt
);
5812 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5815 /* Compare to zero */
5817 gen_load_gpr(t0
, rs
);
5820 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5823 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5825 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5829 /* Jump to register */
5830 if (offset
!= 0 && offset
!= 16) {
5831 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5832 others are reserved. */
5833 MIPS_INVAL("jump hint");
5834 generate_exception_end(ctx
, EXCP_RI
);
5837 gen_load_gpr(btarget
, rs
);
5840 MIPS_INVAL("branch/jump");
5841 generate_exception_end(ctx
, EXCP_RI
);
5844 if (bcond_compute
== 0) {
5845 /* No condition to be computed */
5847 case OPC_BEQ
: /* rx == rx */
5849 ctx
->hflags
|= MIPS_HFLAG_B
;
5851 case OPC_BGEZAL
: /* 0 >= 0 */
5852 /* Always take and link */
5853 tcg_gen_movi_tl(cpu_gpr
[31],
5854 ctx
->base
.pc_next
+ insn_bytes
);
5855 ctx
->hflags
|= MIPS_HFLAG_B
;
5857 case OPC_BNE
: /* rx != rx */
5858 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5859 /* Skip the instruction in the delay slot */
5860 ctx
->base
.pc_next
+= 4;
5863 ctx
->hflags
|= MIPS_HFLAG_BR
;
5867 tcg_gen_movi_tl(cpu_gpr
[rt
],
5868 ctx
->base
.pc_next
+ insn_bytes
);
5870 ctx
->hflags
|= MIPS_HFLAG_BR
;
5873 MIPS_INVAL("branch/jump");
5874 generate_exception_end(ctx
, EXCP_RI
);
5880 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5883 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5886 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5887 tcg_gen_movi_tl(cpu_gpr
[31],
5888 ctx
->base
.pc_next
+ insn_bytes
);
5891 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5893 ctx
->hflags
|= MIPS_HFLAG_BC
;
5896 MIPS_INVAL("conditional branch/jump");
5897 generate_exception_end(ctx
, EXCP_RI
);
5902 ctx
->btarget
= btgt
;
5905 if (insn_bytes
== 2) {
5906 ctx
->hflags
|= MIPS_HFLAG_B16
;
5913 /* special3 bitfield operations */
5914 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
5915 int rs
, int lsb
, int msb
)
5917 TCGv t0
= tcg_temp_new();
5918 TCGv t1
= tcg_temp_new();
5920 gen_load_gpr(t1
, rs
);
5923 if (lsb
+ msb
> 31) {
5927 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5929 /* The two checks together imply that lsb == 0,
5930 so this is a simple sign-extension. */
5931 tcg_gen_ext32s_tl(t0
, t1
);
5934 #if defined(TARGET_MIPS64)
5943 if (lsb
+ msb
> 63) {
5946 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5953 gen_load_gpr(t0
, rt
);
5954 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5955 tcg_gen_ext32s_tl(t0
, t0
);
5957 #if defined(TARGET_MIPS64)
5968 gen_load_gpr(t0
, rt
);
5969 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5974 MIPS_INVAL("bitops");
5975 generate_exception_end(ctx
, EXCP_RI
);
5980 gen_store_gpr(t0
, rt
);
5985 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
5990 /* If no destination, treat it as a NOP. */
5994 t0
= tcg_temp_new();
5995 gen_load_gpr(t0
, rt
);
5999 TCGv t1
= tcg_temp_new();
6000 TCGv t2
= tcg_const_tl(0x00FF00FF);
6002 tcg_gen_shri_tl(t1
, t0
, 8);
6003 tcg_gen_and_tl(t1
, t1
, t2
);
6004 tcg_gen_and_tl(t0
, t0
, t2
);
6005 tcg_gen_shli_tl(t0
, t0
, 8);
6006 tcg_gen_or_tl(t0
, t0
, t1
);
6009 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6013 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6016 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6018 #if defined(TARGET_MIPS64)
6021 TCGv t1
= tcg_temp_new();
6022 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6024 tcg_gen_shri_tl(t1
, t0
, 8);
6025 tcg_gen_and_tl(t1
, t1
, t2
);
6026 tcg_gen_and_tl(t0
, t0
, t2
);
6027 tcg_gen_shli_tl(t0
, t0
, 8);
6028 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6035 TCGv t1
= tcg_temp_new();
6036 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6038 tcg_gen_shri_tl(t1
, t0
, 16);
6039 tcg_gen_and_tl(t1
, t1
, t2
);
6040 tcg_gen_and_tl(t0
, t0
, t2
);
6041 tcg_gen_shli_tl(t0
, t0
, 16);
6042 tcg_gen_or_tl(t0
, t0
, t1
);
6043 tcg_gen_shri_tl(t1
, t0
, 32);
6044 tcg_gen_shli_tl(t0
, t0
, 32);
6045 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6052 MIPS_INVAL("bsfhl");
6053 generate_exception_end(ctx
, EXCP_RI
);
6060 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6069 t0
= tcg_temp_new();
6070 t1
= tcg_temp_new();
6071 gen_load_gpr(t0
, rs
);
6072 gen_load_gpr(t1
, rt
);
6073 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6074 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6075 if (opc
== OPC_LSA
) {
6076 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6085 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6093 t0
= tcg_temp_new();
6094 if (bits
== 0 || bits
== wordsz
) {
6096 gen_load_gpr(t0
, rt
);
6098 gen_load_gpr(t0
, rs
);
6102 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6104 #if defined(TARGET_MIPS64)
6106 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6111 TCGv t1
= tcg_temp_new();
6112 gen_load_gpr(t0
, rt
);
6113 gen_load_gpr(t1
, rs
);
6117 TCGv_i64 t2
= tcg_temp_new_i64();
6118 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6119 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6120 gen_move_low32(cpu_gpr
[rd
], t2
);
6121 tcg_temp_free_i64(t2
);
6124 #if defined(TARGET_MIPS64)
6126 tcg_gen_shli_tl(t0
, t0
, bits
);
6127 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6128 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6138 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6141 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6144 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6147 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6150 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6157 t0
= tcg_temp_new();
6158 gen_load_gpr(t0
, rt
);
6161 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6163 #if defined(TARGET_MIPS64)
6165 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6172 #ifndef CONFIG_USER_ONLY
6173 /* CP0 (MMU and control) */
6174 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6176 TCGv_i64 t0
= tcg_temp_new_i64();
6177 TCGv_i64 t1
= tcg_temp_new_i64();
6179 tcg_gen_ext_tl_i64(t0
, arg
);
6180 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6181 #if defined(TARGET_MIPS64)
6182 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6184 tcg_gen_concat32_i64(t1
, t1
, t0
);
6186 tcg_gen_st_i64(t1
, cpu_env
, off
);
6187 tcg_temp_free_i64(t1
);
6188 tcg_temp_free_i64(t0
);
6191 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6193 TCGv_i64 t0
= tcg_temp_new_i64();
6194 TCGv_i64 t1
= tcg_temp_new_i64();
6196 tcg_gen_ext_tl_i64(t0
, arg
);
6197 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6198 tcg_gen_concat32_i64(t1
, t1
, t0
);
6199 tcg_gen_st_i64(t1
, cpu_env
, off
);
6200 tcg_temp_free_i64(t1
);
6201 tcg_temp_free_i64(t0
);
6204 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6206 TCGv_i64 t0
= tcg_temp_new_i64();
6208 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6209 #if defined(TARGET_MIPS64)
6210 tcg_gen_shri_i64(t0
, t0
, 30);
6212 tcg_gen_shri_i64(t0
, t0
, 32);
6214 gen_move_low32(arg
, t0
);
6215 tcg_temp_free_i64(t0
);
6218 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6220 TCGv_i64 t0
= tcg_temp_new_i64();
6222 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6223 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6224 gen_move_low32(arg
, t0
);
6225 tcg_temp_free_i64(t0
);
6228 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6230 TCGv_i32 t0
= tcg_temp_new_i32();
6232 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6233 tcg_gen_ext_i32_tl(arg
, t0
);
6234 tcg_temp_free_i32(t0
);
6237 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6239 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6240 tcg_gen_ext32s_tl(arg
, arg
);
6243 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6245 TCGv_i32 t0
= tcg_temp_new_i32();
6247 tcg_gen_trunc_tl_i32(t0
, arg
);
6248 tcg_gen_st_i32(t0
, cpu_env
, off
);
6249 tcg_temp_free_i32(t0
);
/* Guard for optional CP0 features: if the condition is false, bail out
   to the cp0_unimplemented label of the enclosing gen_m*c0 function. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
6259 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6261 const char *rn
= "invalid";
6267 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6268 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6272 goto cp0_unimplemented
;
6278 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6279 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6283 goto cp0_unimplemented
;
6289 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6290 ctx
->CP0_LLAddr_shift
);
6294 CP0_CHECK(ctx
->mrp
);
6295 gen_helper_mfhc0_maar(arg
, cpu_env
);
6299 goto cp0_unimplemented
;
6308 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6312 goto cp0_unimplemented
;
6316 goto cp0_unimplemented
;
6318 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6322 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6323 tcg_gen_movi_tl(arg
, 0);
6326 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6328 const char *rn
= "invalid";
6329 uint64_t mask
= ctx
->PAMask
>> 36;
6335 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6336 tcg_gen_andi_tl(arg
, arg
, mask
);
6337 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6341 goto cp0_unimplemented
;
6347 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6348 tcg_gen_andi_tl(arg
, arg
, mask
);
6349 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6353 goto cp0_unimplemented
;
6359 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6360 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6361 relevant for modern MIPS cores supporting MTHC0, therefore
6362 treating MTHC0 to LLAddr as NOP. */
6366 CP0_CHECK(ctx
->mrp
);
6367 gen_helper_mthc0_maar(cpu_env
, arg
);
6371 goto cp0_unimplemented
;
6380 tcg_gen_andi_tl(arg
, arg
, mask
);
6381 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6385 goto cp0_unimplemented
;
6389 goto cp0_unimplemented
;
6391 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6394 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6397 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6399 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6400 tcg_gen_movi_tl(arg
, 0);
6402 tcg_gen_movi_tl(arg
, ~0);
6406 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6408 const char *rn
= "invalid";
6411 check_insn(ctx
, ISA_MIPS32
);
6417 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6421 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6422 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6426 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6427 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6431 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6432 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6437 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6441 goto cp0_unimplemented
;
6447 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6448 gen_helper_mfc0_random(arg
, cpu_env
);
6452 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6453 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6457 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6458 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6462 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6463 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6467 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6468 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6472 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6473 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6477 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6478 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6479 rn
= "VPEScheFBack";
6482 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6483 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6487 goto cp0_unimplemented
;
6494 TCGv_i64 tmp
= tcg_temp_new_i64();
6495 tcg_gen_ld_i64(tmp
, cpu_env
,
6496 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6497 #if defined(TARGET_MIPS64)
6499 /* Move RI/XI fields to bits 31:30 */
6500 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6501 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6504 gen_move_low32(arg
, tmp
);
6505 tcg_temp_free_i64(tmp
);
6510 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6511 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6515 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6516 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6520 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6521 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6525 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6526 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6530 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6531 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6535 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6536 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6540 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6541 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6545 goto cp0_unimplemented
;
6552 TCGv_i64 tmp
= tcg_temp_new_i64();
6553 tcg_gen_ld_i64(tmp
, cpu_env
,
6554 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6555 #if defined(TARGET_MIPS64)
6557 /* Move RI/XI fields to bits 31:30 */
6558 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6559 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6562 gen_move_low32(arg
, tmp
);
6563 tcg_temp_free_i64(tmp
);
6569 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6570 rn
= "GlobalNumber";
6573 goto cp0_unimplemented
;
6579 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6580 tcg_gen_ext32s_tl(arg
, arg
);
6584 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6585 rn
= "ContextConfig";
6586 goto cp0_unimplemented
;
6588 CP0_CHECK(ctx
->ulri
);
6589 tcg_gen_ld_tl(arg
, cpu_env
,
6590 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6591 tcg_gen_ext32s_tl(arg
, arg
);
6595 goto cp0_unimplemented
;
6601 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6605 check_insn(ctx
, ISA_MIPS32R2
);
6606 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6611 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6612 tcg_gen_ext32s_tl(arg
, arg
);
6617 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6618 tcg_gen_ext32s_tl(arg
, arg
);
6623 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6624 tcg_gen_ext32s_tl(arg
, arg
);
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6634 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6639 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6643 goto cp0_unimplemented
;
6649 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6653 check_insn(ctx
, ISA_MIPS32R2
);
6654 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6658 check_insn(ctx
, ISA_MIPS32R2
);
6659 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6663 check_insn(ctx
, ISA_MIPS32R2
);
6664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6668 check_insn(ctx
, ISA_MIPS32R2
);
6669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6673 check_insn(ctx
, ISA_MIPS32R2
);
6674 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6679 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6683 goto cp0_unimplemented
;
6689 check_insn(ctx
, ISA_MIPS32R2
);
6690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6694 goto cp0_unimplemented
;
6700 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6701 tcg_gen_ext32s_tl(arg
, arg
);
6706 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6711 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6716 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6717 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6721 goto cp0_unimplemented
;
6727 /* Mark as an IO operation because we read the time. */
6728 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6731 gen_helper_mfc0_count(arg
, cpu_env
);
6732 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6735 /* Break the TB to be able to take timer interrupts immediately
6736 after reading count. DISAS_STOP isn't sufficient, we need to
6737 ensure we break completely out of translated code. */
6738 gen_save_pc(ctx
->base
.pc_next
+ 4);
6739 ctx
->base
.is_jmp
= DISAS_EXIT
;
6742 /* 6,7 are implementation dependent */
6744 goto cp0_unimplemented
;
6750 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6751 tcg_gen_ext32s_tl(arg
, arg
);
6755 goto cp0_unimplemented
;
6761 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6764 /* 6,7 are implementation dependent */
6766 goto cp0_unimplemented
;
6772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6776 check_insn(ctx
, ISA_MIPS32R2
);
6777 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6781 check_insn(ctx
, ISA_MIPS32R2
);
6782 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6786 check_insn(ctx
, ISA_MIPS32R2
);
6787 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6791 goto cp0_unimplemented
;
6797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6801 goto cp0_unimplemented
;
6807 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6808 tcg_gen_ext32s_tl(arg
, arg
);
6812 goto cp0_unimplemented
;
6818 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6822 check_insn(ctx
, ISA_MIPS32R2
);
6823 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6824 tcg_gen_ext32s_tl(arg
, arg
);
6828 check_insn(ctx
, ISA_MIPS32R2
);
6829 CP0_CHECK(ctx
->cmgcr
);
6830 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6831 tcg_gen_ext32s_tl(arg
, arg
);
6835 goto cp0_unimplemented
;
6841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6845 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6849 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6853 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6857 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6861 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6864 /* 6,7 are implementation dependent */
6866 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6870 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6874 goto cp0_unimplemented
;
6880 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6884 CP0_CHECK(ctx
->mrp
);
6885 gen_helper_mfc0_maar(arg
, cpu_env
);
6889 CP0_CHECK(ctx
->mrp
);
6890 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6894 goto cp0_unimplemented
;
6907 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6908 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6912 goto cp0_unimplemented
;
6925 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6926 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6930 goto cp0_unimplemented
;
6936 #if defined(TARGET_MIPS64)
6937 check_insn(ctx
, ISA_MIPS3
);
6938 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6939 tcg_gen_ext32s_tl(arg
, arg
);
6944 goto cp0_unimplemented
;
6948 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6949 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6952 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6956 goto cp0_unimplemented
;
6960 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6961 rn
= "'Diagnostic"; /* implementation dependent */
6966 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6970 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
6971 rn
= "TraceControl";
6972 goto cp0_unimplemented
;
6974 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
6975 rn
= "TraceControl2";
6976 goto cp0_unimplemented
;
6978 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
6979 rn
= "UserTraceData";
6980 goto cp0_unimplemented
;
6982 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
6984 goto cp0_unimplemented
;
6986 goto cp0_unimplemented
;
6993 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6994 tcg_gen_ext32s_tl(arg
, arg
);
6998 goto cp0_unimplemented
;
7004 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7005 rn
= "Performance0";
7008 // gen_helper_mfc0_performance1(arg);
7009 rn
= "Performance1";
7010 goto cp0_unimplemented
;
7012 // gen_helper_mfc0_performance2(arg);
7013 rn
= "Performance2";
7014 goto cp0_unimplemented
;
7016 // gen_helper_mfc0_performance3(arg);
7017 rn
= "Performance3";
7018 goto cp0_unimplemented
;
7020 // gen_helper_mfc0_performance4(arg);
7021 rn
= "Performance4";
7022 goto cp0_unimplemented
;
7024 // gen_helper_mfc0_performance5(arg);
7025 rn
= "Performance5";
7026 goto cp0_unimplemented
;
7028 // gen_helper_mfc0_performance6(arg);
7029 rn
= "Performance6";
7030 goto cp0_unimplemented
;
7032 // gen_helper_mfc0_performance7(arg);
7033 rn
= "Performance7";
7034 goto cp0_unimplemented
;
7036 goto cp0_unimplemented
;
7042 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7046 goto cp0_unimplemented
;
7055 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7059 goto cp0_unimplemented
;
7069 TCGv_i64 tmp
= tcg_temp_new_i64();
7070 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7071 gen_move_low32(arg
, tmp
);
7072 tcg_temp_free_i64(tmp
);
7080 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7084 goto cp0_unimplemented
;
7093 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7104 goto cp0_unimplemented
;
7110 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7111 tcg_gen_ext32s_tl(arg
, arg
);
7115 goto cp0_unimplemented
;
7122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7131 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7132 tcg_gen_ld_tl(arg
, cpu_env
,
7133 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7134 tcg_gen_ext32s_tl(arg
, arg
);
7138 goto cp0_unimplemented
;
7142 goto cp0_unimplemented
;
7144 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7148 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7149 gen_mfc0_unimplemented(ctx
, arg
);
7152 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7154 const char *rn
= "invalid";
7157 check_insn(ctx
, ISA_MIPS32
);
7159 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7167 gen_helper_mtc0_index(cpu_env
, arg
);
7171 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7172 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7176 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7181 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7191 goto cp0_unimplemented
;
7201 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7202 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7206 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7207 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7211 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7212 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7216 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7217 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7221 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7222 tcg_gen_st_tl(arg
, cpu_env
,
7223 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7227 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7228 tcg_gen_st_tl(arg
, cpu_env
,
7229 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7230 rn
= "VPEScheFBack";
7233 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7234 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7238 goto cp0_unimplemented
;
7244 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7248 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7249 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7254 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7258 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7259 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7263 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7264 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7268 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7269 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7273 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7274 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7278 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7279 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7283 goto cp0_unimplemented
;
7289 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7295 rn
= "GlobalNumber";
7298 goto cp0_unimplemented
;
7304 gen_helper_mtc0_context(cpu_env
, arg
);
7308 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7309 rn
= "ContextConfig";
7310 goto cp0_unimplemented
;
7312 CP0_CHECK(ctx
->ulri
);
7313 tcg_gen_st_tl(arg
, cpu_env
,
7314 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7318 goto cp0_unimplemented
;
7324 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7328 check_insn(ctx
, ISA_MIPS32R2
);
7329 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7331 ctx
->base
.is_jmp
= DISAS_STOP
;
7335 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7340 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7345 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7350 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7355 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7360 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7364 goto cp0_unimplemented
;
7370 gen_helper_mtc0_wired(cpu_env
, arg
);
7374 check_insn(ctx
, ISA_MIPS32R2
);
7375 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7379 check_insn(ctx
, ISA_MIPS32R2
);
7380 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7384 check_insn(ctx
, ISA_MIPS32R2
);
7385 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7389 check_insn(ctx
, ISA_MIPS32R2
);
7390 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7394 check_insn(ctx
, ISA_MIPS32R2
);
7395 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7400 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7404 goto cp0_unimplemented
;
7410 check_insn(ctx
, ISA_MIPS32R2
);
7411 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7412 ctx
->base
.is_jmp
= DISAS_STOP
;
7416 goto cp0_unimplemented
;
7438 goto cp0_unimplemented
;
7444 gen_helper_mtc0_count(cpu_env
, arg
);
7447 /* 6,7 are implementation dependent */
7449 goto cp0_unimplemented
;
7455 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7459 goto cp0_unimplemented
;
7465 gen_helper_mtc0_compare(cpu_env
, arg
);
7468 /* 6,7 are implementation dependent */
7470 goto cp0_unimplemented
;
7476 save_cpu_state(ctx
, 1);
7477 gen_helper_mtc0_status(cpu_env
, arg
);
7478 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7479 gen_save_pc(ctx
->base
.pc_next
+ 4);
7480 ctx
->base
.is_jmp
= DISAS_EXIT
;
7484 check_insn(ctx
, ISA_MIPS32R2
);
7485 gen_helper_mtc0_intctl(cpu_env
, arg
);
7486 /* Stop translation as we may have switched the execution mode */
7487 ctx
->base
.is_jmp
= DISAS_STOP
;
7491 check_insn(ctx
, ISA_MIPS32R2
);
7492 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7493 /* Stop translation as we may have switched the execution mode */
7494 ctx
->base
.is_jmp
= DISAS_STOP
;
7498 check_insn(ctx
, ISA_MIPS32R2
);
7499 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7500 /* Stop translation as we may have switched the execution mode */
7501 ctx
->base
.is_jmp
= DISAS_STOP
;
7505 goto cp0_unimplemented
;
7511 save_cpu_state(ctx
, 1);
7512 gen_helper_mtc0_cause(cpu_env
, arg
);
7513 /* Stop translation as we may have triggered an interrupt.
7514 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7515 * translated code to check for pending interrupts. */
7516 gen_save_pc(ctx
->base
.pc_next
+ 4);
7517 ctx
->base
.is_jmp
= DISAS_EXIT
;
7521 goto cp0_unimplemented
;
7527 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7531 goto cp0_unimplemented
;
7541 check_insn(ctx
, ISA_MIPS32R2
);
7542 gen_helper_mtc0_ebase(cpu_env
, arg
);
7546 goto cp0_unimplemented
;
7552 gen_helper_mtc0_config0(cpu_env
, arg
);
7554 /* Stop translation as we may have switched the execution mode */
7555 ctx
->base
.is_jmp
= DISAS_STOP
;
7558 /* ignored, read only */
7562 gen_helper_mtc0_config2(cpu_env
, arg
);
7564 /* Stop translation as we may have switched the execution mode */
7565 ctx
->base
.is_jmp
= DISAS_STOP
;
7568 gen_helper_mtc0_config3(cpu_env
, arg
);
7570 /* Stop translation as we may have switched the execution mode */
7571 ctx
->base
.is_jmp
= DISAS_STOP
;
7574 gen_helper_mtc0_config4(cpu_env
, arg
);
7576 ctx
->base
.is_jmp
= DISAS_STOP
;
7579 gen_helper_mtc0_config5(cpu_env
, arg
);
7581 /* Stop translation as we may have switched the execution mode */
7582 ctx
->base
.is_jmp
= DISAS_STOP
;
7584 /* 6,7 are implementation dependent */
7594 rn
= "Invalid config selector";
7595 goto cp0_unimplemented
;
7601 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7605 CP0_CHECK(ctx
->mrp
);
7606 gen_helper_mtc0_maar(cpu_env
, arg
);
7610 CP0_CHECK(ctx
->mrp
);
7611 gen_helper_mtc0_maari(cpu_env
, arg
);
7615 goto cp0_unimplemented
;
7628 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7629 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7633 goto cp0_unimplemented
;
7646 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7647 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7651 goto cp0_unimplemented
;
7657 #if defined(TARGET_MIPS64)
7658 check_insn(ctx
, ISA_MIPS3
);
7659 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7664 goto cp0_unimplemented
;
7668 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7669 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7672 gen_helper_mtc0_framemask(cpu_env
, arg
);
7676 goto cp0_unimplemented
;
7681 rn
= "Diagnostic"; /* implementation dependent */
7686 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7687 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7688 gen_save_pc(ctx
->base
.pc_next
+ 4);
7689 ctx
->base
.is_jmp
= DISAS_EXIT
;
7693 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7694 rn
= "TraceControl";
7695 /* Stop translation as we may have switched the execution mode */
7696 ctx
->base
.is_jmp
= DISAS_STOP
;
7697 goto cp0_unimplemented
;
7699 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7700 rn
= "TraceControl2";
7701 /* Stop translation as we may have switched the execution mode */
7702 ctx
->base
.is_jmp
= DISAS_STOP
;
7703 goto cp0_unimplemented
;
7705 /* Stop translation as we may have switched the execution mode */
7706 ctx
->base
.is_jmp
= DISAS_STOP
;
7707 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7708 rn
= "UserTraceData";
7709 /* Stop translation as we may have switched the execution mode */
7710 ctx
->base
.is_jmp
= DISAS_STOP
;
7711 goto cp0_unimplemented
;
7713 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7714 /* Stop translation as we may have switched the execution mode */
7715 ctx
->base
.is_jmp
= DISAS_STOP
;
7717 goto cp0_unimplemented
;
7719 goto cp0_unimplemented
;
7726 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7730 goto cp0_unimplemented
;
7736 gen_helper_mtc0_performance0(cpu_env
, arg
);
7737 rn
= "Performance0";
7740 // gen_helper_mtc0_performance1(arg);
7741 rn
= "Performance1";
7742 goto cp0_unimplemented
;
7744 // gen_helper_mtc0_performance2(arg);
7745 rn
= "Performance2";
7746 goto cp0_unimplemented
;
7748 // gen_helper_mtc0_performance3(arg);
7749 rn
= "Performance3";
7750 goto cp0_unimplemented
;
7752 // gen_helper_mtc0_performance4(arg);
7753 rn
= "Performance4";
7754 goto cp0_unimplemented
;
7756 // gen_helper_mtc0_performance5(arg);
7757 rn
= "Performance5";
7758 goto cp0_unimplemented
;
7760 // gen_helper_mtc0_performance6(arg);
7761 rn
= "Performance6";
7762 goto cp0_unimplemented
;
7764 // gen_helper_mtc0_performance7(arg);
7765 rn
= "Performance7";
7766 goto cp0_unimplemented
;
7768 goto cp0_unimplemented
;
7774 gen_helper_mtc0_errctl(cpu_env
, arg
);
7775 ctx
->base
.is_jmp
= DISAS_STOP
;
7779 goto cp0_unimplemented
;
7792 goto cp0_unimplemented
;
7801 gen_helper_mtc0_taglo(cpu_env
, arg
);
7808 gen_helper_mtc0_datalo(cpu_env
, arg
);
7812 goto cp0_unimplemented
;
7821 gen_helper_mtc0_taghi(cpu_env
, arg
);
7828 gen_helper_mtc0_datahi(cpu_env
, arg
);
7833 goto cp0_unimplemented
;
7839 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7843 goto cp0_unimplemented
;
7850 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7859 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7860 tcg_gen_st_tl(arg
, cpu_env
,
7861 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7865 goto cp0_unimplemented
;
7869 goto cp0_unimplemented
;
7871 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7873 /* For simplicity assume that all writes can cause interrupts. */
7874 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7876 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7877 * translated code to check for pending interrupts. */
7878 gen_save_pc(ctx
->base
.pc_next
+ 4);
7879 ctx
->base
.is_jmp
= DISAS_EXIT
;
7884 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7887 #if defined(TARGET_MIPS64)
7888 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7890 const char *rn
= "invalid";
7893 check_insn(ctx
, ISA_MIPS64
);
7899 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7904 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7909 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7914 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7919 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7923 goto cp0_unimplemented
;
7929 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7930 gen_helper_mfc0_random(arg
, cpu_env
);
7934 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7935 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7939 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7940 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7944 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7945 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7949 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7950 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
7954 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7955 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7959 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7960 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7961 rn
= "VPEScheFBack";
7964 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7969 goto cp0_unimplemented
;
7975 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
7979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7980 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7985 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7989 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7990 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7994 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7995 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7999 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8000 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8004 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8005 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8009 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8010 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8014 goto cp0_unimplemented
;
8020 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8025 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8026 rn
= "GlobalNumber";
8029 goto cp0_unimplemented
;
8035 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8039 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8040 rn
= "ContextConfig";
8041 goto cp0_unimplemented
;
8043 CP0_CHECK(ctx
->ulri
);
8044 tcg_gen_ld_tl(arg
, cpu_env
,
8045 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8049 goto cp0_unimplemented
;
8055 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8059 check_insn(ctx
, ISA_MIPS32R2
);
8060 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8065 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8070 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8075 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8080 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8085 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8090 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8094 goto cp0_unimplemented
;
8100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8104 check_insn(ctx
, ISA_MIPS32R2
);
8105 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8109 check_insn(ctx
, ISA_MIPS32R2
);
8110 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8114 check_insn(ctx
, ISA_MIPS32R2
);
8115 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8119 check_insn(ctx
, ISA_MIPS32R2
);
8120 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8124 check_insn(ctx
, ISA_MIPS32R2
);
8125 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8130 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8134 goto cp0_unimplemented
;
8140 check_insn(ctx
, ISA_MIPS32R2
);
8141 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8145 goto cp0_unimplemented
;
8151 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8156 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8161 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8166 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8167 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8171 goto cp0_unimplemented
;
8177 /* Mark as an IO operation because we read the time. */
8178 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8181 gen_helper_mfc0_count(arg
, cpu_env
);
8182 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8185 /* Break the TB to be able to take timer interrupts immediately
8186 after reading count. DISAS_STOP isn't sufficient, we need to
8187 ensure we break completely out of translated code. */
8188 gen_save_pc(ctx
->base
.pc_next
+ 4);
8189 ctx
->base
.is_jmp
= DISAS_EXIT
;
8192 /* 6,7 are implementation dependent */
8194 goto cp0_unimplemented
;
8200 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8204 goto cp0_unimplemented
;
8210 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8213 /* 6,7 are implementation dependent */
8215 goto cp0_unimplemented
;
8221 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8225 check_insn(ctx
, ISA_MIPS32R2
);
8226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8230 check_insn(ctx
, ISA_MIPS32R2
);
8231 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8235 check_insn(ctx
, ISA_MIPS32R2
);
8236 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8240 goto cp0_unimplemented
;
8246 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8250 goto cp0_unimplemented
;
8256 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8260 goto cp0_unimplemented
;
8266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8270 check_insn(ctx
, ISA_MIPS32R2
);
8271 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8275 check_insn(ctx
, ISA_MIPS32R2
);
8276 CP0_CHECK(ctx
->cmgcr
);
8277 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8281 goto cp0_unimplemented
;
8287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8310 /* 6,7 are implementation dependent */
8312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8320 goto cp0_unimplemented
;
8326 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8330 CP0_CHECK(ctx
->mrp
);
8331 gen_helper_dmfc0_maar(arg
, cpu_env
);
8335 CP0_CHECK(ctx
->mrp
);
8336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8340 goto cp0_unimplemented
;
8353 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8354 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8358 goto cp0_unimplemented
;
8371 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8372 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8376 goto cp0_unimplemented
;
8382 check_insn(ctx
, ISA_MIPS3
);
8383 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8387 goto cp0_unimplemented
;
8391 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8392 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8395 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8399 goto cp0_unimplemented
;
8403 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8404 rn
= "'Diagnostic"; /* implementation dependent */
8409 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8413 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8414 rn
= "TraceControl";
8415 goto cp0_unimplemented
;
8417 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8418 rn
= "TraceControl2";
8419 goto cp0_unimplemented
;
8421 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8422 rn
= "UserTraceData";
8423 goto cp0_unimplemented
;
8425 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8427 goto cp0_unimplemented
;
8429 goto cp0_unimplemented
;
8436 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8440 goto cp0_unimplemented
;
8446 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8447 rn
= "Performance0";
8450 // gen_helper_dmfc0_performance1(arg);
8451 rn
= "Performance1";
8452 goto cp0_unimplemented
;
8454 // gen_helper_dmfc0_performance2(arg);
8455 rn
= "Performance2";
8456 goto cp0_unimplemented
;
8458 // gen_helper_dmfc0_performance3(arg);
8459 rn
= "Performance3";
8460 goto cp0_unimplemented
;
8462 // gen_helper_dmfc0_performance4(arg);
8463 rn
= "Performance4";
8464 goto cp0_unimplemented
;
8466 // gen_helper_dmfc0_performance5(arg);
8467 rn
= "Performance5";
8468 goto cp0_unimplemented
;
8470 // gen_helper_dmfc0_performance6(arg);
8471 rn
= "Performance6";
8472 goto cp0_unimplemented
;
8474 // gen_helper_dmfc0_performance7(arg);
8475 rn
= "Performance7";
8476 goto cp0_unimplemented
;
8478 goto cp0_unimplemented
;
8484 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8488 goto cp0_unimplemented
;
8498 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8502 goto cp0_unimplemented
;
8511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8518 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8522 goto cp0_unimplemented
;
8531 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8538 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8542 goto cp0_unimplemented
;
8548 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8552 goto cp0_unimplemented
;
8559 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8568 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8569 tcg_gen_ld_tl(arg
, cpu_env
,
8570 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8574 goto cp0_unimplemented
;
8578 goto cp0_unimplemented
;
8580 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8584 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8585 gen_mfc0_unimplemented(ctx
, arg
);
8588 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8590 const char *rn
= "invalid";
8593 check_insn(ctx
, ISA_MIPS64
);
8595 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8603 gen_helper_mtc0_index(cpu_env
, arg
);
8607 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8608 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8612 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8617 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8627 goto cp0_unimplemented
;
8637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8638 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8643 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8647 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8648 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8652 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8653 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8658 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8663 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8664 rn
= "VPEScheFBack";
8667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8668 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8672 goto cp0_unimplemented
;
8678 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8682 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8683 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8687 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8688 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8692 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8693 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8697 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8698 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8702 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8703 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8707 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8708 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8712 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8713 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8717 goto cp0_unimplemented
;
8723 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8729 rn
= "GlobalNumber";
8732 goto cp0_unimplemented
;
8738 gen_helper_mtc0_context(cpu_env
, arg
);
8742 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8743 rn
= "ContextConfig";
8744 goto cp0_unimplemented
;
8746 CP0_CHECK(ctx
->ulri
);
8747 tcg_gen_st_tl(arg
, cpu_env
,
8748 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8752 goto cp0_unimplemented
;
8758 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8762 check_insn(ctx
, ISA_MIPS32R2
);
8763 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8768 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8773 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8778 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8783 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8788 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8793 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8797 goto cp0_unimplemented
;
8803 gen_helper_mtc0_wired(cpu_env
, arg
);
8807 check_insn(ctx
, ISA_MIPS32R2
);
8808 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8812 check_insn(ctx
, ISA_MIPS32R2
);
8813 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8817 check_insn(ctx
, ISA_MIPS32R2
);
8818 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8822 check_insn(ctx
, ISA_MIPS32R2
);
8823 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8827 check_insn(ctx
, ISA_MIPS32R2
);
8828 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8833 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8837 goto cp0_unimplemented
;
8843 check_insn(ctx
, ISA_MIPS32R2
);
8844 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8845 ctx
->base
.is_jmp
= DISAS_STOP
;
8849 goto cp0_unimplemented
;
8871 goto cp0_unimplemented
;
8877 gen_helper_mtc0_count(cpu_env
, arg
);
8880 /* 6,7 are implementation dependent */
8882 goto cp0_unimplemented
;
8884 /* Stop translation as we may have switched the execution mode */
8885 ctx
->base
.is_jmp
= DISAS_STOP
;
8890 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8894 goto cp0_unimplemented
;
8900 gen_helper_mtc0_compare(cpu_env
, arg
);
8903 /* 6,7 are implementation dependent */
8905 goto cp0_unimplemented
;
8907 /* Stop translation as we may have switched the execution mode */
8908 ctx
->base
.is_jmp
= DISAS_STOP
;
8913 save_cpu_state(ctx
, 1);
8914 gen_helper_mtc0_status(cpu_env
, arg
);
8915 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8916 gen_save_pc(ctx
->base
.pc_next
+ 4);
8917 ctx
->base
.is_jmp
= DISAS_EXIT
;
8921 check_insn(ctx
, ISA_MIPS32R2
);
8922 gen_helper_mtc0_intctl(cpu_env
, arg
);
8923 /* Stop translation as we may have switched the execution mode */
8924 ctx
->base
.is_jmp
= DISAS_STOP
;
8928 check_insn(ctx
, ISA_MIPS32R2
);
8929 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8930 /* Stop translation as we may have switched the execution mode */
8931 ctx
->base
.is_jmp
= DISAS_STOP
;
8935 check_insn(ctx
, ISA_MIPS32R2
);
8936 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8937 /* Stop translation as we may have switched the execution mode */
8938 ctx
->base
.is_jmp
= DISAS_STOP
;
8942 goto cp0_unimplemented
;
8948 save_cpu_state(ctx
, 1);
8949 gen_helper_mtc0_cause(cpu_env
, arg
);
8950 /* Stop translation as we may have triggered an interrupt.
8951 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8952 * translated code to check for pending interrupts. */
8953 gen_save_pc(ctx
->base
.pc_next
+ 4);
8954 ctx
->base
.is_jmp
= DISAS_EXIT
;
8958 goto cp0_unimplemented
;
8964 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8968 goto cp0_unimplemented
;
8978 check_insn(ctx
, ISA_MIPS32R2
);
8979 gen_helper_mtc0_ebase(cpu_env
, arg
);
8983 goto cp0_unimplemented
;
8989 gen_helper_mtc0_config0(cpu_env
, arg
);
8991 /* Stop translation as we may have switched the execution mode */
8992 ctx
->base
.is_jmp
= DISAS_STOP
;
8995 /* ignored, read only */
8999 gen_helper_mtc0_config2(cpu_env
, arg
);
9001 /* Stop translation as we may have switched the execution mode */
9002 ctx
->base
.is_jmp
= DISAS_STOP
;
9005 gen_helper_mtc0_config3(cpu_env
, arg
);
9007 /* Stop translation as we may have switched the execution mode */
9008 ctx
->base
.is_jmp
= DISAS_STOP
;
9011 /* currently ignored */
9015 gen_helper_mtc0_config5(cpu_env
, arg
);
9017 /* Stop translation as we may have switched the execution mode */
9018 ctx
->base
.is_jmp
= DISAS_STOP
;
9020 /* 6,7 are implementation dependent */
9022 rn
= "Invalid config selector";
9023 goto cp0_unimplemented
;
9029 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9033 CP0_CHECK(ctx
->mrp
);
9034 gen_helper_mtc0_maar(cpu_env
, arg
);
9038 CP0_CHECK(ctx
->mrp
);
9039 gen_helper_mtc0_maari(cpu_env
, arg
);
9043 goto cp0_unimplemented
;
9056 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9057 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9061 goto cp0_unimplemented
;
9074 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9075 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9079 goto cp0_unimplemented
;
9085 check_insn(ctx
, ISA_MIPS3
);
9086 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9090 goto cp0_unimplemented
;
9094 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9095 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9098 gen_helper_mtc0_framemask(cpu_env
, arg
);
9102 goto cp0_unimplemented
;
9107 rn
= "Diagnostic"; /* implementation dependent */
9112 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9113 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9114 gen_save_pc(ctx
->base
.pc_next
+ 4);
9115 ctx
->base
.is_jmp
= DISAS_EXIT
;
9119 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9120 /* Stop translation as we may have switched the execution mode */
9121 ctx
->base
.is_jmp
= DISAS_STOP
;
9122 rn
= "TraceControl";
9123 goto cp0_unimplemented
;
9125 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9126 /* Stop translation as we may have switched the execution mode */
9127 ctx
->base
.is_jmp
= DISAS_STOP
;
9128 rn
= "TraceControl2";
9129 goto cp0_unimplemented
;
9131 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9132 /* Stop translation as we may have switched the execution mode */
9133 ctx
->base
.is_jmp
= DISAS_STOP
;
9134 rn
= "UserTraceData";
9135 goto cp0_unimplemented
;
9137 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9138 /* Stop translation as we may have switched the execution mode */
9139 ctx
->base
.is_jmp
= DISAS_STOP
;
9141 goto cp0_unimplemented
;
9143 goto cp0_unimplemented
;
9150 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9154 goto cp0_unimplemented
;
9160 gen_helper_mtc0_performance0(cpu_env
, arg
);
9161 rn
= "Performance0";
9164 // gen_helper_mtc0_performance1(cpu_env, arg);
9165 rn
= "Performance1";
9166 goto cp0_unimplemented
;
9168 // gen_helper_mtc0_performance2(cpu_env, arg);
9169 rn
= "Performance2";
9170 goto cp0_unimplemented
;
9172 // gen_helper_mtc0_performance3(cpu_env, arg);
9173 rn
= "Performance3";
9174 goto cp0_unimplemented
;
9176 // gen_helper_mtc0_performance4(cpu_env, arg);
9177 rn
= "Performance4";
9178 goto cp0_unimplemented
;
9180 // gen_helper_mtc0_performance5(cpu_env, arg);
9181 rn
= "Performance5";
9182 goto cp0_unimplemented
;
9184 // gen_helper_mtc0_performance6(cpu_env, arg);
9185 rn
= "Performance6";
9186 goto cp0_unimplemented
;
9188 // gen_helper_mtc0_performance7(cpu_env, arg);
9189 rn
= "Performance7";
9190 goto cp0_unimplemented
;
9192 goto cp0_unimplemented
;
9198 gen_helper_mtc0_errctl(cpu_env
, arg
);
9199 ctx
->base
.is_jmp
= DISAS_STOP
;
9203 goto cp0_unimplemented
;
9216 goto cp0_unimplemented
;
9225 gen_helper_mtc0_taglo(cpu_env
, arg
);
9232 gen_helper_mtc0_datalo(cpu_env
, arg
);
9236 goto cp0_unimplemented
;
9245 gen_helper_mtc0_taghi(cpu_env
, arg
);
9252 gen_helper_mtc0_datahi(cpu_env
, arg
);
9257 goto cp0_unimplemented
;
9263 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9267 goto cp0_unimplemented
;
9274 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9283 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9284 tcg_gen_st_tl(arg
, cpu_env
,
9285 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9289 goto cp0_unimplemented
;
9293 goto cp0_unimplemented
;
9295 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9297 /* For simplicity assume that all writes can cause interrupts. */
9298 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9300 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9301 * translated code to check for pending interrupts. */
9302 gen_save_pc(ctx
->base
.pc_next
+ 4);
9303 ctx
->base
.is_jmp
= DISAS_EXIT
;
9308 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9310 #endif /* TARGET_MIPS64 */
9312 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9313 int u
, int sel
, int h
)
9315 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9316 TCGv t0
= tcg_temp_local_new();
9318 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9319 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9320 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9321 tcg_gen_movi_tl(t0
, -1);
9322 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9323 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9324 tcg_gen_movi_tl(t0
, -1);
9330 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9333 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9343 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9346 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9349 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9352 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9355 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9358 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9361 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9364 gen_mfc0(ctx
, t0
, rt
, sel
);
9371 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9374 gen_mfc0(ctx
, t0
, rt
, sel
);
9380 gen_helper_mftc0_status(t0
, cpu_env
);
9383 gen_mfc0(ctx
, t0
, rt
, sel
);
9389 gen_helper_mftc0_cause(t0
, cpu_env
);
9399 gen_helper_mftc0_epc(t0
, cpu_env
);
9409 gen_helper_mftc0_ebase(t0
, cpu_env
);
9426 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9436 gen_helper_mftc0_debug(t0
, cpu_env
);
9439 gen_mfc0(ctx
, t0
, rt
, sel
);
9444 gen_mfc0(ctx
, t0
, rt
, sel
);
9446 } else switch (sel
) {
9447 /* GPR registers. */
9449 gen_helper_1e0i(mftgpr
, t0
, rt
);
9451 /* Auxiliary CPU registers */
9455 gen_helper_1e0i(mftlo
, t0
, 0);
9458 gen_helper_1e0i(mfthi
, t0
, 0);
9461 gen_helper_1e0i(mftacx
, t0
, 0);
9464 gen_helper_1e0i(mftlo
, t0
, 1);
9467 gen_helper_1e0i(mfthi
, t0
, 1);
9470 gen_helper_1e0i(mftacx
, t0
, 1);
9473 gen_helper_1e0i(mftlo
, t0
, 2);
9476 gen_helper_1e0i(mfthi
, t0
, 2);
9479 gen_helper_1e0i(mftacx
, t0
, 2);
9482 gen_helper_1e0i(mftlo
, t0
, 3);
9485 gen_helper_1e0i(mfthi
, t0
, 3);
9488 gen_helper_1e0i(mftacx
, t0
, 3);
9491 gen_helper_mftdsp(t0
, cpu_env
);
9497 /* Floating point (COP1). */
9499 /* XXX: For now we support only a single FPU context. */
9501 TCGv_i32 fp0
= tcg_temp_new_i32();
9503 gen_load_fpr32(ctx
, fp0
, rt
);
9504 tcg_gen_ext_i32_tl(t0
, fp0
);
9505 tcg_temp_free_i32(fp0
);
9507 TCGv_i32 fp0
= tcg_temp_new_i32();
9509 gen_load_fpr32h(ctx
, fp0
, rt
);
9510 tcg_gen_ext_i32_tl(t0
, fp0
);
9511 tcg_temp_free_i32(fp0
);
9515 /* XXX: For now we support only a single FPU context. */
9516 gen_helper_1e0i(cfc1
, t0
, rt
);
9518 /* COP2: Not implemented. */
9525 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9526 gen_store_gpr(t0
, rd
);
9532 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9533 generate_exception_end(ctx
, EXCP_RI
);
9536 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9537 int u
, int sel
, int h
)
9539 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9540 TCGv t0
= tcg_temp_local_new();
9542 gen_load_gpr(t0
, rt
);
9543 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9544 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9545 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9547 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9548 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9555 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9558 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9568 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9571 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9574 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9577 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9580 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9583 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9586 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9589 gen_mtc0(ctx
, t0
, rd
, sel
);
9596 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9599 gen_mtc0(ctx
, t0
, rd
, sel
);
9605 gen_helper_mttc0_status(cpu_env
, t0
);
9608 gen_mtc0(ctx
, t0
, rd
, sel
);
9614 gen_helper_mttc0_cause(cpu_env
, t0
);
9624 gen_helper_mttc0_ebase(cpu_env
, t0
);
9634 gen_helper_mttc0_debug(cpu_env
, t0
);
9637 gen_mtc0(ctx
, t0
, rd
, sel
);
9642 gen_mtc0(ctx
, t0
, rd
, sel
);
9644 } else switch (sel
) {
9645 /* GPR registers. */
9647 gen_helper_0e1i(mttgpr
, t0
, rd
);
9649 /* Auxiliary CPU registers */
9653 gen_helper_0e1i(mttlo
, t0
, 0);
9656 gen_helper_0e1i(mtthi
, t0
, 0);
9659 gen_helper_0e1i(mttacx
, t0
, 0);
9662 gen_helper_0e1i(mttlo
, t0
, 1);
9665 gen_helper_0e1i(mtthi
, t0
, 1);
9668 gen_helper_0e1i(mttacx
, t0
, 1);
9671 gen_helper_0e1i(mttlo
, t0
, 2);
9674 gen_helper_0e1i(mtthi
, t0
, 2);
9677 gen_helper_0e1i(mttacx
, t0
, 2);
9680 gen_helper_0e1i(mttlo
, t0
, 3);
9683 gen_helper_0e1i(mtthi
, t0
, 3);
9686 gen_helper_0e1i(mttacx
, t0
, 3);
9689 gen_helper_mttdsp(cpu_env
, t0
);
9695 /* Floating point (COP1). */
9697 /* XXX: For now we support only a single FPU context. */
9699 TCGv_i32 fp0
= tcg_temp_new_i32();
9701 tcg_gen_trunc_tl_i32(fp0
, t0
);
9702 gen_store_fpr32(ctx
, fp0
, rd
);
9703 tcg_temp_free_i32(fp0
);
9705 TCGv_i32 fp0
= tcg_temp_new_i32();
9707 tcg_gen_trunc_tl_i32(fp0
, t0
);
9708 gen_store_fpr32h(ctx
, fp0
, rd
);
9709 tcg_temp_free_i32(fp0
);
9713 /* XXX: For now we support only a single FPU context. */
9715 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9717 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9718 tcg_temp_free_i32(fs_tmp
);
9720 /* Stop translation as we may have changed hflags */
9721 ctx
->base
.is_jmp
= DISAS_STOP
;
9723 /* COP2: Not implemented. */
9730 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9736 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9737 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_cp0: translate a coprocessor 0 (system control) instruction.
 *
 * NOTE(review): this chunk is a lossy extraction -- the switch (opc)
 * statement, its case labels, braces, break/goto statements and some
 * #if/#endif lines were dropped, and stale original line numbers
 * (e.g. "9740") are fused into the text.  The comments below name the
 * opcode group each fragment appears to belong to; confirm every
 * attribution against the complete upstream file before relying on it.
 */
9740 static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
9742 const char *opn = "ldst";
/* All CP0 accesses require coprocessor 0 access to be enabled. */
9744 check_cp0_enabled(ctx);
/* Presumably MFC0: move CP0 register (rd, sel = opcode & 0x7) to GPR rt. */
9751 gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
/* Presumably MTC0: move GPR rt into CP0 register (rd, sel). */
9756 TCGv t0 = tcg_temp_new();
9758 gen_load_gpr(t0, rt);
9759 gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
9764 #if defined(TARGET_MIPS64)
/* Presumably DMFC0 (64-bit move-from, MIPS III and later). */
9766 check_insn(ctx, ISA_MIPS3);
9771 gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
/* Presumably DMTC0 (64-bit move-to, MIPS III and later). */
9775 check_insn(ctx, ISA_MIPS3);
9777 TCGv t0 = tcg_temp_new();
9779 gen_load_gpr(t0, rt);
9780 gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
/* Presumably MFHC0: read the high half of an extended CP0 register. */
9792 gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
/* Presumably MTHC0: write the high half of an extended CP0 register. */
9798 TCGv t0 = tcg_temp_new();
9799 gen_load_gpr(t0, rt);
9800 gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
/* Presumably MFTR (MT ASE): read a register of another thread context. */
9806 check_cp0_enabled(ctx);
9811 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
9812 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
/* Presumably MTTR (MT ASE): write a register of another thread context. */
9816 check_cp0_enabled(ctx);
9817 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
9818 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
/* Presumably TLBWI: TLB write indexed; only if the CPU model supplies a helper. */
9823 if (!env->tlb->helper_tlbwi)
9825 gen_helper_tlbwi(cpu_env);
/* Presumably TLBINV: TLB invalidate. */
9830 if (!env->tlb->helper_tlbinv) {
9833 gen_helper_tlbinv(cpu_env);
9834 } /* treat as nop if TLBINV not supported */
/* Presumably TLBINVF: TLB invalidate flush. */
9839 if (!env->tlb->helper_tlbinvf) {
9842 gen_helper_tlbinvf(cpu_env);
9843 } /* treat as nop if TLBINV not supported */
/* Presumably TLBWR: TLB write random. */
9847 if (!env->tlb->helper_tlbwr)
9849 gen_helper_tlbwr(cpu_env);
/* Presumably TLBP: TLB probe. */
9853 if (!env->tlb->helper_tlbp)
9855 gen_helper_tlbp(cpu_env);
/* Presumably TLBR: TLB read. */
9859 if (!env->tlb->helper_tlbr)
9861 gen_helper_tlbr(cpu_env);
/* ERET/ERETNC: return from exception.  On R6, an ERET in a branch
 * delay/forbidden slot is a reserved instruction. */
9863 case OPC_ERET: /* OPC_ERETNC */
9864 if ((ctx->insn_flags & ISA_MIPS32R6) &&
9865 (ctx->hflags & MIPS_HFLAG_BMASK)) {
/* The ERETNC-distinguishing bit sits at a different position in the
 * MIPS16/microMIPS encodings than in the standard 32-bit encoding. */
9868 int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
9869 if (ctx->opcode & (1 << bit_shift)) {
/* ERETNC (no clear of LLbit) requires MIPS32 Release 5. */
9872 check_insn(ctx, ISA_MIPS32R5);
9873 gen_helper_eretnc(cpu_env);
/* Plain ERET requires MIPS II. */
9877 check_insn(ctx, ISA_MIPS2);
9878 gen_helper_eret(cpu_env);
/* Hflags may have changed; force an exit from the translation block. */
9880 ctx->base.is_jmp = DISAS_EXIT;
/* Presumably DERET: return from debug exception; RI outside debug mode. */
9885 check_insn(ctx, ISA_MIPS32);
9886 if ((ctx->insn_flags & ISA_MIPS32R6) &&
9887 (ctx->hflags & MIPS_HFLAG_BMASK)) {
9890 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
9892 generate_exception_end(ctx, EXCP_RI);
9894 gen_helper_deret(cpu_env);
9895 ctx->base.is_jmp = DISAS_EXIT;
/* Presumably WAIT: idle until interrupt. */
9900 check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
9901 if ((ctx->insn_flags & ISA_MIPS32R6) &&
9902 (ctx->hflags & MIPS_HFLAG_BMASK)) {
9905 /* If we get an exception, we want to restart at next instruction */
9906 ctx->base.pc_next += 4;
9907 save_cpu_state(ctx, 1);
9908 ctx->base.pc_next -= 4;
9909 gen_helper_wait(cpu_env);
9910 ctx->base.is_jmp = DISAS_NORETURN;
/* Default/die path: reserved instruction. */
9915 generate_exception_end(ctx, EXCP_RI);
9918 (void)opn; /* avoid a compiler warning */
9920 #endif /* !CONFIG_USER_ONLY */
9922 /* CP1 Branches (before delay slot) */
/*
 * gen_compute_branch1: translate a pre-R6 FPU conditional branch
 * (BC1F/BC1T, their branch-likely forms, and the BC1ANY2/BC1ANY4
 * paired-single variants).  Computes the branch condition from the
 * FCSR condition bit(s) into the global `bcond`, and records the
 * target and branch-type hflags; the actual branch happens after the
 * delay slot is translated.
 *
 * NOTE(review): lossy extraction -- the switch/case labels, braces and
 * goto not_likely/likely statements were dropped and stale line numbers
 * are fused into the text.  The per-group attributions below are
 * inferred from the generated TCG ops; confirm against upstream.
 */
9923 static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
9924 int32_t cc, int32_t offset)
9926 target_ulong btarget;
9927 TCGv_i32 t0 = tcg_temp_new_i32();
/* R6 forbids a branch inside a delay/forbidden slot: reserved instruction. */
9929 if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
9930 generate_exception_end(ctx, EXCP_RI);
/* A non-zero condition-code field needs MIPS IV / MIPS32. */
9935 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
/* Target is PC-relative to the instruction after the branch. */
9937 btarget = ctx->base.pc_next + 4 + offset;
/* Presumably BC1F: branch if FCSR condition bit cc is CLEAR
 * (shift the bit down, invert, mask to 1). */
9941 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9942 tcg_gen_not_i32(t0, t0);
9943 tcg_gen_andi_i32(t0, t0, 1);
9944 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1FL: same condition, branch-likely form. */
9947 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9948 tcg_gen_not_i32(t0, t0);
9949 tcg_gen_andi_i32(t0, t0, 1);
9950 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1T: branch if condition bit cc is SET (no inversion). */
9953 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9954 tcg_gen_andi_i32(t0, t0, 1);
9955 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1TL: branch-likely form; likely variants set HFLAG_BL. */
9958 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9959 tcg_gen_andi_i32(t0, t0, 1);
9960 tcg_gen_extu_i32_tl(bcond, t0);
9962 ctx->hflags |= MIPS_HFLAG_BL;
/* Presumably BC1FANY2: branch if ANY of cc, cc+1 is clear
 * (NAND of the two bits). */
9966 TCGv_i32 t1 = tcg_temp_new_i32();
9967 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9968 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
9969 tcg_gen_nand_i32(t0, t0, t1);
9970 tcg_temp_free_i32(t1);
9971 tcg_gen_andi_i32(t0, t0, 1);
9972 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1TANY2: branch if ANY of cc, cc+1 is set (OR). */
9977 TCGv_i32 t1 = tcg_temp_new_i32();
9978 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9979 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
9980 tcg_gen_or_i32(t0, t0, t1);
9981 tcg_temp_free_i32(t1);
9982 tcg_gen_andi_i32(t0, t0, 1);
9983 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1FANY4: branch if any of cc..cc+3 is clear
 * (AND the first three, NAND in the fourth). */
9988 TCGv_i32 t1 = tcg_temp_new_i32();
9989 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
9990 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
9991 tcg_gen_and_i32(t0, t0, t1);
9992 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
9993 tcg_gen_and_i32(t0, t0, t1);
9994 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
9995 tcg_gen_nand_i32(t0, t0, t1);
9996 tcg_temp_free_i32(t1);
9997 tcg_gen_andi_i32(t0, t0, 1);
9998 tcg_gen_extu_i32_tl(bcond, t0);
/* Presumably BC1TANY4: branch if any of cc..cc+3 is set (OR chain). */
10003 TCGv_i32 t1 = tcg_temp_new_i32();
10004 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
10005 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
10006 tcg_gen_or_i32(t0, t0, t1);
10007 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
10008 tcg_gen_or_i32(t0, t0, t1);
10009 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
10010 tcg_gen_or_i32(t0, t0, t1);
10011 tcg_temp_free_i32(t1);
10012 tcg_gen_andi_i32(t0, t0, 1);
10013 tcg_gen_extu_i32_tl(bcond, t0);
/* Non-likely variants mark a plain conditional branch. */
10016 ctx->hflags |= MIPS_HFLAG_BC;
/* Default: unknown op -- reserved instruction. */
10019 MIPS_INVAL("cp1 cond branch");
10020 generate_exception_end(ctx, EXCP_RI);
/* Record target; delay slot is a standard 32-bit slot. */
10023 ctx->btarget = btarget;
10024 ctx->hflags |= MIPS_HFLAG_BDS32;
10026 tcg_temp_free_i32(t0);
10029 /* R6 CP1 Branches */
/*
 * gen_compute_branch1_r6: translate an R6 FPU branch (BC1EQZ/BC1NEZ).
 * The condition is bit 0 of FPR ft (rather than an FCSR cc bit as in
 * pre-R6 branches).  Sets `bcond`, records the target and the delay
 * slot size in hflags.
 *
 * NOTE(review): lossy extraction -- case labels, braces and break
 * statements were dropped and stale line numbers are fused into the
 * text.  Group attributions below are inferred; confirm upstream.
 */
10030 static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
10031 int32_t ft, int32_t offset,
10032 int delayslot_size)
10034 target_ulong btarget;
10035 TCGv_i64 t0 = tcg_temp_new_i64();
/* A branch in a delay/forbidden slot is a reserved instruction. */
10037 if (ctx->hflags & MIPS_HFLAG_BMASK) {
10038 #ifdef MIPS_DEBUG_DISAS
10039 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10040 "\n", ctx->base.pc_next);
10042 generate_exception_end(ctx, EXCP_RI);
/* Condition = bit 0 of FPR ft. */
10046 gen_load_fpr64(ctx, t0, ft);
10047 tcg_gen_andi_i64(t0, t0, 1);
10049 btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
/* Presumably BC1EQZ: invert bit 0 so the branch fires when it is zero. */
10053 tcg_gen_xori_i64(t0, t0, 1);
10054 ctx->hflags |= MIPS_HFLAG_BC;
/* Presumably BC1NEZ: branch when bit 0 is non-zero. */
10057 /* t0 already set */
10058 ctx->hflags |= MIPS_HFLAG_BC;
/* Default: reserved instruction. */
10061 MIPS_INVAL("cp1 cond branch");
10062 generate_exception_end(ctx, EXCP_RI);
10066 tcg_gen_trunc_i64_tl(bcond, t0);
10068 ctx->btarget = btarget;
/* Record the size of the (forbidden) slot following the branch. */
10070 switch (delayslot_size) {
10072 ctx->hflags |= MIPS_HFLAG_BDS16;
10075 ctx->hflags |= MIPS_HFLAG_BDS32;
10080 tcg_temp_free_i64(t0);
10083 /* Coprocessor 1 (FPU) */
/* FOP packs an FPU function code with its format field, mirroring the
 * COP1 instruction encoding: format in bits 25..21, function in 5..0. */
10085 #define FOP(func, fmt) (((fmt) << 21) | (func))
/*
 * NOTE(review): lossy extraction -- the `enum fopcode {` opener and the
 * closing `};` were dropped, and stale line numbers are fused into the
 * text.  Duplicate values (e.g. OPC_MIN_S == OPC_RECIP2_S) are
 * intentional: R6 reuses pre-R6 encodings, disambiguated by insn_flags
 * at translation time.
 */
/* Single-precision (FMT_S) operations. */
10088 OPC_ADD_S = FOP(0, FMT_S),
10089 OPC_SUB_S = FOP(1, FMT_S),
10090 OPC_MUL_S = FOP(2, FMT_S),
10091 OPC_DIV_S = FOP(3, FMT_S),
10092 OPC_SQRT_S = FOP(4, FMT_S),
10093 OPC_ABS_S = FOP(5, FMT_S),
10094 OPC_MOV_S = FOP(6, FMT_S),
10095 OPC_NEG_S = FOP(7, FMT_S),
10096 OPC_ROUND_L_S = FOP(8, FMT_S),
10097 OPC_TRUNC_L_S = FOP(9, FMT_S),
10098 OPC_CEIL_L_S = FOP(10, FMT_S),
10099 OPC_FLOOR_L_S = FOP(11, FMT_S),
10100 OPC_ROUND_W_S = FOP(12, FMT_S),
10101 OPC_TRUNC_W_S = FOP(13, FMT_S),
10102 OPC_CEIL_W_S = FOP(14, FMT_S),
10103 OPC_FLOOR_W_S = FOP(15, FMT_S),
10104 OPC_SEL_S = FOP(16, FMT_S),
10105 OPC_MOVCF_S = FOP(17, FMT_S),
10106 OPC_MOVZ_S = FOP(18, FMT_S),
10107 OPC_MOVN_S = FOP(19, FMT_S),
10108 OPC_SELEQZ_S = FOP(20, FMT_S),
10109 OPC_RECIP_S = FOP(21, FMT_S),
10110 OPC_RSQRT_S = FOP(22, FMT_S),
10111 OPC_SELNEZ_S = FOP(23, FMT_S),
10112 OPC_MADDF_S = FOP(24, FMT_S),
10113 OPC_MSUBF_S = FOP(25, FMT_S),
10114 OPC_RINT_S = FOP(26, FMT_S),
10115 OPC_CLASS_S = FOP(27, FMT_S),
10116 OPC_MIN_S = FOP(28, FMT_S),
10117 OPC_RECIP2_S = FOP(28, FMT_S),
10118 OPC_MINA_S = FOP(29, FMT_S),
10119 OPC_RECIP1_S = FOP(29, FMT_S),
10120 OPC_MAX_S = FOP(30, FMT_S),
10121 OPC_RSQRT1_S = FOP(30, FMT_S),
10122 OPC_MAXA_S = FOP(31, FMT_S),
10123 OPC_RSQRT2_S = FOP(31, FMT_S),
10124 OPC_CVT_D_S = FOP(33, FMT_S),
10125 OPC_CVT_W_S = FOP(36, FMT_S),
10126 OPC_CVT_L_S = FOP(37, FMT_S),
10127 OPC_CVT_PS_S = FOP(38, FMT_S),
/* Pre-R6 single-precision compares (function codes 48..63). */
10128 OPC_CMP_F_S = FOP (48, FMT_S),
10129 OPC_CMP_UN_S = FOP (49, FMT_S),
10130 OPC_CMP_EQ_S = FOP (50, FMT_S),
10131 OPC_CMP_UEQ_S = FOP (51, FMT_S),
10132 OPC_CMP_OLT_S = FOP (52, FMT_S),
10133 OPC_CMP_ULT_S = FOP (53, FMT_S),
10134 OPC_CMP_OLE_S = FOP (54, FMT_S),
10135 OPC_CMP_ULE_S = FOP (55, FMT_S),
10136 OPC_CMP_SF_S = FOP (56, FMT_S),
10137 OPC_CMP_NGLE_S = FOP (57, FMT_S),
10138 OPC_CMP_SEQ_S = FOP (58, FMT_S),
10139 OPC_CMP_NGL_S = FOP (59, FMT_S),
10140 OPC_CMP_LT_S = FOP (60, FMT_S),
10141 OPC_CMP_NGE_S = FOP (61, FMT_S),
10142 OPC_CMP_LE_S = FOP (62, FMT_S),
10143 OPC_CMP_NGT_S = FOP (63, FMT_S),
/* Double-precision (FMT_D) operations. */
10145 OPC_ADD_D = FOP(0, FMT_D),
10146 OPC_SUB_D = FOP(1, FMT_D),
10147 OPC_MUL_D = FOP(2, FMT_D),
10148 OPC_DIV_D = FOP(3, FMT_D),
10149 OPC_SQRT_D = FOP(4, FMT_D),
10150 OPC_ABS_D = FOP(5, FMT_D),
10151 OPC_MOV_D = FOP(6, FMT_D),
10152 OPC_NEG_D = FOP(7, FMT_D),
10153 OPC_ROUND_L_D = FOP(8, FMT_D),
10154 OPC_TRUNC_L_D = FOP(9, FMT_D),
10155 OPC_CEIL_L_D = FOP(10, FMT_D),
10156 OPC_FLOOR_L_D = FOP(11, FMT_D),
10157 OPC_ROUND_W_D = FOP(12, FMT_D),
10158 OPC_TRUNC_W_D = FOP(13, FMT_D),
10159 OPC_CEIL_W_D = FOP(14, FMT_D),
10160 OPC_FLOOR_W_D = FOP(15, FMT_D),
10161 OPC_SEL_D = FOP(16, FMT_D),
10162 OPC_MOVCF_D = FOP(17, FMT_D),
10163 OPC_MOVZ_D = FOP(18, FMT_D),
10164 OPC_MOVN_D = FOP(19, FMT_D),
10165 OPC_SELEQZ_D = FOP(20, FMT_D),
10166 OPC_RECIP_D = FOP(21, FMT_D),
10167 OPC_RSQRT_D = FOP(22, FMT_D),
10168 OPC_SELNEZ_D = FOP(23, FMT_D),
10169 OPC_MADDF_D = FOP(24, FMT_D),
10170 OPC_MSUBF_D = FOP(25, FMT_D),
10171 OPC_RINT_D = FOP(26, FMT_D),
10172 OPC_CLASS_D = FOP(27, FMT_D),
10173 OPC_MIN_D = FOP(28, FMT_D),
10174 OPC_RECIP2_D = FOP(28, FMT_D),
10175 OPC_MINA_D = FOP(29, FMT_D),
10176 OPC_RECIP1_D = FOP(29, FMT_D),
10177 OPC_MAX_D = FOP(30, FMT_D),
10178 OPC_RSQRT1_D = FOP(30, FMT_D),
10179 OPC_MAXA_D = FOP(31, FMT_D),
10180 OPC_RSQRT2_D = FOP(31, FMT_D),
10181 OPC_CVT_S_D = FOP(32, FMT_D),
10182 OPC_CVT_W_D = FOP(36, FMT_D),
10183 OPC_CVT_L_D = FOP(37, FMT_D),
/* Pre-R6 double-precision compares. */
10184 OPC_CMP_F_D = FOP (48, FMT_D),
10185 OPC_CMP_UN_D = FOP (49, FMT_D),
10186 OPC_CMP_EQ_D = FOP (50, FMT_D),
10187 OPC_CMP_UEQ_D = FOP (51, FMT_D),
10188 OPC_CMP_OLT_D = FOP (52, FMT_D),
10189 OPC_CMP_ULT_D = FOP (53, FMT_D),
10190 OPC_CMP_OLE_D = FOP (54, FMT_D),
10191 OPC_CMP_ULE_D = FOP (55, FMT_D),
10192 OPC_CMP_SF_D = FOP (56, FMT_D),
10193 OPC_CMP_NGLE_D = FOP (57, FMT_D),
10194 OPC_CMP_SEQ_D = FOP (58, FMT_D),
10195 OPC_CMP_NGL_D = FOP (59, FMT_D),
10196 OPC_CMP_LT_D = FOP (60, FMT_D),
10197 OPC_CMP_NGE_D = FOP (61, FMT_D),
10198 OPC_CMP_LE_D = FOP (62, FMT_D),
10199 OPC_CMP_NGT_D = FOP (63, FMT_D),
/* Fixed-point (FMT_W/FMT_L) conversions. */
10201 OPC_CVT_S_W = FOP(32, FMT_W),
10202 OPC_CVT_D_W = FOP(33, FMT_W),
10203 OPC_CVT_S_L = FOP(32, FMT_L),
10204 OPC_CVT_D_L = FOP(33, FMT_L),
10205 OPC_CVT_PS_PW = FOP(38, FMT_W),
/* Paired-single (FMT_PS) operations. */
10207 OPC_ADD_PS = FOP(0, FMT_PS),
10208 OPC_SUB_PS = FOP(1, FMT_PS),
10209 OPC_MUL_PS = FOP(2, FMT_PS),
10210 OPC_DIV_PS = FOP(3, FMT_PS),
10211 OPC_ABS_PS = FOP(5, FMT_PS),
10212 OPC_MOV_PS = FOP(6, FMT_PS),
10213 OPC_NEG_PS = FOP(7, FMT_PS),
10214 OPC_MOVCF_PS = FOP(17, FMT_PS),
10215 OPC_MOVZ_PS = FOP(18, FMT_PS),
10216 OPC_MOVN_PS = FOP(19, FMT_PS),
10217 OPC_ADDR_PS = FOP(24, FMT_PS),
10218 OPC_MULR_PS = FOP(26, FMT_PS),
10219 OPC_RECIP2_PS = FOP(28, FMT_PS),
10220 OPC_RECIP1_PS = FOP(29, FMT_PS),
10221 OPC_RSQRT1_PS = FOP(30, FMT_PS),
10222 OPC_RSQRT2_PS = FOP(31, FMT_PS),
10224 OPC_CVT_S_PU = FOP(32, FMT_PS),
10225 OPC_CVT_PW_PS = FOP(36, FMT_PS),
10226 OPC_CVT_S_PL = FOP(40, FMT_PS),
10227 OPC_PLL_PS = FOP(44, FMT_PS),
10228 OPC_PLU_PS = FOP(45, FMT_PS),
10229 OPC_PUL_PS = FOP(46, FMT_PS),
10230 OPC_PUU_PS = FOP(47, FMT_PS),
10231 OPC_CMP_F_PS = FOP (48, FMT_PS),
10232 OPC_CMP_UN_PS = FOP (49, FMT_PS),
10233 OPC_CMP_EQ_PS = FOP (50, FMT_PS),
10234 OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
10235 OPC_CMP_OLT_PS = FOP (52, FMT_PS),
10236 OPC_CMP_ULT_PS = FOP (53, FMT_PS),
10237 OPC_CMP_OLE_PS = FOP (54, FMT_PS),
10238 OPC_CMP_ULE_PS = FOP (55, FMT_PS),
10239 OPC_CMP_SF_PS = FOP (56, FMT_PS),
10240 OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
10241 OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
10242 OPC_CMP_NGL_PS = FOP (59, FMT_PS),
10243 OPC_CMP_LT_PS = FOP (60, FMT_PS),
10244 OPC_CMP_NGE_PS = FOP (61, FMT_PS),
10245 OPC_CMP_LE_PS = FOP (62, FMT_PS),
10246 OPC_CMP_NGT_PS = FOP (63, FMT_PS),
/* R6 CMP.cond.S encodings reuse the FMT_W format field. */
10250 R6_OPC_CMP_AF_S = FOP(0, FMT_W),
10251 R6_OPC_CMP_UN_S = FOP(1, FMT_W),
10252 R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
10253 R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
10254 R6_OPC_CMP_LT_S = FOP(4, FMT_W),
10255 R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
10256 R6_OPC_CMP_LE_S = FOP(6, FMT_W),
10257 R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
10258 R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
10259 R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
10260 R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
10261 R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
10262 R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
10263 R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
10264 R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
10265 R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
10266 R6_OPC_CMP_OR_S = FOP(17, FMT_W),
10267 R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
10268 R6_OPC_CMP_NE_S = FOP(19, FMT_W),
10269 R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
10270 R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
10271 R6_OPC_CMP_SNE_S = FOP(27, FMT_W),
/* R6 CMP.cond.D encodings reuse the FMT_L format field. */
10273 R6_OPC_CMP_AF_D = FOP(0, FMT_L),
10274 R6_OPC_CMP_UN_D = FOP(1, FMT_L),
10275 R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
10276 R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
10277 R6_OPC_CMP_LT_D = FOP(4, FMT_L),
10278 R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
10279 R6_OPC_CMP_LE_D = FOP(6, FMT_L),
10280 R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
10281 R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
10282 R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
10283 R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
10284 R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
10285 R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
10286 R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
10287 R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
10288 R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
10289 R6_OPC_CMP_OR_D = FOP(17, FMT_L),
10290 R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
10291 R6_OPC_CMP_NE_D = FOP(19, FMT_L),
10292 R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
10293 R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
10294 R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
/*
 * gen_cp1: translate CP1 (FPU) move instructions between GPRs, FPRs
 * and FPU control registers.
 *
 * NOTE(review): lossy extraction -- the switch (opc) case labels,
 * braces and break statements were dropped and stale line numbers are
 * fused into the text.  Per-group attributions below are inferred from
 * the helpers called; confirm against the complete upstream file.
 */
10296 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
10298 TCGv t0 = tcg_temp_new();
/* Presumably MFC1: sign-extend 32-bit FPR fs into GPR rt. */
10303 TCGv_i32 fp0 = tcg_temp_new_i32();
10305 gen_load_fpr32(ctx, fp0, fs);
10306 tcg_gen_ext_i32_tl(t0, fp0);
10307 tcg_temp_free_i32(fp0);
10309 gen_store_gpr(t0, rt);
/* Presumably MTC1: truncate GPR rt into 32-bit FPR fs. */
10312 gen_load_gpr(t0, rt);
10314 TCGv_i32 fp0 = tcg_temp_new_i32();
10316 tcg_gen_trunc_tl_i32(fp0, t0);
10317 gen_store_fpr32(ctx, fp0, fs);
10318 tcg_temp_free_i32(fp0);
/* Presumably CFC1: read FPU control register fs into GPR rt. */
10322 gen_helper_1e0i(cfc1, t0, fs);
10323 gen_store_gpr(t0, rt);
/* Presumably CTC1: write GPR rt into FPU control register fs. */
10326 gen_load_gpr(t0, rt);
10327 save_cpu_state(ctx, 0);
10329 TCGv_i32 fs_tmp = tcg_const_i32(fs);
10331 gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
10332 tcg_temp_free_i32(fs_tmp);
10334 /* Stop translation as we may have changed hflags */
10335 ctx->base.is_jmp = DISAS_STOP;
10337 #if defined(TARGET_MIPS64)
/* Presumably DMFC1: 64-bit FPR to GPR. */
10339 gen_load_fpr64(ctx, t0, fs);
10340 gen_store_gpr(t0, rt);
/* Presumably DMTC1: GPR to 64-bit FPR. */
10343 gen_load_gpr(t0, rt);
10344 gen_store_fpr64(ctx, t0, fs);
/* Presumably MFHC1: high half of FPR fs to GPR rt. */
10349 TCGv_i32 fp0 = tcg_temp_new_i32();
10351 gen_load_fpr32h(ctx, fp0, fs);
10352 tcg_gen_ext_i32_tl(t0, fp0);
10353 tcg_temp_free_i32(fp0);
10355 gen_store_gpr(t0, rt);
/* Presumably MTHC1: GPR rt to high half of FPR fs. */
10358 gen_load_gpr(t0, rt);
10360 TCGv_i32 fp0 = tcg_temp_new_i32();
10362 tcg_gen_trunc_tl_i32(fp0, t0);
10363 gen_store_fpr32h(ctx, fp0, fs);
10364 tcg_temp_free_i32(fp0);
/* Default: reserved instruction. */
10368 MIPS_INVAL("cp1 move");
10369 generate_exception_end(ctx, EXCP_RI);
/*
 * gen_movci: translate MOVF/MOVT -- conditionally move GPR rs into GPR
 * rd based on FCSR condition bit cc and the true/false selector tf.
 *
 * NOTE(review): lossy extraction -- local declarations, the rd == 0
 * early-out, the if (tf) selector and label placement were dropped;
 * stale line numbers are fused into the text.  Confirm upstream.
 */
10377 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
/* Presumably the rd == 0 case (writes to $zero are discarded). */
10384 /* Treat as NOP. */
/* tf selects the sense: skip the move when the bit state mismatches. */
10389 cond = TCG_COND_EQ;
10391 cond = TCG_COND_NE;
10393 l1 = gen_new_label();
10394 t0 = tcg_temp_new_i32();
/* Test FCSR condition bit cc and branch past the move if not taken. */
10395 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
10396 tcg_gen_brcondi_i32(cond, t0, 0, l1);
10397 tcg_temp_free_i32(t0);
/* Presumably the rs == 0 source case vs. the general register copy. */
10399 tcg_gen_movi_tl(cpu_gpr[rd], 0);
10401 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
/*
 * gen_movcf_s: translate MOVF.S/MOVT.S -- conditionally copy
 * single-precision FPR fs to fd based on FCSR condition bit cc.
 *
 * NOTE(review): lossy extraction -- the trailing `int tf)` parameter
 * line, the if (tf) selector and the gen_set_label(l1) line appear to
 * have been dropped; stale line numbers are fused into the text.
 */
10406 static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
10410 TCGv_i32 t0 = tcg_temp_new_i32();
10411 TCGLabel *l1 = gen_new_label();
/* tf selects the sense of the test on the condition bit. */
10414 cond = TCG_COND_EQ;
10416 cond = TCG_COND_NE;
/* Skip the copy when the condition bit state does not match. */
10418 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
10419 tcg_gen_brcondi_i32(cond, t0, 0, l1);
10420 gen_load_fpr32(ctx, t0, fs);
10421 gen_store_fpr32(ctx, t0, fd);
10423 tcg_temp_free_i32(t0);
/*
 * gen_movcf_d: translate MOVF.D/MOVT.D -- conditionally copy
 * double-precision FPR fs to fd based on FCSR condition bit cc.
 *
 * NOTE(review): lossy extraction -- the fp0 declaration, the if (tf)
 * selector and the gen_set_label(l1) line appear to have been dropped;
 * stale line numbers are fused into the text.
 */
10426 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
10429 TCGv_i32 t0 = tcg_temp_new_i32();
10431 TCGLabel *l1 = gen_new_label();
/* tf selects the sense of the test on the condition bit. */
10434 cond = TCG_COND_EQ;
10436 cond = TCG_COND_NE;
/* Skip the 64-bit copy when the condition bit state does not match. */
10438 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
10439 tcg_gen_brcondi_i32(cond, t0, 0, l1);
10440 tcg_temp_free_i32(t0);
10441 fp0 = tcg_temp_new_i64();
10442 gen_load_fpr64(ctx, fp0, fs);
10443 gen_store_fpr64(ctx, fp0, fd);
10444 tcg_temp_free_i64(fp0);
/*
 * gen_movcf_ps: translate MOVF.PS/MOVT.PS -- conditionally copy each
 * half of a paired-single FPR fs to fd; the low half is guarded by
 * condition bit cc and the high half by bit cc+1, independently.
 *
 * NOTE(review): lossy extraction -- the trailing `int cc, int tf)`
 * parameter line, the if (tf) selector and both gen_set_label lines
 * appear to have been dropped; stale line numbers are fused in.
 */
10448 static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
10452 TCGv_i32 t0 = tcg_temp_new_i32();
10453 TCGLabel *l1 = gen_new_label();
10454 TCGLabel *l2 = gen_new_label();
/* tf selects the sense of the test on each condition bit. */
10457 cond = TCG_COND_EQ;
10459 cond = TCG_COND_NE;
/* Low half: guarded by condition bit cc. */
10461 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
10462 tcg_gen_brcondi_i32(cond, t0, 0, l1);
10463 gen_load_fpr32(ctx, t0, fs);
10464 gen_store_fpr32(ctx, t0, fd);
/* High half: guarded independently by condition bit cc+1. */
10467 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
10468 tcg_gen_brcondi_i32(cond, t0, 0, l2);
10469 gen_load_fpr32h(ctx, t0, fs);
10470 gen_store_fpr32h(ctx, t0, fd);
10471 tcg_temp_free_i32(t0);
/*
 * gen_sel_s: translate the R6 single-precision selects SEL.S,
 * SELEQZ.S and SELNEZ.S, branch-free via movcond.
 *
 * NOTE(review): lossy extraction -- the trailing `int fs)` parameter
 * line, the switch (op1) case labels and break statements were
 * dropped; stale line numbers are fused into the text.  Per-case
 * attributions below are inferred from the movcond conditions.
 */
10475 static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
10478 TCGv_i32 t1 = tcg_const_i32(0);
10479 TCGv_i32 fp0 = tcg_temp_new_i32();
10480 TCGv_i32 fp1 = tcg_temp_new_i32();
10481 TCGv_i32 fp2 = tcg_temp_new_i32();
10482 gen_load_fpr32(ctx, fp0, fd);
10483 gen_load_fpr32(ctx, fp1, ft);
10484 gen_load_fpr32(ctx, fp2, fs);
/* Presumably OPC_SEL_S: fd = (fd & 1) ? ft : fs. */
10488 tcg_gen_andi_i32(fp0, fp0, 1);
10489 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
/* Presumably OPC_SELEQZ_S: fd = (ft & 1) == 0 ? fs : 0. */
10492 tcg_gen_andi_i32(fp1, fp1, 1);
10493 tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
/* Presumably OPC_SELNEZ_S: fd = (ft & 1) != 0 ? fs : 0. */
10496 tcg_gen_andi_i32(fp1, fp1, 1);
10497 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
/* Default: reserved instruction. */
10500 MIPS_INVAL("gen_sel_s");
10501 generate_exception_end(ctx, EXCP_RI);
10505 gen_store_fpr32(ctx, fp0, fd);
10506 tcg_temp_free_i32(fp2);
10507 tcg_temp_free_i32(fp1);
10508 tcg_temp_free_i32(fp0);
10509 tcg_temp_free_i32(t1);
/*
 * gen_sel_d: translate the R6 double-precision selects SEL.D,
 * SELEQZ.D and SELNEZ.D -- the 64-bit counterpart of gen_sel_s.
 *
 * NOTE(review): lossy extraction -- the trailing `int fs)` parameter
 * line, the switch (op1) case labels and break statements were
 * dropped; stale line numbers are fused into the text.  Per-case
 * attributions below are inferred from the movcond conditions.
 */
10512 static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
10515 TCGv_i64 t1 = tcg_const_i64(0);
10516 TCGv_i64 fp0 = tcg_temp_new_i64();
10517 TCGv_i64 fp1 = tcg_temp_new_i64();
10518 TCGv_i64 fp2 = tcg_temp_new_i64();
10519 gen_load_fpr64(ctx, fp0, fd);
10520 gen_load_fpr64(ctx, fp1, ft);
10521 gen_load_fpr64(ctx, fp2, fs);
/* Presumably OPC_SEL_D: fd = (fd & 1) ? ft : fs. */
10525 tcg_gen_andi_i64(fp0, fp0, 1);
10526 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
/* Presumably OPC_SELEQZ_D: fd = (ft & 1) == 0 ? fs : 0. */
10529 tcg_gen_andi_i64(fp1, fp1, 1);
10530 tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
/* Presumably OPC_SELNEZ_D: fd = (ft & 1) != 0 ? fs : 0. */
10533 tcg_gen_andi_i64(fp1, fp1, 1);
10534 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
/* Default: reserved instruction. */
10537 MIPS_INVAL("gen_sel_d");
10538 generate_exception_end(ctx, EXCP_RI);
10542 gen_store_fpr64(ctx, fp0, fd);
10543 tcg_temp_free_i64(fp2);
10544 tcg_temp_free_i64(fp1);
10545 tcg_temp_free_i64(fp0);
10546 tcg_temp_free_i64(t1);
10549 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10550 int ft
, int fs
, int fd
, int cc
)
10552 uint32_t func
= ctx
->opcode
& 0x3f;
10556 TCGv_i32 fp0
= tcg_temp_new_i32();
10557 TCGv_i32 fp1
= tcg_temp_new_i32();
10559 gen_load_fpr32(ctx
, fp0
, fs
);
10560 gen_load_fpr32(ctx
, fp1
, ft
);
10561 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10562 tcg_temp_free_i32(fp1
);
10563 gen_store_fpr32(ctx
, fp0
, fd
);
10564 tcg_temp_free_i32(fp0
);
10569 TCGv_i32 fp0
= tcg_temp_new_i32();
10570 TCGv_i32 fp1
= tcg_temp_new_i32();
10572 gen_load_fpr32(ctx
, fp0
, fs
);
10573 gen_load_fpr32(ctx
, fp1
, ft
);
10574 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10575 tcg_temp_free_i32(fp1
);
10576 gen_store_fpr32(ctx
, fp0
, fd
);
10577 tcg_temp_free_i32(fp0
);
10582 TCGv_i32 fp0
= tcg_temp_new_i32();
10583 TCGv_i32 fp1
= tcg_temp_new_i32();
10585 gen_load_fpr32(ctx
, fp0
, fs
);
10586 gen_load_fpr32(ctx
, fp1
, ft
);
10587 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10588 tcg_temp_free_i32(fp1
);
10589 gen_store_fpr32(ctx
, fp0
, fd
);
10590 tcg_temp_free_i32(fp0
);
10595 TCGv_i32 fp0
= tcg_temp_new_i32();
10596 TCGv_i32 fp1
= tcg_temp_new_i32();
10598 gen_load_fpr32(ctx
, fp0
, fs
);
10599 gen_load_fpr32(ctx
, fp1
, ft
);
10600 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10601 tcg_temp_free_i32(fp1
);
10602 gen_store_fpr32(ctx
, fp0
, fd
);
10603 tcg_temp_free_i32(fp0
);
10608 TCGv_i32 fp0
= tcg_temp_new_i32();
10610 gen_load_fpr32(ctx
, fp0
, fs
);
10611 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10612 gen_store_fpr32(ctx
, fp0
, fd
);
10613 tcg_temp_free_i32(fp0
);
10618 TCGv_i32 fp0
= tcg_temp_new_i32();
10620 gen_load_fpr32(ctx
, fp0
, fs
);
10621 if (ctx
->abs2008
) {
10622 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10624 gen_helper_float_abs_s(fp0
, fp0
);
10626 gen_store_fpr32(ctx
, fp0
, fd
);
10627 tcg_temp_free_i32(fp0
);
10632 TCGv_i32 fp0
= tcg_temp_new_i32();
10634 gen_load_fpr32(ctx
, fp0
, fs
);
10635 gen_store_fpr32(ctx
, fp0
, fd
);
10636 tcg_temp_free_i32(fp0
);
10641 TCGv_i32 fp0
= tcg_temp_new_i32();
10643 gen_load_fpr32(ctx
, fp0
, fs
);
10644 if (ctx
->abs2008
) {
10645 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10647 gen_helper_float_chs_s(fp0
, fp0
);
10649 gen_store_fpr32(ctx
, fp0
, fd
);
10650 tcg_temp_free_i32(fp0
);
10653 case OPC_ROUND_L_S
:
10654 check_cp1_64bitmode(ctx
);
10656 TCGv_i32 fp32
= tcg_temp_new_i32();
10657 TCGv_i64 fp64
= tcg_temp_new_i64();
10659 gen_load_fpr32(ctx
, fp32
, fs
);
10660 if (ctx
->nan2008
) {
10661 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10663 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10665 tcg_temp_free_i32(fp32
);
10666 gen_store_fpr64(ctx
, fp64
, fd
);
10667 tcg_temp_free_i64(fp64
);
10670 case OPC_TRUNC_L_S
:
10671 check_cp1_64bitmode(ctx
);
10673 TCGv_i32 fp32
= tcg_temp_new_i32();
10674 TCGv_i64 fp64
= tcg_temp_new_i64();
10676 gen_load_fpr32(ctx
, fp32
, fs
);
10677 if (ctx
->nan2008
) {
10678 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10680 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10682 tcg_temp_free_i32(fp32
);
10683 gen_store_fpr64(ctx
, fp64
, fd
);
10684 tcg_temp_free_i64(fp64
);
10688 check_cp1_64bitmode(ctx
);
10690 TCGv_i32 fp32
= tcg_temp_new_i32();
10691 TCGv_i64 fp64
= tcg_temp_new_i64();
10693 gen_load_fpr32(ctx
, fp32
, fs
);
10694 if (ctx
->nan2008
) {
10695 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10697 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10699 tcg_temp_free_i32(fp32
);
10700 gen_store_fpr64(ctx
, fp64
, fd
);
10701 tcg_temp_free_i64(fp64
);
10704 case OPC_FLOOR_L_S
:
10705 check_cp1_64bitmode(ctx
);
10707 TCGv_i32 fp32
= tcg_temp_new_i32();
10708 TCGv_i64 fp64
= tcg_temp_new_i64();
10710 gen_load_fpr32(ctx
, fp32
, fs
);
10711 if (ctx
->nan2008
) {
10712 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10714 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10716 tcg_temp_free_i32(fp32
);
10717 gen_store_fpr64(ctx
, fp64
, fd
);
10718 tcg_temp_free_i64(fp64
);
10721 case OPC_ROUND_W_S
:
10723 TCGv_i32 fp0
= tcg_temp_new_i32();
10725 gen_load_fpr32(ctx
, fp0
, fs
);
10726 if (ctx
->nan2008
) {
10727 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10729 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10731 gen_store_fpr32(ctx
, fp0
, fd
);
10732 tcg_temp_free_i32(fp0
);
10735 case OPC_TRUNC_W_S
:
10737 TCGv_i32 fp0
= tcg_temp_new_i32();
10739 gen_load_fpr32(ctx
, fp0
, fs
);
10740 if (ctx
->nan2008
) {
10741 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10743 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10745 gen_store_fpr32(ctx
, fp0
, fd
);
10746 tcg_temp_free_i32(fp0
);
10751 TCGv_i32 fp0
= tcg_temp_new_i32();
10753 gen_load_fpr32(ctx
, fp0
, fs
);
10754 if (ctx
->nan2008
) {
10755 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10757 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10759 gen_store_fpr32(ctx
, fp0
, fd
);
10760 tcg_temp_free_i32(fp0
);
10763 case OPC_FLOOR_W_S
:
10765 TCGv_i32 fp0
= tcg_temp_new_i32();
10767 gen_load_fpr32(ctx
, fp0
, fs
);
10768 if (ctx
->nan2008
) {
10769 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10771 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10773 gen_store_fpr32(ctx
, fp0
, fd
);
10774 tcg_temp_free_i32(fp0
);
10778 check_insn(ctx
, ISA_MIPS32R6
);
10779 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10782 check_insn(ctx
, ISA_MIPS32R6
);
10783 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10786 check_insn(ctx
, ISA_MIPS32R6
);
10787 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10790 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10791 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10794 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10796 TCGLabel
*l1
= gen_new_label();
10800 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10802 fp0
= tcg_temp_new_i32();
10803 gen_load_fpr32(ctx
, fp0
, fs
);
10804 gen_store_fpr32(ctx
, fp0
, fd
);
10805 tcg_temp_free_i32(fp0
);
10810 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10812 TCGLabel
*l1
= gen_new_label();
10816 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10817 fp0
= tcg_temp_new_i32();
10818 gen_load_fpr32(ctx
, fp0
, fs
);
10819 gen_store_fpr32(ctx
, fp0
, fd
);
10820 tcg_temp_free_i32(fp0
);
10827 TCGv_i32 fp0
= tcg_temp_new_i32();
10829 gen_load_fpr32(ctx
, fp0
, fs
);
10830 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10831 gen_store_fpr32(ctx
, fp0
, fd
);
10832 tcg_temp_free_i32(fp0
);
10837 TCGv_i32 fp0
= tcg_temp_new_i32();
10839 gen_load_fpr32(ctx
, fp0
, fs
);
10840 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10841 gen_store_fpr32(ctx
, fp0
, fd
);
10842 tcg_temp_free_i32(fp0
);
10846 check_insn(ctx
, ISA_MIPS32R6
);
10848 TCGv_i32 fp0
= tcg_temp_new_i32();
10849 TCGv_i32 fp1
= tcg_temp_new_i32();
10850 TCGv_i32 fp2
= tcg_temp_new_i32();
10851 gen_load_fpr32(ctx
, fp0
, fs
);
10852 gen_load_fpr32(ctx
, fp1
, ft
);
10853 gen_load_fpr32(ctx
, fp2
, fd
);
10854 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10855 gen_store_fpr32(ctx
, fp2
, fd
);
10856 tcg_temp_free_i32(fp2
);
10857 tcg_temp_free_i32(fp1
);
10858 tcg_temp_free_i32(fp0
);
10862 check_insn(ctx
, ISA_MIPS32R6
);
10864 TCGv_i32 fp0
= tcg_temp_new_i32();
10865 TCGv_i32 fp1
= tcg_temp_new_i32();
10866 TCGv_i32 fp2
= tcg_temp_new_i32();
10867 gen_load_fpr32(ctx
, fp0
, fs
);
10868 gen_load_fpr32(ctx
, fp1
, ft
);
10869 gen_load_fpr32(ctx
, fp2
, fd
);
10870 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10871 gen_store_fpr32(ctx
, fp2
, fd
);
10872 tcg_temp_free_i32(fp2
);
10873 tcg_temp_free_i32(fp1
);
10874 tcg_temp_free_i32(fp0
);
10878 check_insn(ctx
, ISA_MIPS32R6
);
10880 TCGv_i32 fp0
= tcg_temp_new_i32();
10881 gen_load_fpr32(ctx
, fp0
, fs
);
10882 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10883 gen_store_fpr32(ctx
, fp0
, fd
);
10884 tcg_temp_free_i32(fp0
);
10888 check_insn(ctx
, ISA_MIPS32R6
);
10890 TCGv_i32 fp0
= tcg_temp_new_i32();
10891 gen_load_fpr32(ctx
, fp0
, fs
);
10892 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10893 gen_store_fpr32(ctx
, fp0
, fd
);
10894 tcg_temp_free_i32(fp0
);
10897 case OPC_MIN_S
: /* OPC_RECIP2_S */
10898 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10900 TCGv_i32 fp0
= tcg_temp_new_i32();
10901 TCGv_i32 fp1
= tcg_temp_new_i32();
10902 TCGv_i32 fp2
= tcg_temp_new_i32();
10903 gen_load_fpr32(ctx
, fp0
, fs
);
10904 gen_load_fpr32(ctx
, fp1
, ft
);
10905 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10906 gen_store_fpr32(ctx
, fp2
, fd
);
10907 tcg_temp_free_i32(fp2
);
10908 tcg_temp_free_i32(fp1
);
10909 tcg_temp_free_i32(fp0
);
10912 check_cp1_64bitmode(ctx
);
10914 TCGv_i32 fp0
= tcg_temp_new_i32();
10915 TCGv_i32 fp1
= tcg_temp_new_i32();
10917 gen_load_fpr32(ctx
, fp0
, fs
);
10918 gen_load_fpr32(ctx
, fp1
, ft
);
10919 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10920 tcg_temp_free_i32(fp1
);
10921 gen_store_fpr32(ctx
, fp0
, fd
);
10922 tcg_temp_free_i32(fp0
);
10926 case OPC_MINA_S
: /* OPC_RECIP1_S */
10927 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10929 TCGv_i32 fp0
= tcg_temp_new_i32();
10930 TCGv_i32 fp1
= tcg_temp_new_i32();
10931 TCGv_i32 fp2
= tcg_temp_new_i32();
10932 gen_load_fpr32(ctx
, fp0
, fs
);
10933 gen_load_fpr32(ctx
, fp1
, ft
);
10934 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10935 gen_store_fpr32(ctx
, fp2
, fd
);
10936 tcg_temp_free_i32(fp2
);
10937 tcg_temp_free_i32(fp1
);
10938 tcg_temp_free_i32(fp0
);
10941 check_cp1_64bitmode(ctx
);
10943 TCGv_i32 fp0
= tcg_temp_new_i32();
10945 gen_load_fpr32(ctx
, fp0
, fs
);
10946 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10947 gen_store_fpr32(ctx
, fp0
, fd
);
10948 tcg_temp_free_i32(fp0
);
10952 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10953 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10955 TCGv_i32 fp0
= tcg_temp_new_i32();
10956 TCGv_i32 fp1
= tcg_temp_new_i32();
10957 gen_load_fpr32(ctx
, fp0
, fs
);
10958 gen_load_fpr32(ctx
, fp1
, ft
);
10959 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10960 gen_store_fpr32(ctx
, fp1
, fd
);
10961 tcg_temp_free_i32(fp1
);
10962 tcg_temp_free_i32(fp0
);
10965 check_cp1_64bitmode(ctx
);
10967 TCGv_i32 fp0
= tcg_temp_new_i32();
10969 gen_load_fpr32(ctx
, fp0
, fs
);
10970 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10971 gen_store_fpr32(ctx
, fp0
, fd
);
10972 tcg_temp_free_i32(fp0
);
10976 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10977 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10979 TCGv_i32 fp0
= tcg_temp_new_i32();
10980 TCGv_i32 fp1
= tcg_temp_new_i32();
10981 gen_load_fpr32(ctx
, fp0
, fs
);
10982 gen_load_fpr32(ctx
, fp1
, ft
);
10983 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10984 gen_store_fpr32(ctx
, fp1
, fd
);
10985 tcg_temp_free_i32(fp1
);
10986 tcg_temp_free_i32(fp0
);
10989 check_cp1_64bitmode(ctx
);
10991 TCGv_i32 fp0
= tcg_temp_new_i32();
10992 TCGv_i32 fp1
= tcg_temp_new_i32();
10994 gen_load_fpr32(ctx
, fp0
, fs
);
10995 gen_load_fpr32(ctx
, fp1
, ft
);
10996 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10997 tcg_temp_free_i32(fp1
);
10998 gen_store_fpr32(ctx
, fp0
, fd
);
10999 tcg_temp_free_i32(fp0
);
11004 check_cp1_registers(ctx
, fd
);
11006 TCGv_i32 fp32
= tcg_temp_new_i32();
11007 TCGv_i64 fp64
= tcg_temp_new_i64();
11009 gen_load_fpr32(ctx
, fp32
, fs
);
11010 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11011 tcg_temp_free_i32(fp32
);
11012 gen_store_fpr64(ctx
, fp64
, fd
);
11013 tcg_temp_free_i64(fp64
);
11018 TCGv_i32 fp0
= tcg_temp_new_i32();
11020 gen_load_fpr32(ctx
, fp0
, fs
);
11021 if (ctx
->nan2008
) {
11022 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11024 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11026 gen_store_fpr32(ctx
, fp0
, fd
);
11027 tcg_temp_free_i32(fp0
);
11031 check_cp1_64bitmode(ctx
);
11033 TCGv_i32 fp32
= tcg_temp_new_i32();
11034 TCGv_i64 fp64
= tcg_temp_new_i64();
11036 gen_load_fpr32(ctx
, fp32
, fs
);
11037 if (ctx
->nan2008
) {
11038 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11040 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11042 tcg_temp_free_i32(fp32
);
11043 gen_store_fpr64(ctx
, fp64
, fd
);
11044 tcg_temp_free_i64(fp64
);
11050 TCGv_i64 fp64
= tcg_temp_new_i64();
11051 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11052 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11054 gen_load_fpr32(ctx
, fp32_0
, fs
);
11055 gen_load_fpr32(ctx
, fp32_1
, ft
);
11056 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11057 tcg_temp_free_i32(fp32_1
);
11058 tcg_temp_free_i32(fp32_0
);
11059 gen_store_fpr64(ctx
, fp64
, fd
);
11060 tcg_temp_free_i64(fp64
);
11066 case OPC_CMP_UEQ_S
:
11067 case OPC_CMP_OLT_S
:
11068 case OPC_CMP_ULT_S
:
11069 case OPC_CMP_OLE_S
:
11070 case OPC_CMP_ULE_S
:
11072 case OPC_CMP_NGLE_S
:
11073 case OPC_CMP_SEQ_S
:
11074 case OPC_CMP_NGL_S
:
11076 case OPC_CMP_NGE_S
:
11078 case OPC_CMP_NGT_S
:
11079 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11080 if (ctx
->opcode
& (1 << 6)) {
11081 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11083 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11087 check_cp1_registers(ctx
, fs
| ft
| fd
);
11089 TCGv_i64 fp0
= tcg_temp_new_i64();
11090 TCGv_i64 fp1
= tcg_temp_new_i64();
11092 gen_load_fpr64(ctx
, fp0
, fs
);
11093 gen_load_fpr64(ctx
, fp1
, ft
);
11094 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11095 tcg_temp_free_i64(fp1
);
11096 gen_store_fpr64(ctx
, fp0
, fd
);
11097 tcg_temp_free_i64(fp0
);
11101 check_cp1_registers(ctx
, fs
| ft
| fd
);
11103 TCGv_i64 fp0
= tcg_temp_new_i64();
11104 TCGv_i64 fp1
= tcg_temp_new_i64();
11106 gen_load_fpr64(ctx
, fp0
, fs
);
11107 gen_load_fpr64(ctx
, fp1
, ft
);
11108 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11109 tcg_temp_free_i64(fp1
);
11110 gen_store_fpr64(ctx
, fp0
, fd
);
11111 tcg_temp_free_i64(fp0
);
11115 check_cp1_registers(ctx
, fs
| ft
| fd
);
11117 TCGv_i64 fp0
= tcg_temp_new_i64();
11118 TCGv_i64 fp1
= tcg_temp_new_i64();
11120 gen_load_fpr64(ctx
, fp0
, fs
);
11121 gen_load_fpr64(ctx
, fp1
, ft
);
11122 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11123 tcg_temp_free_i64(fp1
);
11124 gen_store_fpr64(ctx
, fp0
, fd
);
11125 tcg_temp_free_i64(fp0
);
11129 check_cp1_registers(ctx
, fs
| ft
| fd
);
11131 TCGv_i64 fp0
= tcg_temp_new_i64();
11132 TCGv_i64 fp1
= tcg_temp_new_i64();
11134 gen_load_fpr64(ctx
, fp0
, fs
);
11135 gen_load_fpr64(ctx
, fp1
, ft
);
11136 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11137 tcg_temp_free_i64(fp1
);
11138 gen_store_fpr64(ctx
, fp0
, fd
);
11139 tcg_temp_free_i64(fp0
);
11143 check_cp1_registers(ctx
, fs
| fd
);
11145 TCGv_i64 fp0
= tcg_temp_new_i64();
11147 gen_load_fpr64(ctx
, fp0
, fs
);
11148 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11149 gen_store_fpr64(ctx
, fp0
, fd
);
11150 tcg_temp_free_i64(fp0
);
11154 check_cp1_registers(ctx
, fs
| fd
);
11156 TCGv_i64 fp0
= tcg_temp_new_i64();
11158 gen_load_fpr64(ctx
, fp0
, fs
);
11159 if (ctx
->abs2008
) {
11160 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11162 gen_helper_float_abs_d(fp0
, fp0
);
11164 gen_store_fpr64(ctx
, fp0
, fd
);
11165 tcg_temp_free_i64(fp0
);
11169 check_cp1_registers(ctx
, fs
| fd
);
11171 TCGv_i64 fp0
= tcg_temp_new_i64();
11173 gen_load_fpr64(ctx
, fp0
, fs
);
11174 gen_store_fpr64(ctx
, fp0
, fd
);
11175 tcg_temp_free_i64(fp0
);
11179 check_cp1_registers(ctx
, fs
| fd
);
11181 TCGv_i64 fp0
= tcg_temp_new_i64();
11183 gen_load_fpr64(ctx
, fp0
, fs
);
11184 if (ctx
->abs2008
) {
11185 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11187 gen_helper_float_chs_d(fp0
, fp0
);
11189 gen_store_fpr64(ctx
, fp0
, fd
);
11190 tcg_temp_free_i64(fp0
);
11193 case OPC_ROUND_L_D
:
11194 check_cp1_64bitmode(ctx
);
11196 TCGv_i64 fp0
= tcg_temp_new_i64();
11198 gen_load_fpr64(ctx
, fp0
, fs
);
11199 if (ctx
->nan2008
) {
11200 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11202 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11204 gen_store_fpr64(ctx
, fp0
, fd
);
11205 tcg_temp_free_i64(fp0
);
11208 case OPC_TRUNC_L_D
:
11209 check_cp1_64bitmode(ctx
);
11211 TCGv_i64 fp0
= tcg_temp_new_i64();
11213 gen_load_fpr64(ctx
, fp0
, fs
);
11214 if (ctx
->nan2008
) {
11215 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11217 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11219 gen_store_fpr64(ctx
, fp0
, fd
);
11220 tcg_temp_free_i64(fp0
);
11224 check_cp1_64bitmode(ctx
);
11226 TCGv_i64 fp0
= tcg_temp_new_i64();
11228 gen_load_fpr64(ctx
, fp0
, fs
);
11229 if (ctx
->nan2008
) {
11230 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11232 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11234 gen_store_fpr64(ctx
, fp0
, fd
);
11235 tcg_temp_free_i64(fp0
);
11238 case OPC_FLOOR_L_D
:
11239 check_cp1_64bitmode(ctx
);
11241 TCGv_i64 fp0
= tcg_temp_new_i64();
11243 gen_load_fpr64(ctx
, fp0
, fs
);
11244 if (ctx
->nan2008
) {
11245 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11247 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11249 gen_store_fpr64(ctx
, fp0
, fd
);
11250 tcg_temp_free_i64(fp0
);
11253 case OPC_ROUND_W_D
:
11254 check_cp1_registers(ctx
, fs
);
11256 TCGv_i32 fp32
= tcg_temp_new_i32();
11257 TCGv_i64 fp64
= tcg_temp_new_i64();
11259 gen_load_fpr64(ctx
, fp64
, fs
);
11260 if (ctx
->nan2008
) {
11261 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11263 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11265 tcg_temp_free_i64(fp64
);
11266 gen_store_fpr32(ctx
, fp32
, fd
);
11267 tcg_temp_free_i32(fp32
);
11270 case OPC_TRUNC_W_D
:
11271 check_cp1_registers(ctx
, fs
);
11273 TCGv_i32 fp32
= tcg_temp_new_i32();
11274 TCGv_i64 fp64
= tcg_temp_new_i64();
11276 gen_load_fpr64(ctx
, fp64
, fs
);
11277 if (ctx
->nan2008
) {
11278 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11280 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11282 tcg_temp_free_i64(fp64
);
11283 gen_store_fpr32(ctx
, fp32
, fd
);
11284 tcg_temp_free_i32(fp32
);
11288 check_cp1_registers(ctx
, fs
);
11290 TCGv_i32 fp32
= tcg_temp_new_i32();
11291 TCGv_i64 fp64
= tcg_temp_new_i64();
11293 gen_load_fpr64(ctx
, fp64
, fs
);
11294 if (ctx
->nan2008
) {
11295 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11297 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11299 tcg_temp_free_i64(fp64
);
11300 gen_store_fpr32(ctx
, fp32
, fd
);
11301 tcg_temp_free_i32(fp32
);
11304 case OPC_FLOOR_W_D
:
11305 check_cp1_registers(ctx
, fs
);
11307 TCGv_i32 fp32
= tcg_temp_new_i32();
11308 TCGv_i64 fp64
= tcg_temp_new_i64();
11310 gen_load_fpr64(ctx
, fp64
, fs
);
11311 if (ctx
->nan2008
) {
11312 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11314 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11316 tcg_temp_free_i64(fp64
);
11317 gen_store_fpr32(ctx
, fp32
, fd
);
11318 tcg_temp_free_i32(fp32
);
11322 check_insn(ctx
, ISA_MIPS32R6
);
11323 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11326 check_insn(ctx
, ISA_MIPS32R6
);
11327 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11330 check_insn(ctx
, ISA_MIPS32R6
);
11331 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11335 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11338 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11340 TCGLabel
*l1
= gen_new_label();
11344 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11346 fp0
= tcg_temp_new_i64();
11347 gen_load_fpr64(ctx
, fp0
, fs
);
11348 gen_store_fpr64(ctx
, fp0
, fd
);
11349 tcg_temp_free_i64(fp0
);
11354 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11356 TCGLabel
*l1
= gen_new_label();
11360 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11361 fp0
= tcg_temp_new_i64();
11362 gen_load_fpr64(ctx
, fp0
, fs
);
11363 gen_store_fpr64(ctx
, fp0
, fd
);
11364 tcg_temp_free_i64(fp0
);
11370 check_cp1_registers(ctx
, fs
| fd
);
11372 TCGv_i64 fp0
= tcg_temp_new_i64();
11374 gen_load_fpr64(ctx
, fp0
, fs
);
11375 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11376 gen_store_fpr64(ctx
, fp0
, fd
);
11377 tcg_temp_free_i64(fp0
);
11381 check_cp1_registers(ctx
, fs
| fd
);
11383 TCGv_i64 fp0
= tcg_temp_new_i64();
11385 gen_load_fpr64(ctx
, fp0
, fs
);
11386 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11387 gen_store_fpr64(ctx
, fp0
, fd
);
11388 tcg_temp_free_i64(fp0
);
11392 check_insn(ctx
, ISA_MIPS32R6
);
11394 TCGv_i64 fp0
= tcg_temp_new_i64();
11395 TCGv_i64 fp1
= tcg_temp_new_i64();
11396 TCGv_i64 fp2
= tcg_temp_new_i64();
11397 gen_load_fpr64(ctx
, fp0
, fs
);
11398 gen_load_fpr64(ctx
, fp1
, ft
);
11399 gen_load_fpr64(ctx
, fp2
, fd
);
11400 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11401 gen_store_fpr64(ctx
, fp2
, fd
);
11402 tcg_temp_free_i64(fp2
);
11403 tcg_temp_free_i64(fp1
);
11404 tcg_temp_free_i64(fp0
);
11408 check_insn(ctx
, ISA_MIPS32R6
);
11410 TCGv_i64 fp0
= tcg_temp_new_i64();
11411 TCGv_i64 fp1
= tcg_temp_new_i64();
11412 TCGv_i64 fp2
= tcg_temp_new_i64();
11413 gen_load_fpr64(ctx
, fp0
, fs
);
11414 gen_load_fpr64(ctx
, fp1
, ft
);
11415 gen_load_fpr64(ctx
, fp2
, fd
);
11416 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11417 gen_store_fpr64(ctx
, fp2
, fd
);
11418 tcg_temp_free_i64(fp2
);
11419 tcg_temp_free_i64(fp1
);
11420 tcg_temp_free_i64(fp0
);
11424 check_insn(ctx
, ISA_MIPS32R6
);
11426 TCGv_i64 fp0
= tcg_temp_new_i64();
11427 gen_load_fpr64(ctx
, fp0
, fs
);
11428 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11429 gen_store_fpr64(ctx
, fp0
, fd
);
11430 tcg_temp_free_i64(fp0
);
11434 check_insn(ctx
, ISA_MIPS32R6
);
11436 TCGv_i64 fp0
= tcg_temp_new_i64();
11437 gen_load_fpr64(ctx
, fp0
, fs
);
11438 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11439 gen_store_fpr64(ctx
, fp0
, fd
);
11440 tcg_temp_free_i64(fp0
);
11443 case OPC_MIN_D
: /* OPC_RECIP2_D */
11444 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11446 TCGv_i64 fp0
= tcg_temp_new_i64();
11447 TCGv_i64 fp1
= tcg_temp_new_i64();
11448 gen_load_fpr64(ctx
, fp0
, fs
);
11449 gen_load_fpr64(ctx
, fp1
, ft
);
11450 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11451 gen_store_fpr64(ctx
, fp1
, fd
);
11452 tcg_temp_free_i64(fp1
);
11453 tcg_temp_free_i64(fp0
);
11456 check_cp1_64bitmode(ctx
);
11458 TCGv_i64 fp0
= tcg_temp_new_i64();
11459 TCGv_i64 fp1
= tcg_temp_new_i64();
11461 gen_load_fpr64(ctx
, fp0
, fs
);
11462 gen_load_fpr64(ctx
, fp1
, ft
);
11463 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11464 tcg_temp_free_i64(fp1
);
11465 gen_store_fpr64(ctx
, fp0
, fd
);
11466 tcg_temp_free_i64(fp0
);
11470 case OPC_MINA_D
: /* OPC_RECIP1_D */
11471 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11473 TCGv_i64 fp0
= tcg_temp_new_i64();
11474 TCGv_i64 fp1
= tcg_temp_new_i64();
11475 gen_load_fpr64(ctx
, fp0
, fs
);
11476 gen_load_fpr64(ctx
, fp1
, ft
);
11477 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11478 gen_store_fpr64(ctx
, fp1
, fd
);
11479 tcg_temp_free_i64(fp1
);
11480 tcg_temp_free_i64(fp0
);
11483 check_cp1_64bitmode(ctx
);
11485 TCGv_i64 fp0
= tcg_temp_new_i64();
11487 gen_load_fpr64(ctx
, fp0
, fs
);
11488 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11489 gen_store_fpr64(ctx
, fp0
, fd
);
11490 tcg_temp_free_i64(fp0
);
11494 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11495 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11497 TCGv_i64 fp0
= tcg_temp_new_i64();
11498 TCGv_i64 fp1
= tcg_temp_new_i64();
11499 gen_load_fpr64(ctx
, fp0
, fs
);
11500 gen_load_fpr64(ctx
, fp1
, ft
);
11501 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11502 gen_store_fpr64(ctx
, fp1
, fd
);
11503 tcg_temp_free_i64(fp1
);
11504 tcg_temp_free_i64(fp0
);
11507 check_cp1_64bitmode(ctx
);
11509 TCGv_i64 fp0
= tcg_temp_new_i64();
11511 gen_load_fpr64(ctx
, fp0
, fs
);
11512 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11513 gen_store_fpr64(ctx
, fp0
, fd
);
11514 tcg_temp_free_i64(fp0
);
11518 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11519 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11521 TCGv_i64 fp0
= tcg_temp_new_i64();
11522 TCGv_i64 fp1
= tcg_temp_new_i64();
11523 gen_load_fpr64(ctx
, fp0
, fs
);
11524 gen_load_fpr64(ctx
, fp1
, ft
);
11525 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11526 gen_store_fpr64(ctx
, fp1
, fd
);
11527 tcg_temp_free_i64(fp1
);
11528 tcg_temp_free_i64(fp0
);
11531 check_cp1_64bitmode(ctx
);
11533 TCGv_i64 fp0
= tcg_temp_new_i64();
11534 TCGv_i64 fp1
= tcg_temp_new_i64();
11536 gen_load_fpr64(ctx
, fp0
, fs
);
11537 gen_load_fpr64(ctx
, fp1
, ft
);
11538 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11539 tcg_temp_free_i64(fp1
);
11540 gen_store_fpr64(ctx
, fp0
, fd
);
11541 tcg_temp_free_i64(fp0
);
11548 case OPC_CMP_UEQ_D
:
11549 case OPC_CMP_OLT_D
:
11550 case OPC_CMP_ULT_D
:
11551 case OPC_CMP_OLE_D
:
11552 case OPC_CMP_ULE_D
:
11554 case OPC_CMP_NGLE_D
:
11555 case OPC_CMP_SEQ_D
:
11556 case OPC_CMP_NGL_D
:
11558 case OPC_CMP_NGE_D
:
11560 case OPC_CMP_NGT_D
:
11561 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11562 if (ctx
->opcode
& (1 << 6)) {
11563 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11565 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11569 check_cp1_registers(ctx
, fs
);
11571 TCGv_i32 fp32
= tcg_temp_new_i32();
11572 TCGv_i64 fp64
= tcg_temp_new_i64();
11574 gen_load_fpr64(ctx
, fp64
, fs
);
11575 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11576 tcg_temp_free_i64(fp64
);
11577 gen_store_fpr32(ctx
, fp32
, fd
);
11578 tcg_temp_free_i32(fp32
);
11582 check_cp1_registers(ctx
, fs
);
11584 TCGv_i32 fp32
= tcg_temp_new_i32();
11585 TCGv_i64 fp64
= tcg_temp_new_i64();
11587 gen_load_fpr64(ctx
, fp64
, fs
);
11588 if (ctx
->nan2008
) {
11589 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11591 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11593 tcg_temp_free_i64(fp64
);
11594 gen_store_fpr32(ctx
, fp32
, fd
);
11595 tcg_temp_free_i32(fp32
);
11599 check_cp1_64bitmode(ctx
);
11601 TCGv_i64 fp0
= tcg_temp_new_i64();
11603 gen_load_fpr64(ctx
, fp0
, fs
);
11604 if (ctx
->nan2008
) {
11605 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11607 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11609 gen_store_fpr64(ctx
, fp0
, fd
);
11610 tcg_temp_free_i64(fp0
);
11615 TCGv_i32 fp0
= tcg_temp_new_i32();
11617 gen_load_fpr32(ctx
, fp0
, fs
);
11618 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11619 gen_store_fpr32(ctx
, fp0
, fd
);
11620 tcg_temp_free_i32(fp0
);
11624 check_cp1_registers(ctx
, fd
);
11626 TCGv_i32 fp32
= tcg_temp_new_i32();
11627 TCGv_i64 fp64
= tcg_temp_new_i64();
11629 gen_load_fpr32(ctx
, fp32
, fs
);
11630 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11631 tcg_temp_free_i32(fp32
);
11632 gen_store_fpr64(ctx
, fp64
, fd
);
11633 tcg_temp_free_i64(fp64
);
11637 check_cp1_64bitmode(ctx
);
11639 TCGv_i32 fp32
= tcg_temp_new_i32();
11640 TCGv_i64 fp64
= tcg_temp_new_i64();
11642 gen_load_fpr64(ctx
, fp64
, fs
);
11643 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11644 tcg_temp_free_i64(fp64
);
11645 gen_store_fpr32(ctx
, fp32
, fd
);
11646 tcg_temp_free_i32(fp32
);
11650 check_cp1_64bitmode(ctx
);
11652 TCGv_i64 fp0
= tcg_temp_new_i64();
11654 gen_load_fpr64(ctx
, fp0
, fs
);
11655 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11656 gen_store_fpr64(ctx
, fp0
, fd
);
11657 tcg_temp_free_i64(fp0
);
11660 case OPC_CVT_PS_PW
:
11663 TCGv_i64 fp0
= tcg_temp_new_i64();
11665 gen_load_fpr64(ctx
, fp0
, fs
);
11666 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11667 gen_store_fpr64(ctx
, fp0
, fd
);
11668 tcg_temp_free_i64(fp0
);
11674 TCGv_i64 fp0
= tcg_temp_new_i64();
11675 TCGv_i64 fp1
= tcg_temp_new_i64();
11677 gen_load_fpr64(ctx
, fp0
, fs
);
11678 gen_load_fpr64(ctx
, fp1
, ft
);
11679 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11680 tcg_temp_free_i64(fp1
);
11681 gen_store_fpr64(ctx
, fp0
, fd
);
11682 tcg_temp_free_i64(fp0
);
11688 TCGv_i64 fp0
= tcg_temp_new_i64();
11689 TCGv_i64 fp1
= tcg_temp_new_i64();
11691 gen_load_fpr64(ctx
, fp0
, fs
);
11692 gen_load_fpr64(ctx
, fp1
, ft
);
11693 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11694 tcg_temp_free_i64(fp1
);
11695 gen_store_fpr64(ctx
, fp0
, fd
);
11696 tcg_temp_free_i64(fp0
);
11702 TCGv_i64 fp0
= tcg_temp_new_i64();
11703 TCGv_i64 fp1
= tcg_temp_new_i64();
11705 gen_load_fpr64(ctx
, fp0
, fs
);
11706 gen_load_fpr64(ctx
, fp1
, ft
);
11707 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11708 tcg_temp_free_i64(fp1
);
11709 gen_store_fpr64(ctx
, fp0
, fd
);
11710 tcg_temp_free_i64(fp0
);
11716 TCGv_i64 fp0
= tcg_temp_new_i64();
11718 gen_load_fpr64(ctx
, fp0
, fs
);
11719 gen_helper_float_abs_ps(fp0
, fp0
);
11720 gen_store_fpr64(ctx
, fp0
, fd
);
11721 tcg_temp_free_i64(fp0
);
11727 TCGv_i64 fp0
= tcg_temp_new_i64();
11729 gen_load_fpr64(ctx
, fp0
, fs
);
11730 gen_store_fpr64(ctx
, fp0
, fd
);
11731 tcg_temp_free_i64(fp0
);
11737 TCGv_i64 fp0
= tcg_temp_new_i64();
11739 gen_load_fpr64(ctx
, fp0
, fs
);
11740 gen_helper_float_chs_ps(fp0
, fp0
);
11741 gen_store_fpr64(ctx
, fp0
, fd
);
11742 tcg_temp_free_i64(fp0
);
11747 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11752 TCGLabel
*l1
= gen_new_label();
11756 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11757 fp0
= tcg_temp_new_i64();
11758 gen_load_fpr64(ctx
, fp0
, fs
);
11759 gen_store_fpr64(ctx
, fp0
, fd
);
11760 tcg_temp_free_i64(fp0
);
11767 TCGLabel
*l1
= gen_new_label();
11771 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11772 fp0
= tcg_temp_new_i64();
11773 gen_load_fpr64(ctx
, fp0
, fs
);
11774 gen_store_fpr64(ctx
, fp0
, fd
);
11775 tcg_temp_free_i64(fp0
);
11783 TCGv_i64 fp0
= tcg_temp_new_i64();
11784 TCGv_i64 fp1
= tcg_temp_new_i64();
11786 gen_load_fpr64(ctx
, fp0
, ft
);
11787 gen_load_fpr64(ctx
, fp1
, fs
);
11788 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11789 tcg_temp_free_i64(fp1
);
11790 gen_store_fpr64(ctx
, fp0
, fd
);
11791 tcg_temp_free_i64(fp0
);
11797 TCGv_i64 fp0
= tcg_temp_new_i64();
11798 TCGv_i64 fp1
= tcg_temp_new_i64();
11800 gen_load_fpr64(ctx
, fp0
, ft
);
11801 gen_load_fpr64(ctx
, fp1
, fs
);
11802 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11803 tcg_temp_free_i64(fp1
);
11804 gen_store_fpr64(ctx
, fp0
, fd
);
11805 tcg_temp_free_i64(fp0
);
11808 case OPC_RECIP2_PS
:
11811 TCGv_i64 fp0
= tcg_temp_new_i64();
11812 TCGv_i64 fp1
= tcg_temp_new_i64();
11814 gen_load_fpr64(ctx
, fp0
, fs
);
11815 gen_load_fpr64(ctx
, fp1
, ft
);
11816 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11817 tcg_temp_free_i64(fp1
);
11818 gen_store_fpr64(ctx
, fp0
, fd
);
11819 tcg_temp_free_i64(fp0
);
11822 case OPC_RECIP1_PS
:
11825 TCGv_i64 fp0
= tcg_temp_new_i64();
11827 gen_load_fpr64(ctx
, fp0
, fs
);
11828 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11829 gen_store_fpr64(ctx
, fp0
, fd
);
11830 tcg_temp_free_i64(fp0
);
11833 case OPC_RSQRT1_PS
:
11836 TCGv_i64 fp0
= tcg_temp_new_i64();
11838 gen_load_fpr64(ctx
, fp0
, fs
);
11839 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11840 gen_store_fpr64(ctx
, fp0
, fd
);
11841 tcg_temp_free_i64(fp0
);
11844 case OPC_RSQRT2_PS
:
11847 TCGv_i64 fp0
= tcg_temp_new_i64();
11848 TCGv_i64 fp1
= tcg_temp_new_i64();
11850 gen_load_fpr64(ctx
, fp0
, fs
);
11851 gen_load_fpr64(ctx
, fp1
, ft
);
11852 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11853 tcg_temp_free_i64(fp1
);
11854 gen_store_fpr64(ctx
, fp0
, fd
);
11855 tcg_temp_free_i64(fp0
);
11859 check_cp1_64bitmode(ctx
);
11861 TCGv_i32 fp0
= tcg_temp_new_i32();
11863 gen_load_fpr32h(ctx
, fp0
, fs
);
11864 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11865 gen_store_fpr32(ctx
, fp0
, fd
);
11866 tcg_temp_free_i32(fp0
);
11869 case OPC_CVT_PW_PS
:
11872 TCGv_i64 fp0
= tcg_temp_new_i64();
11874 gen_load_fpr64(ctx
, fp0
, fs
);
11875 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11876 gen_store_fpr64(ctx
, fp0
, fd
);
11877 tcg_temp_free_i64(fp0
);
11881 check_cp1_64bitmode(ctx
);
11883 TCGv_i32 fp0
= tcg_temp_new_i32();
11885 gen_load_fpr32(ctx
, fp0
, fs
);
11886 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11887 gen_store_fpr32(ctx
, fp0
, fd
);
11888 tcg_temp_free_i32(fp0
);
11894 TCGv_i32 fp0
= tcg_temp_new_i32();
11895 TCGv_i32 fp1
= tcg_temp_new_i32();
11897 gen_load_fpr32(ctx
, fp0
, fs
);
11898 gen_load_fpr32(ctx
, fp1
, ft
);
11899 gen_store_fpr32h(ctx
, fp0
, fd
);
11900 gen_store_fpr32(ctx
, fp1
, fd
);
11901 tcg_temp_free_i32(fp0
);
11902 tcg_temp_free_i32(fp1
);
11908 TCGv_i32 fp0
= tcg_temp_new_i32();
11909 TCGv_i32 fp1
= tcg_temp_new_i32();
11911 gen_load_fpr32(ctx
, fp0
, fs
);
11912 gen_load_fpr32h(ctx
, fp1
, ft
);
11913 gen_store_fpr32(ctx
, fp1
, fd
);
11914 gen_store_fpr32h(ctx
, fp0
, fd
);
11915 tcg_temp_free_i32(fp0
);
11916 tcg_temp_free_i32(fp1
);
11922 TCGv_i32 fp0
= tcg_temp_new_i32();
11923 TCGv_i32 fp1
= tcg_temp_new_i32();
11925 gen_load_fpr32h(ctx
, fp0
, fs
);
11926 gen_load_fpr32(ctx
, fp1
, ft
);
11927 gen_store_fpr32(ctx
, fp1
, fd
);
11928 gen_store_fpr32h(ctx
, fp0
, fd
);
11929 tcg_temp_free_i32(fp0
);
11930 tcg_temp_free_i32(fp1
);
11936 TCGv_i32 fp0
= tcg_temp_new_i32();
11937 TCGv_i32 fp1
= tcg_temp_new_i32();
11939 gen_load_fpr32h(ctx
, fp0
, fs
);
11940 gen_load_fpr32h(ctx
, fp1
, ft
);
11941 gen_store_fpr32(ctx
, fp1
, fd
);
11942 gen_store_fpr32h(ctx
, fp0
, fd
);
11943 tcg_temp_free_i32(fp0
);
11944 tcg_temp_free_i32(fp1
);
11948 case OPC_CMP_UN_PS
:
11949 case OPC_CMP_EQ_PS
:
11950 case OPC_CMP_UEQ_PS
:
11951 case OPC_CMP_OLT_PS
:
11952 case OPC_CMP_ULT_PS
:
11953 case OPC_CMP_OLE_PS
:
11954 case OPC_CMP_ULE_PS
:
11955 case OPC_CMP_SF_PS
:
11956 case OPC_CMP_NGLE_PS
:
11957 case OPC_CMP_SEQ_PS
:
11958 case OPC_CMP_NGL_PS
:
11959 case OPC_CMP_LT_PS
:
11960 case OPC_CMP_NGE_PS
:
11961 case OPC_CMP_LE_PS
:
11962 case OPC_CMP_NGT_PS
:
11963 if (ctx
->opcode
& (1 << 6)) {
11964 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
11966 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
11970 MIPS_INVAL("farith");
11971 generate_exception_end(ctx
, EXCP_RI
);
11976 /* Coprocessor 3 (FPU) */
11977 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
11978 int fd
, int fs
, int base
, int index
)
11980 TCGv t0
= tcg_temp_new();
11983 gen_load_gpr(t0
, index
);
11984 } else if (index
== 0) {
11985 gen_load_gpr(t0
, base
);
11987 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
11989 /* Don't do NOP if destination is zero: we must perform the actual
11995 TCGv_i32 fp0
= tcg_temp_new_i32();
11997 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
11998 tcg_gen_trunc_tl_i32(fp0
, t0
);
11999 gen_store_fpr32(ctx
, fp0
, fd
);
12000 tcg_temp_free_i32(fp0
);
12005 check_cp1_registers(ctx
, fd
);
12007 TCGv_i64 fp0
= tcg_temp_new_i64();
12008 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12009 gen_store_fpr64(ctx
, fp0
, fd
);
12010 tcg_temp_free_i64(fp0
);
12014 check_cp1_64bitmode(ctx
);
12015 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12017 TCGv_i64 fp0
= tcg_temp_new_i64();
12019 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12020 gen_store_fpr64(ctx
, fp0
, fd
);
12021 tcg_temp_free_i64(fp0
);
12027 TCGv_i32 fp0
= tcg_temp_new_i32();
12028 gen_load_fpr32(ctx
, fp0
, fs
);
12029 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12030 tcg_temp_free_i32(fp0
);
12035 check_cp1_registers(ctx
, fs
);
12037 TCGv_i64 fp0
= tcg_temp_new_i64();
12038 gen_load_fpr64(ctx
, fp0
, fs
);
12039 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12040 tcg_temp_free_i64(fp0
);
12044 check_cp1_64bitmode(ctx
);
12045 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12047 TCGv_i64 fp0
= tcg_temp_new_i64();
12048 gen_load_fpr64(ctx
, fp0
, fs
);
12049 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12050 tcg_temp_free_i64(fp0
);
12057 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12058 int fd
, int fr
, int fs
, int ft
)
12064 TCGv t0
= tcg_temp_local_new();
12065 TCGv_i32 fp
= tcg_temp_new_i32();
12066 TCGv_i32 fph
= tcg_temp_new_i32();
12067 TCGLabel
*l1
= gen_new_label();
12068 TCGLabel
*l2
= gen_new_label();
12070 gen_load_gpr(t0
, fr
);
12071 tcg_gen_andi_tl(t0
, t0
, 0x7);
12073 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12074 gen_load_fpr32(ctx
, fp
, fs
);
12075 gen_load_fpr32h(ctx
, fph
, fs
);
12076 gen_store_fpr32(ctx
, fp
, fd
);
12077 gen_store_fpr32h(ctx
, fph
, fd
);
12080 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12082 #ifdef TARGET_WORDS_BIGENDIAN
12083 gen_load_fpr32(ctx
, fp
, fs
);
12084 gen_load_fpr32h(ctx
, fph
, ft
);
12085 gen_store_fpr32h(ctx
, fp
, fd
);
12086 gen_store_fpr32(ctx
, fph
, fd
);
12088 gen_load_fpr32h(ctx
, fph
, fs
);
12089 gen_load_fpr32(ctx
, fp
, ft
);
12090 gen_store_fpr32(ctx
, fph
, fd
);
12091 gen_store_fpr32h(ctx
, fp
, fd
);
12094 tcg_temp_free_i32(fp
);
12095 tcg_temp_free_i32(fph
);
12101 TCGv_i32 fp0
= tcg_temp_new_i32();
12102 TCGv_i32 fp1
= tcg_temp_new_i32();
12103 TCGv_i32 fp2
= tcg_temp_new_i32();
12105 gen_load_fpr32(ctx
, fp0
, fs
);
12106 gen_load_fpr32(ctx
, fp1
, ft
);
12107 gen_load_fpr32(ctx
, fp2
, fr
);
12108 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12109 tcg_temp_free_i32(fp0
);
12110 tcg_temp_free_i32(fp1
);
12111 gen_store_fpr32(ctx
, fp2
, fd
);
12112 tcg_temp_free_i32(fp2
);
12117 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12119 TCGv_i64 fp0
= tcg_temp_new_i64();
12120 TCGv_i64 fp1
= tcg_temp_new_i64();
12121 TCGv_i64 fp2
= tcg_temp_new_i64();
12123 gen_load_fpr64(ctx
, fp0
, fs
);
12124 gen_load_fpr64(ctx
, fp1
, ft
);
12125 gen_load_fpr64(ctx
, fp2
, fr
);
12126 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12127 tcg_temp_free_i64(fp0
);
12128 tcg_temp_free_i64(fp1
);
12129 gen_store_fpr64(ctx
, fp2
, fd
);
12130 tcg_temp_free_i64(fp2
);
12136 TCGv_i64 fp0
= tcg_temp_new_i64();
12137 TCGv_i64 fp1
= tcg_temp_new_i64();
12138 TCGv_i64 fp2
= tcg_temp_new_i64();
12140 gen_load_fpr64(ctx
, fp0
, fs
);
12141 gen_load_fpr64(ctx
, fp1
, ft
);
12142 gen_load_fpr64(ctx
, fp2
, fr
);
12143 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12144 tcg_temp_free_i64(fp0
);
12145 tcg_temp_free_i64(fp1
);
12146 gen_store_fpr64(ctx
, fp2
, fd
);
12147 tcg_temp_free_i64(fp2
);
12153 TCGv_i32 fp0
= tcg_temp_new_i32();
12154 TCGv_i32 fp1
= tcg_temp_new_i32();
12155 TCGv_i32 fp2
= tcg_temp_new_i32();
12157 gen_load_fpr32(ctx
, fp0
, fs
);
12158 gen_load_fpr32(ctx
, fp1
, ft
);
12159 gen_load_fpr32(ctx
, fp2
, fr
);
12160 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12161 tcg_temp_free_i32(fp0
);
12162 tcg_temp_free_i32(fp1
);
12163 gen_store_fpr32(ctx
, fp2
, fd
);
12164 tcg_temp_free_i32(fp2
);
12169 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12171 TCGv_i64 fp0
= tcg_temp_new_i64();
12172 TCGv_i64 fp1
= tcg_temp_new_i64();
12173 TCGv_i64 fp2
= tcg_temp_new_i64();
12175 gen_load_fpr64(ctx
, fp0
, fs
);
12176 gen_load_fpr64(ctx
, fp1
, ft
);
12177 gen_load_fpr64(ctx
, fp2
, fr
);
12178 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12179 tcg_temp_free_i64(fp0
);
12180 tcg_temp_free_i64(fp1
);
12181 gen_store_fpr64(ctx
, fp2
, fd
);
12182 tcg_temp_free_i64(fp2
);
12188 TCGv_i64 fp0
= tcg_temp_new_i64();
12189 TCGv_i64 fp1
= tcg_temp_new_i64();
12190 TCGv_i64 fp2
= tcg_temp_new_i64();
12192 gen_load_fpr64(ctx
, fp0
, fs
);
12193 gen_load_fpr64(ctx
, fp1
, ft
);
12194 gen_load_fpr64(ctx
, fp2
, fr
);
12195 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12196 tcg_temp_free_i64(fp0
);
12197 tcg_temp_free_i64(fp1
);
12198 gen_store_fpr64(ctx
, fp2
, fd
);
12199 tcg_temp_free_i64(fp2
);
12205 TCGv_i32 fp0
= tcg_temp_new_i32();
12206 TCGv_i32 fp1
= tcg_temp_new_i32();
12207 TCGv_i32 fp2
= tcg_temp_new_i32();
12209 gen_load_fpr32(ctx
, fp0
, fs
);
12210 gen_load_fpr32(ctx
, fp1
, ft
);
12211 gen_load_fpr32(ctx
, fp2
, fr
);
12212 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12213 tcg_temp_free_i32(fp0
);
12214 tcg_temp_free_i32(fp1
);
12215 gen_store_fpr32(ctx
, fp2
, fd
);
12216 tcg_temp_free_i32(fp2
);
12221 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12223 TCGv_i64 fp0
= tcg_temp_new_i64();
12224 TCGv_i64 fp1
= tcg_temp_new_i64();
12225 TCGv_i64 fp2
= tcg_temp_new_i64();
12227 gen_load_fpr64(ctx
, fp0
, fs
);
12228 gen_load_fpr64(ctx
, fp1
, ft
);
12229 gen_load_fpr64(ctx
, fp2
, fr
);
12230 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12231 tcg_temp_free_i64(fp0
);
12232 tcg_temp_free_i64(fp1
);
12233 gen_store_fpr64(ctx
, fp2
, fd
);
12234 tcg_temp_free_i64(fp2
);
12240 TCGv_i64 fp0
= tcg_temp_new_i64();
12241 TCGv_i64 fp1
= tcg_temp_new_i64();
12242 TCGv_i64 fp2
= tcg_temp_new_i64();
12244 gen_load_fpr64(ctx
, fp0
, fs
);
12245 gen_load_fpr64(ctx
, fp1
, ft
);
12246 gen_load_fpr64(ctx
, fp2
, fr
);
12247 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12248 tcg_temp_free_i64(fp0
);
12249 tcg_temp_free_i64(fp1
);
12250 gen_store_fpr64(ctx
, fp2
, fd
);
12251 tcg_temp_free_i64(fp2
);
12257 TCGv_i32 fp0
= tcg_temp_new_i32();
12258 TCGv_i32 fp1
= tcg_temp_new_i32();
12259 TCGv_i32 fp2
= tcg_temp_new_i32();
12261 gen_load_fpr32(ctx
, fp0
, fs
);
12262 gen_load_fpr32(ctx
, fp1
, ft
);
12263 gen_load_fpr32(ctx
, fp2
, fr
);
12264 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12265 tcg_temp_free_i32(fp0
);
12266 tcg_temp_free_i32(fp1
);
12267 gen_store_fpr32(ctx
, fp2
, fd
);
12268 tcg_temp_free_i32(fp2
);
12273 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12275 TCGv_i64 fp0
= tcg_temp_new_i64();
12276 TCGv_i64 fp1
= tcg_temp_new_i64();
12277 TCGv_i64 fp2
= tcg_temp_new_i64();
12279 gen_load_fpr64(ctx
, fp0
, fs
);
12280 gen_load_fpr64(ctx
, fp1
, ft
);
12281 gen_load_fpr64(ctx
, fp2
, fr
);
12282 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12283 tcg_temp_free_i64(fp0
);
12284 tcg_temp_free_i64(fp1
);
12285 gen_store_fpr64(ctx
, fp2
, fd
);
12286 tcg_temp_free_i64(fp2
);
12292 TCGv_i64 fp0
= tcg_temp_new_i64();
12293 TCGv_i64 fp1
= tcg_temp_new_i64();
12294 TCGv_i64 fp2
= tcg_temp_new_i64();
12296 gen_load_fpr64(ctx
, fp0
, fs
);
12297 gen_load_fpr64(ctx
, fp1
, ft
);
12298 gen_load_fpr64(ctx
, fp2
, fr
);
12299 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12300 tcg_temp_free_i64(fp0
);
12301 tcg_temp_free_i64(fp1
);
12302 gen_store_fpr64(ctx
, fp2
, fd
);
12303 tcg_temp_free_i64(fp2
);
12307 MIPS_INVAL("flt3_arith");
12308 generate_exception_end(ctx
, EXCP_RI
);
12313 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12317 #if !defined(CONFIG_USER_ONLY)
12318 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12319 Therefore only check the ISA in system mode. */
12320 check_insn(ctx
, ISA_MIPS32R2
);
12322 t0
= tcg_temp_new();
12326 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12327 gen_store_gpr(t0
, rt
);
12330 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12331 gen_store_gpr(t0
, rt
);
12334 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12337 gen_helper_rdhwr_cc(t0
, cpu_env
);
12338 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12341 gen_store_gpr(t0
, rt
);
12342 /* Break the TB to be able to take timer interrupts immediately
12343 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12344 we break completely out of translated code. */
12345 gen_save_pc(ctx
->base
.pc_next
+ 4);
12346 ctx
->base
.is_jmp
= DISAS_EXIT
;
12349 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12350 gen_store_gpr(t0
, rt
);
12353 check_insn(ctx
, ISA_MIPS32R6
);
12355 /* Performance counter registers are not implemented other than
12356 * control register 0.
12358 generate_exception(ctx
, EXCP_RI
);
12360 gen_helper_rdhwr_performance(t0
, cpu_env
);
12361 gen_store_gpr(t0
, rt
);
12364 check_insn(ctx
, ISA_MIPS32R6
);
12365 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12366 gen_store_gpr(t0
, rt
);
12369 #if defined(CONFIG_USER_ONLY)
12370 tcg_gen_ld_tl(t0
, cpu_env
,
12371 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12372 gen_store_gpr(t0
, rt
);
12375 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12376 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12377 tcg_gen_ld_tl(t0
, cpu_env
,
12378 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12379 gen_store_gpr(t0
, rt
);
12381 generate_exception_end(ctx
, EXCP_RI
);
12385 default: /* Invalid */
12386 MIPS_INVAL("rdhwr");
12387 generate_exception_end(ctx
, EXCP_RI
);
12393 static inline void clear_branch_hflags(DisasContext
*ctx
)
12395 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12396 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12397 save_cpu_state(ctx
, 0);
12399 /* it is not safe to save ctx->hflags as hflags may be changed
12400 in execution time by the instruction in delay / forbidden slot. */
12401 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12405 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12407 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12408 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12409 /* Branches completion */
12410 clear_branch_hflags(ctx
);
12411 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12412 /* FIXME: Need to clear can_do_io. */
12413 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12414 case MIPS_HFLAG_FBNSLOT
:
12415 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12418 /* unconditional branch */
12419 if (proc_hflags
& MIPS_HFLAG_BX
) {
12420 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12422 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12424 case MIPS_HFLAG_BL
:
12425 /* blikely taken case */
12426 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12428 case MIPS_HFLAG_BC
:
12429 /* Conditional branch */
12431 TCGLabel
*l1
= gen_new_label();
12433 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12434 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12436 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12439 case MIPS_HFLAG_BR
:
12440 /* unconditional branch to register */
12441 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12442 TCGv t0
= tcg_temp_new();
12443 TCGv_i32 t1
= tcg_temp_new_i32();
12445 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12446 tcg_gen_trunc_tl_i32(t1
, t0
);
12448 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12449 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12450 tcg_gen_or_i32(hflags
, hflags
, t1
);
12451 tcg_temp_free_i32(t1
);
12453 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12455 tcg_gen_mov_tl(cpu_PC
, btarget
);
12457 if (ctx
->base
.singlestep_enabled
) {
12458 save_cpu_state(ctx
, 0);
12459 gen_helper_raise_exception_debug(cpu_env
);
12461 tcg_gen_lookup_and_goto_ptr();
12464 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12470 /* Compact Branches */
12471 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12472 int rs
, int rt
, int32_t offset
)
12474 int bcond_compute
= 0;
12475 TCGv t0
= tcg_temp_new();
12476 TCGv t1
= tcg_temp_new();
12477 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12479 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12480 #ifdef MIPS_DEBUG_DISAS
12481 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12482 "\n", ctx
->base
.pc_next
);
12484 generate_exception_end(ctx
, EXCP_RI
);
12488 /* Load needed operands and calculate btarget */
12490 /* compact branch */
12491 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12492 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12493 gen_load_gpr(t0
, rs
);
12494 gen_load_gpr(t1
, rt
);
12496 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12497 if (rs
<= rt
&& rs
== 0) {
12498 /* OPC_BEQZALC, OPC_BNEZALC */
12499 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12502 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12503 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12504 gen_load_gpr(t0
, rs
);
12505 gen_load_gpr(t1
, rt
);
12507 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12509 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12510 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12511 if (rs
== 0 || rs
== rt
) {
12512 /* OPC_BLEZALC, OPC_BGEZALC */
12513 /* OPC_BGTZALC, OPC_BLTZALC */
12514 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12516 gen_load_gpr(t0
, rs
);
12517 gen_load_gpr(t1
, rt
);
12519 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12523 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12528 /* OPC_BEQZC, OPC_BNEZC */
12529 gen_load_gpr(t0
, rs
);
12531 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12533 /* OPC_JIC, OPC_JIALC */
12534 TCGv tbase
= tcg_temp_new();
12535 TCGv toffset
= tcg_temp_new();
12537 gen_load_gpr(tbase
, rt
);
12538 tcg_gen_movi_tl(toffset
, offset
);
12539 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12540 tcg_temp_free(tbase
);
12541 tcg_temp_free(toffset
);
12545 MIPS_INVAL("Compact branch/jump");
12546 generate_exception_end(ctx
, EXCP_RI
);
12550 if (bcond_compute
== 0) {
12551 /* Uncoditional compact branch */
12554 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12557 ctx
->hflags
|= MIPS_HFLAG_BR
;
12560 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12563 ctx
->hflags
|= MIPS_HFLAG_B
;
12566 MIPS_INVAL("Compact branch/jump");
12567 generate_exception_end(ctx
, EXCP_RI
);
12571 /* Generating branch here as compact branches don't have delay slot */
12572 gen_branch(ctx
, 4);
12574 /* Conditional compact branch */
12575 TCGLabel
*fs
= gen_new_label();
12576 save_cpu_state(ctx
, 0);
12579 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12580 if (rs
== 0 && rt
!= 0) {
12582 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12583 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12585 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12588 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12591 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12592 if (rs
== 0 && rt
!= 0) {
12594 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12595 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12597 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12600 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12603 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12604 if (rs
== 0 && rt
!= 0) {
12606 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12607 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12609 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12612 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12615 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12616 if (rs
== 0 && rt
!= 0) {
12618 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12619 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12621 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12624 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12627 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12628 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12630 /* OPC_BOVC, OPC_BNVC */
12631 TCGv t2
= tcg_temp_new();
12632 TCGv t3
= tcg_temp_new();
12633 TCGv t4
= tcg_temp_new();
12634 TCGv input_overflow
= tcg_temp_new();
12636 gen_load_gpr(t0
, rs
);
12637 gen_load_gpr(t1
, rt
);
12638 tcg_gen_ext32s_tl(t2
, t0
);
12639 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12640 tcg_gen_ext32s_tl(t3
, t1
);
12641 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12642 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12644 tcg_gen_add_tl(t4
, t2
, t3
);
12645 tcg_gen_ext32s_tl(t4
, t4
);
12646 tcg_gen_xor_tl(t2
, t2
, t3
);
12647 tcg_gen_xor_tl(t3
, t4
, t3
);
12648 tcg_gen_andc_tl(t2
, t3
, t2
);
12649 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12650 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12651 if (opc
== OPC_BOVC
) {
12653 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12656 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12658 tcg_temp_free(input_overflow
);
12662 } else if (rs
< rt
&& rs
== 0) {
12663 /* OPC_BEQZALC, OPC_BNEZALC */
12664 if (opc
== OPC_BEQZALC
) {
12666 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12669 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12672 /* OPC_BEQC, OPC_BNEC */
12673 if (opc
== OPC_BEQC
) {
12675 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12678 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12683 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12686 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12689 MIPS_INVAL("Compact conditional branch/jump");
12690 generate_exception_end(ctx
, EXCP_RI
);
12694 /* Generating branch here as compact branches don't have delay slot */
12695 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12698 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12706 /* ISA extensions (ASEs) */
12707 /* MIPS16 extension to MIPS32 */
12709 /* MIPS16 major opcodes */
12711 M16_OPC_ADDIUSP
= 0x00,
12712 M16_OPC_ADDIUPC
= 0x01,
12714 M16_OPC_JAL
= 0x03,
12715 M16_OPC_BEQZ
= 0x04,
12716 M16_OPC_BNEQZ
= 0x05,
12717 M16_OPC_SHIFT
= 0x06,
12719 M16_OPC_RRIA
= 0x08,
12720 M16_OPC_ADDIU8
= 0x09,
12721 M16_OPC_SLTI
= 0x0a,
12722 M16_OPC_SLTIU
= 0x0b,
12725 M16_OPC_CMPI
= 0x0e,
12729 M16_OPC_LWSP
= 0x12,
12731 M16_OPC_LBU
= 0x14,
12732 M16_OPC_LHU
= 0x15,
12733 M16_OPC_LWPC
= 0x16,
12734 M16_OPC_LWU
= 0x17,
12737 M16_OPC_SWSP
= 0x1a,
12739 M16_OPC_RRR
= 0x1c,
12741 M16_OPC_EXTEND
= 0x1e,
12745 /* I8 funct field */
12764 /* RR funct field */
12798 /* I64 funct field */
12806 I64_DADDIUPC
= 0x6,
12810 /* RR ry field for CNVT */
12812 RR_RY_CNVT_ZEB
= 0x0,
12813 RR_RY_CNVT_ZEH
= 0x1,
12814 RR_RY_CNVT_ZEW
= 0x2,
12815 RR_RY_CNVT_SEB
= 0x4,
12816 RR_RY_CNVT_SEH
= 0x5,
12817 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field to the full GPR number.
 * Fields 0 and 1 map to $16/$17 (s0/s1); 2..7 map to $2..$7.
 */
static int xlat (int r)
{
    /* const: the table is read-only, matching mmreg()/mmreg2(). */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12827 static void gen_mips16_save (DisasContext
*ctx
,
12828 int xsregs
, int aregs
,
12829 int do_ra
, int do_s0
, int do_s1
,
12832 TCGv t0
= tcg_temp_new();
12833 TCGv t1
= tcg_temp_new();
12834 TCGv t2
= tcg_temp_new();
12864 generate_exception_end(ctx
, EXCP_RI
);
12870 gen_base_offset_addr(ctx
, t0
, 29, 12);
12871 gen_load_gpr(t1
, 7);
12872 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12875 gen_base_offset_addr(ctx
, t0
, 29, 8);
12876 gen_load_gpr(t1
, 6);
12877 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12880 gen_base_offset_addr(ctx
, t0
, 29, 4);
12881 gen_load_gpr(t1
, 5);
12882 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12885 gen_base_offset_addr(ctx
, t0
, 29, 0);
12886 gen_load_gpr(t1
, 4);
12887 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12890 gen_load_gpr(t0
, 29);
12892 #define DECR_AND_STORE(reg) do { \
12893 tcg_gen_movi_tl(t2, -4); \
12894 gen_op_addr_add(ctx, t0, t0, t2); \
12895 gen_load_gpr(t1, reg); \
12896 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
12900 DECR_AND_STORE(31);
12905 DECR_AND_STORE(30);
12908 DECR_AND_STORE(23);
12911 DECR_AND_STORE(22);
12914 DECR_AND_STORE(21);
12917 DECR_AND_STORE(20);
12920 DECR_AND_STORE(19);
12923 DECR_AND_STORE(18);
12927 DECR_AND_STORE(17);
12930 DECR_AND_STORE(16);
12960 generate_exception_end(ctx
, EXCP_RI
);
12976 #undef DECR_AND_STORE
12978 tcg_gen_movi_tl(t2
, -framesize
);
12979 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
12985 static void gen_mips16_restore (DisasContext
*ctx
,
12986 int xsregs
, int aregs
,
12987 int do_ra
, int do_s0
, int do_s1
,
12991 TCGv t0
= tcg_temp_new();
12992 TCGv t1
= tcg_temp_new();
12993 TCGv t2
= tcg_temp_new();
12995 tcg_gen_movi_tl(t2
, framesize
);
12996 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
12998 #define DECR_AND_LOAD(reg) do { \
12999 tcg_gen_movi_tl(t2, -4); \
13000 gen_op_addr_add(ctx, t0, t0, t2); \
13001 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13002 gen_store_gpr(t1, reg); \
13066 generate_exception_end(ctx
, EXCP_RI
);
13082 #undef DECR_AND_LOAD
13084 tcg_gen_movi_tl(t2
, framesize
);
13085 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13091 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13092 int is_64_bit
, int extended
)
13096 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13097 generate_exception_end(ctx
, EXCP_RI
);
13101 t0
= tcg_temp_new();
13103 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13104 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13106 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13112 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13115 TCGv_i32 t0
= tcg_const_i32(op
);
13116 TCGv t1
= tcg_temp_new();
13117 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13118 gen_helper_cache(cpu_env
, t1
, t0
);
13121 #if defined(TARGET_MIPS64)
13122 static void decode_i64_mips16 (DisasContext
*ctx
,
13123 int ry
, int funct
, int16_t offset
,
13128 check_insn(ctx
, ISA_MIPS3
);
13129 check_mips_64(ctx
);
13130 offset
= extended
? offset
: offset
<< 3;
13131 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13134 check_insn(ctx
, ISA_MIPS3
);
13135 check_mips_64(ctx
);
13136 offset
= extended
? offset
: offset
<< 3;
13137 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13140 check_insn(ctx
, ISA_MIPS3
);
13141 check_mips_64(ctx
);
13142 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13143 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13146 check_insn(ctx
, ISA_MIPS3
);
13147 check_mips_64(ctx
);
13148 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13149 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13152 check_insn(ctx
, ISA_MIPS3
);
13153 check_mips_64(ctx
);
13154 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13155 generate_exception_end(ctx
, EXCP_RI
);
13157 offset
= extended
? offset
: offset
<< 3;
13158 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13162 check_insn(ctx
, ISA_MIPS3
);
13163 check_mips_64(ctx
);
13164 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13165 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13168 check_insn(ctx
, ISA_MIPS3
);
13169 check_mips_64(ctx
);
13170 offset
= extended
? offset
: offset
<< 2;
13171 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13174 check_insn(ctx
, ISA_MIPS3
);
13175 check_mips_64(ctx
);
13176 offset
= extended
? offset
: offset
<< 2;
13177 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13183 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13185 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13186 int op
, rx
, ry
, funct
, sa
;
13187 int16_t imm
, offset
;
13189 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13190 op
= (ctx
->opcode
>> 11) & 0x1f;
13191 sa
= (ctx
->opcode
>> 22) & 0x1f;
13192 funct
= (ctx
->opcode
>> 8) & 0x7;
13193 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13194 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13195 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13196 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13197 | (ctx
->opcode
& 0x1f));
13199 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13202 case M16_OPC_ADDIUSP
:
13203 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13205 case M16_OPC_ADDIUPC
:
13206 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13209 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13210 /* No delay slot, so just process as a normal instruction */
13213 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13214 /* No delay slot, so just process as a normal instruction */
13216 case M16_OPC_BNEQZ
:
13217 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13218 /* No delay slot, so just process as a normal instruction */
13220 case M16_OPC_SHIFT
:
13221 switch (ctx
->opcode
& 0x3) {
13223 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13226 #if defined(TARGET_MIPS64)
13227 check_mips_64(ctx
);
13228 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13230 generate_exception_end(ctx
, EXCP_RI
);
13234 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13237 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13241 #if defined(TARGET_MIPS64)
13243 check_insn(ctx
, ISA_MIPS3
);
13244 check_mips_64(ctx
);
13245 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13249 imm
= ctx
->opcode
& 0xf;
13250 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13251 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13252 imm
= (int16_t) (imm
<< 1) >> 1;
13253 if ((ctx
->opcode
>> 4) & 0x1) {
13254 #if defined(TARGET_MIPS64)
13255 check_mips_64(ctx
);
13256 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13258 generate_exception_end(ctx
, EXCP_RI
);
13261 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13264 case M16_OPC_ADDIU8
:
13265 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13268 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13270 case M16_OPC_SLTIU
:
13271 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13276 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13279 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13282 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13285 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13288 check_insn(ctx
, ISA_MIPS32
);
13290 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13291 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13292 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13293 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13294 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13295 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13296 | (ctx
->opcode
& 0xf)) << 3;
13298 if (ctx
->opcode
& (1 << 7)) {
13299 gen_mips16_save(ctx
, xsregs
, aregs
,
13300 do_ra
, do_s0
, do_s1
,
13303 gen_mips16_restore(ctx
, xsregs
, aregs
,
13304 do_ra
, do_s0
, do_s1
,
13310 generate_exception_end(ctx
, EXCP_RI
);
13315 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13318 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13320 #if defined(TARGET_MIPS64)
13322 check_insn(ctx
, ISA_MIPS3
);
13323 check_mips_64(ctx
);
13324 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13328 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13331 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13334 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13337 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13340 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13343 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13346 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13348 #if defined(TARGET_MIPS64)
13350 check_insn(ctx
, ISA_MIPS3
);
13351 check_mips_64(ctx
);
13352 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13356 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13359 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13362 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13365 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13367 #if defined(TARGET_MIPS64)
13369 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13373 generate_exception_end(ctx
, EXCP_RI
);
13380 static inline bool is_uhi(int sdbbp_code
)
13382 #ifdef CONFIG_USER_ONLY
13385 return semihosting_enabled() && sdbbp_code
== 1;
13389 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13393 int op
, cnvt_op
, op1
, offset
;
13397 op
= (ctx
->opcode
>> 11) & 0x1f;
13398 sa
= (ctx
->opcode
>> 2) & 0x7;
13399 sa
= sa
== 0 ? 8 : sa
;
13400 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13401 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13402 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13403 op1
= offset
= ctx
->opcode
& 0x1f;
13408 case M16_OPC_ADDIUSP
:
13410 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13412 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13415 case M16_OPC_ADDIUPC
:
13416 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13419 offset
= (ctx
->opcode
& 0x7ff) << 1;
13420 offset
= (int16_t)(offset
<< 4) >> 4;
13421 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13422 /* No delay slot, so just process as a normal instruction */
13425 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13426 offset
= (((ctx
->opcode
& 0x1f) << 21)
13427 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13429 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13430 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13434 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13435 ((int8_t)ctx
->opcode
) << 1, 0);
13436 /* No delay slot, so just process as a normal instruction */
13438 case M16_OPC_BNEQZ
:
13439 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13440 ((int8_t)ctx
->opcode
) << 1, 0);
13441 /* No delay slot, so just process as a normal instruction */
13443 case M16_OPC_SHIFT
:
13444 switch (ctx
->opcode
& 0x3) {
13446 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13449 #if defined(TARGET_MIPS64)
13450 check_insn(ctx
, ISA_MIPS3
);
13451 check_mips_64(ctx
);
13452 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13454 generate_exception_end(ctx
, EXCP_RI
);
13458 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13461 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13465 #if defined(TARGET_MIPS64)
13467 check_insn(ctx
, ISA_MIPS3
);
13468 check_mips_64(ctx
);
13469 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13474 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13476 if ((ctx
->opcode
>> 4) & 1) {
13477 #if defined(TARGET_MIPS64)
13478 check_insn(ctx
, ISA_MIPS3
);
13479 check_mips_64(ctx
);
13480 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13482 generate_exception_end(ctx
, EXCP_RI
);
13485 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13489 case M16_OPC_ADDIU8
:
13491 int16_t imm
= (int8_t) ctx
->opcode
;
13493 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13498 int16_t imm
= (uint8_t) ctx
->opcode
;
13499 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13502 case M16_OPC_SLTIU
:
13504 int16_t imm
= (uint8_t) ctx
->opcode
;
13505 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13512 funct
= (ctx
->opcode
>> 8) & 0x7;
13515 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13516 ((int8_t)ctx
->opcode
) << 1, 0);
13519 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13520 ((int8_t)ctx
->opcode
) << 1, 0);
13523 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13526 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13527 ((int8_t)ctx
->opcode
) << 3);
13530 check_insn(ctx
, ISA_MIPS32
);
13532 int do_ra
= ctx
->opcode
& (1 << 6);
13533 int do_s0
= ctx
->opcode
& (1 << 5);
13534 int do_s1
= ctx
->opcode
& (1 << 4);
13535 int framesize
= ctx
->opcode
& 0xf;
13537 if (framesize
== 0) {
13540 framesize
= framesize
<< 3;
13543 if (ctx
->opcode
& (1 << 7)) {
13544 gen_mips16_save(ctx
, 0, 0,
13545 do_ra
, do_s0
, do_s1
, framesize
);
13547 gen_mips16_restore(ctx
, 0, 0,
13548 do_ra
, do_s0
, do_s1
, framesize
);
13554 int rz
= xlat(ctx
->opcode
& 0x7);
13556 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13557 ((ctx
->opcode
>> 5) & 0x7);
13558 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13562 reg32
= ctx
->opcode
& 0x1f;
13563 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13566 generate_exception_end(ctx
, EXCP_RI
);
13573 int16_t imm
= (uint8_t) ctx
->opcode
;
13575 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13580 int16_t imm
= (uint8_t) ctx
->opcode
;
13581 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13584 #if defined(TARGET_MIPS64)
13586 check_insn(ctx
, ISA_MIPS3
);
13587 check_mips_64(ctx
);
13588 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13592 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13595 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13598 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13601 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13604 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13607 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13610 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13612 #if defined (TARGET_MIPS64)
13614 check_insn(ctx
, ISA_MIPS3
);
13615 check_mips_64(ctx
);
13616 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13620 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13623 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13626 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13629 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13633 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13636 switch (ctx
->opcode
& 0x3) {
13638 mips32_op
= OPC_ADDU
;
13641 mips32_op
= OPC_SUBU
;
13643 #if defined(TARGET_MIPS64)
13645 mips32_op
= OPC_DADDU
;
13646 check_insn(ctx
, ISA_MIPS3
);
13647 check_mips_64(ctx
);
13650 mips32_op
= OPC_DSUBU
;
13651 check_insn(ctx
, ISA_MIPS3
);
13652 check_mips_64(ctx
);
13656 generate_exception_end(ctx
, EXCP_RI
);
13660 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13669 int nd
= (ctx
->opcode
>> 7) & 0x1;
13670 int link
= (ctx
->opcode
>> 6) & 0x1;
13671 int ra
= (ctx
->opcode
>> 5) & 0x1;
13674 check_insn(ctx
, ISA_MIPS32
);
13683 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13688 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13689 gen_helper_do_semihosting(cpu_env
);
13691 /* XXX: not clear which exception should be raised
13692 * when in debug mode...
13694 check_insn(ctx
, ISA_MIPS32
);
13695 generate_exception_end(ctx
, EXCP_DBp
);
13699 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13702 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13705 generate_exception_end(ctx
, EXCP_BREAK
);
13708 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13711 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13714 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13716 #if defined (TARGET_MIPS64)
13718 check_insn(ctx
, ISA_MIPS3
);
13719 check_mips_64(ctx
);
13720 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13724 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13727 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13730 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13733 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13736 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13739 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13742 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13745 check_insn(ctx
, ISA_MIPS32
);
13747 case RR_RY_CNVT_ZEB
:
13748 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13750 case RR_RY_CNVT_ZEH
:
13751 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13753 case RR_RY_CNVT_SEB
:
13754 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13756 case RR_RY_CNVT_SEH
:
13757 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13759 #if defined (TARGET_MIPS64)
13760 case RR_RY_CNVT_ZEW
:
13761 check_insn(ctx
, ISA_MIPS64
);
13762 check_mips_64(ctx
);
13763 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13765 case RR_RY_CNVT_SEW
:
13766 check_insn(ctx
, ISA_MIPS64
);
13767 check_mips_64(ctx
);
13768 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13772 generate_exception_end(ctx
, EXCP_RI
);
13777 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13779 #if defined (TARGET_MIPS64)
13781 check_insn(ctx
, ISA_MIPS3
);
13782 check_mips_64(ctx
);
13783 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13786 check_insn(ctx
, ISA_MIPS3
);
13787 check_mips_64(ctx
);
13788 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13791 check_insn(ctx
, ISA_MIPS3
);
13792 check_mips_64(ctx
);
13793 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13796 check_insn(ctx
, ISA_MIPS3
);
13797 check_mips_64(ctx
);
13798 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13802 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13805 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13808 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13811 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13813 #if defined (TARGET_MIPS64)
13815 check_insn(ctx
, ISA_MIPS3
);
13816 check_mips_64(ctx
);
13817 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13820 check_insn(ctx
, ISA_MIPS3
);
13821 check_mips_64(ctx
);
13822 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13825 check_insn(ctx
, ISA_MIPS3
);
13826 check_mips_64(ctx
);
13827 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13830 check_insn(ctx
, ISA_MIPS3
);
13831 check_mips_64(ctx
);
13832 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13836 generate_exception_end(ctx
, EXCP_RI
);
13840 case M16_OPC_EXTEND
:
13841 decode_extended_mips16_opc(env
, ctx
);
13844 #if defined(TARGET_MIPS64)
13846 funct
= (ctx
->opcode
>> 8) & 0x7;
13847 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13851 generate_exception_end(ctx
, EXCP_RI
);
13858 /* microMIPS extension to MIPS32/MIPS64 */
13861 * microMIPS32/microMIPS64 major opcodes
13863 * 1. MIPS Architecture for Programmers Volume II-B:
13864 * The microMIPS32 Instruction Set (Revision 3.05)
13866 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13868 * 2. MIPS Architecture For Programmers Volume II-A:
13869 * The MIPS64 Instruction Set (Revision 3.51)
13899 POOL32S
= 0x16, /* MIPS64 */
13900 DADDIU32
= 0x17, /* MIPS64 */
13929 /* 0x29 is reserved */
13942 /* 0x31 is reserved */
13955 SD32
= 0x36, /* MIPS64 */
13956 LD32
= 0x37, /* MIPS64 */
13958 /* 0x39 is reserved */
13974 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
13996 /* POOL32A encoding of minor opcode field */
13999 /* These opcodes are distinguished only by bits 9..6; those bits are
14000 * what are recorded below. */
14037 /* The following can be distinguished by their lower 6 bits. */
14047 /* POOL32AXF encoding of minor opcode field extension */
14050 * 1. MIPS Architecture for Programmers Volume II-B:
14051 * The microMIPS32 Instruction Set (Revision 3.05)
14053 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14055 * 2. MIPS Architecture for Programmers VolumeIV-e:
14056 * The MIPS DSP Application-Specific Extension
14057 * to the microMIPS32 Architecture (Revision 2.34)
14059 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14074 /* begin of microMIPS32 DSP */
14076 /* bits 13..12 for 0x01 */
14082 /* bits 13..12 for 0x2a */
14088 /* bits 13..12 for 0x32 */
14092 /* end of microMIPS32 DSP */
14094 /* bits 15..12 for 0x2c */
14111 /* bits 15..12 for 0x34 */
14119 /* bits 15..12 for 0x3c */
14121 JR
= 0x0, /* alias */
14129 /* bits 15..12 for 0x05 */
14133 /* bits 15..12 for 0x0d */
14145 /* bits 15..12 for 0x15 */
14151 /* bits 15..12 for 0x1d */
14155 /* bits 15..12 for 0x2d */
14160 /* bits 15..12 for 0x35 */
14167 /* POOL32B encoding of minor opcode field (bits 15..12) */
14183 /* POOL32C encoding of minor opcode field (bits 15..12) */
14204 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14217 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14230 /* POOL32F encoding of minor opcode field (bits 5..0) */
14233 /* These are the bit 7..6 values */
14242 /* These are the bit 8..6 values */
14267 MOVZ_FMT_05
= 0x05,
14301 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14308 /* POOL32Fxf encoding of minor opcode extension field */
14346 /* POOL32I encoding of minor opcode field (bits 25..21) */
14376 /* These overlap and are distinguished by bit16 of the instruction */
14385 /* POOL16A encoding of minor opcode field */
14392 /* POOL16B encoding of minor opcode field */
14399 /* POOL16C encoding of minor opcode field */
14419 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14443 /* POOL16D encoding of minor opcode field */
14450 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit microMIPS register field to the full GPR number. */
static int mmreg (int r)
{
    static const int gpr_map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return gpr_map[r];
}
14464 /* Used for 16-bit store instructions. */
/* Register-field mapping used for 16-bit store instructions:
   field 0 selects $0 (zero) rather than $16. */
static int mmreg2 (int r)
{
    static const int gpr_map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return gpr_map[r];
}
14472 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14473 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14474 #define uMIPS_RS2(op) uMIPS_RS(op)
14475 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14476 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14477 #define uMIPS_RS5(op) (op & 0x1f)
14479 /* Signed immediate */
14480 #define SIMM(op, start, width) \
14481 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14484 /* Zero-extended immediate */
14485 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14487 static void gen_addiur1sp(DisasContext
*ctx
)
14489 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14491 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14494 static void gen_addiur2(DisasContext
*ctx
)
14496 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14497 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14498 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14500 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14503 static void gen_addiusp(DisasContext
*ctx
)
14505 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14508 if (encoded
<= 1) {
14509 decoded
= 256 + encoded
;
14510 } else if (encoded
<= 255) {
14512 } else if (encoded
<= 509) {
14513 decoded
= encoded
- 512;
14515 decoded
= encoded
- 768;
14518 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14521 static void gen_addius5(DisasContext
*ctx
)
14523 int imm
= SIMM(ctx
->opcode
, 1, 4);
14524 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14526 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14529 static void gen_andi16(DisasContext
*ctx
)
14531 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14532 31, 32, 63, 64, 255, 32768, 65535 };
14533 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14534 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14535 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14537 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14540 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14541 int base
, int16_t offset
)
14546 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14547 generate_exception_end(ctx
, EXCP_RI
);
14551 t0
= tcg_temp_new();
14553 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14555 t1
= tcg_const_tl(reglist
);
14556 t2
= tcg_const_i32(ctx
->mem_idx
);
14558 save_cpu_state(ctx
, 1);
14561 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14564 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14566 #ifdef TARGET_MIPS64
14568 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14571 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14577 tcg_temp_free_i32(t2
);
14581 static void gen_pool16c_insn(DisasContext
*ctx
)
14583 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14584 int rs
= mmreg(ctx
->opcode
& 0x7);
14586 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14591 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14597 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14603 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14609 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14616 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14617 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14619 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14628 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14629 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14631 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14638 int reg
= ctx
->opcode
& 0x1f;
14640 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14646 int reg
= ctx
->opcode
& 0x1f;
14647 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14648 /* Let normal delay slot handling in our caller take us
14649 to the branch target. */
14654 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14655 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14659 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14660 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14664 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14668 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14671 generate_exception_end(ctx
, EXCP_BREAK
);
14674 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14675 gen_helper_do_semihosting(cpu_env
);
14677 /* XXX: not clear which exception should be raised
14678 * when in debug mode...
14680 check_insn(ctx
, ISA_MIPS32
);
14681 generate_exception_end(ctx
, EXCP_DBp
);
14684 case JRADDIUSP
+ 0:
14685 case JRADDIUSP
+ 1:
14687 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14688 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14689 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14690 /* Let normal delay slot handling in our caller take us
14691 to the branch target. */
14695 generate_exception_end(ctx
, EXCP_RI
);
14700 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14703 int rd
, rs
, re
, rt
;
14704 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14705 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14706 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14707 rd
= rd_enc
[enc_dest
];
14708 re
= re_enc
[enc_dest
];
14709 rs
= rs_rt_enc
[enc_rs
];
14710 rt
= rs_rt_enc
[enc_rt
];
14712 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14714 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14717 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14719 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14723 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14725 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14726 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14728 switch (ctx
->opcode
& 0xf) {
14730 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14733 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14737 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14738 int offset
= extract32(ctx
->opcode
, 4, 4);
14739 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14742 case R6_JRC16
: /* JRCADDIUSP */
14743 if ((ctx
->opcode
>> 4) & 1) {
14745 int imm
= extract32(ctx
->opcode
, 5, 5);
14746 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14747 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14750 rs
= extract32(ctx
->opcode
, 5, 5);
14751 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
14763 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14764 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14765 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14766 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14770 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14773 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14777 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14778 int offset
= extract32(ctx
->opcode
, 4, 4);
14779 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14782 case JALRC16
: /* BREAK16, SDBBP16 */
14783 switch (ctx
->opcode
& 0x3f) {
14785 case JALRC16
+ 0x20:
14787 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14792 generate_exception(ctx
, EXCP_BREAK
);
14796 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14797 gen_helper_do_semihosting(cpu_env
);
14799 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14800 generate_exception(ctx
, EXCP_RI
);
14802 generate_exception(ctx
, EXCP_DBp
);
14809 generate_exception(ctx
, EXCP_RI
);
14814 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14816 TCGv t0
= tcg_temp_new();
14817 TCGv t1
= tcg_temp_new();
14819 gen_load_gpr(t0
, base
);
14822 gen_load_gpr(t1
, index
);
14823 tcg_gen_shli_tl(t1
, t1
, 2);
14824 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14827 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14828 gen_store_gpr(t1
, rd
);
14834 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14835 int base
, int16_t offset
)
14839 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14840 generate_exception_end(ctx
, EXCP_RI
);
14844 t0
= tcg_temp_new();
14845 t1
= tcg_temp_new();
14847 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14852 generate_exception_end(ctx
, EXCP_RI
);
14855 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14856 gen_store_gpr(t1
, rd
);
14857 tcg_gen_movi_tl(t1
, 4);
14858 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14859 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14860 gen_store_gpr(t1
, rd
+1);
14863 gen_load_gpr(t1
, rd
);
14864 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14865 tcg_gen_movi_tl(t1
, 4);
14866 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14867 gen_load_gpr(t1
, rd
+1);
14868 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14870 #ifdef TARGET_MIPS64
14873 generate_exception_end(ctx
, EXCP_RI
);
14876 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14877 gen_store_gpr(t1
, rd
);
14878 tcg_gen_movi_tl(t1
, 8);
14879 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14880 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14881 gen_store_gpr(t1
, rd
+1);
14884 gen_load_gpr(t1
, rd
);
14885 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14886 tcg_gen_movi_tl(t1
, 8);
14887 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14888 gen_load_gpr(t1
, rd
+1);
14889 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14897 static void gen_sync(int stype
)
14899 TCGBar tcg_mo
= TCG_BAR_SC
;
14902 case 0x4: /* SYNC_WMB */
14903 tcg_mo
|= TCG_MO_ST_ST
;
14905 case 0x10: /* SYNC_MB */
14906 tcg_mo
|= TCG_MO_ALL
;
14908 case 0x11: /* SYNC_ACQUIRE */
14909 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14911 case 0x12: /* SYNC_RELEASE */
14912 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14914 case 0x13: /* SYNC_RMB */
14915 tcg_mo
|= TCG_MO_LD_LD
;
14918 tcg_mo
|= TCG_MO_ALL
;
14922 tcg_gen_mb(tcg_mo
);
14925 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14927 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14928 int minor
= (ctx
->opcode
>> 12) & 0xf;
14929 uint32_t mips32_op
;
14931 switch (extension
) {
14933 mips32_op
= OPC_TEQ
;
14936 mips32_op
= OPC_TGE
;
14939 mips32_op
= OPC_TGEU
;
14942 mips32_op
= OPC_TLT
;
14945 mips32_op
= OPC_TLTU
;
14948 mips32_op
= OPC_TNE
;
14950 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
14952 #ifndef CONFIG_USER_ONLY
14955 check_cp0_enabled(ctx
);
14957 /* Treat as NOP. */
14960 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
14964 check_cp0_enabled(ctx
);
14966 TCGv t0
= tcg_temp_new();
14968 gen_load_gpr(t0
, rt
);
14969 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
14975 switch (minor
& 3) {
14977 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14980 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14983 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14986 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14989 goto pool32axf_invalid
;
14993 switch (minor
& 3) {
14995 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14998 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15001 goto pool32axf_invalid
;
15007 check_insn(ctx
, ISA_MIPS32R6
);
15008 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15011 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15014 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15017 mips32_op
= OPC_CLO
;
15020 mips32_op
= OPC_CLZ
;
15022 check_insn(ctx
, ISA_MIPS32
);
15023 gen_cl(ctx
, mips32_op
, rt
, rs
);
15026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15027 gen_rdhwr(ctx
, rt
, rs
, 0);
15030 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15033 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15034 mips32_op
= OPC_MULT
;
15037 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15038 mips32_op
= OPC_MULTU
;
15041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15042 mips32_op
= OPC_DIV
;
15045 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15046 mips32_op
= OPC_DIVU
;
15049 check_insn(ctx
, ISA_MIPS32
);
15050 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15054 mips32_op
= OPC_MADD
;
15057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15058 mips32_op
= OPC_MADDU
;
15061 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15062 mips32_op
= OPC_MSUB
;
15065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15066 mips32_op
= OPC_MSUBU
;
15068 check_insn(ctx
, ISA_MIPS32
);
15069 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15072 goto pool32axf_invalid
;
15083 generate_exception_err(ctx
, EXCP_CpU
, 2);
15086 goto pool32axf_invalid
;
15091 case JALR
: /* JALRC */
15092 case JALR_HB
: /* JALRC_HB */
15093 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15094 /* JALRC, JALRC_HB */
15095 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15097 /* JALR, JALR_HB */
15098 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15099 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15104 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15105 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15106 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15109 goto pool32axf_invalid
;
15115 check_cp0_enabled(ctx
);
15116 check_insn(ctx
, ISA_MIPS32R2
);
15117 gen_load_srsgpr(rs
, rt
);
15120 check_cp0_enabled(ctx
);
15121 check_insn(ctx
, ISA_MIPS32R2
);
15122 gen_store_srsgpr(rs
, rt
);
15125 goto pool32axf_invalid
;
15128 #ifndef CONFIG_USER_ONLY
15132 mips32_op
= OPC_TLBP
;
15135 mips32_op
= OPC_TLBR
;
15138 mips32_op
= OPC_TLBWI
;
15141 mips32_op
= OPC_TLBWR
;
15144 mips32_op
= OPC_TLBINV
;
15147 mips32_op
= OPC_TLBINVF
;
15150 mips32_op
= OPC_WAIT
;
15153 mips32_op
= OPC_DERET
;
15156 mips32_op
= OPC_ERET
;
15158 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15161 goto pool32axf_invalid
;
15167 check_cp0_enabled(ctx
);
15169 TCGv t0
= tcg_temp_new();
15171 save_cpu_state(ctx
, 1);
15172 gen_helper_di(t0
, cpu_env
);
15173 gen_store_gpr(t0
, rs
);
15174 /* Stop translation as we may have switched the execution mode */
15175 ctx
->base
.is_jmp
= DISAS_STOP
;
15180 check_cp0_enabled(ctx
);
15182 TCGv t0
= tcg_temp_new();
15184 save_cpu_state(ctx
, 1);
15185 gen_helper_ei(t0
, cpu_env
);
15186 gen_store_gpr(t0
, rs
);
15187 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15188 of translated code to check for pending interrupts. */
15189 gen_save_pc(ctx
->base
.pc_next
+ 4);
15190 ctx
->base
.is_jmp
= DISAS_EXIT
;
15195 goto pool32axf_invalid
;
15202 gen_sync(extract32(ctx
->opcode
, 16, 5));
15205 generate_exception_end(ctx
, EXCP_SYSCALL
);
15208 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15209 gen_helper_do_semihosting(cpu_env
);
15211 check_insn(ctx
, ISA_MIPS32
);
15212 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15213 generate_exception_end(ctx
, EXCP_RI
);
15215 generate_exception_end(ctx
, EXCP_DBp
);
15220 goto pool32axf_invalid
;
15224 switch (minor
& 3) {
15226 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15229 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15232 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15235 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15238 goto pool32axf_invalid
;
15242 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15245 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15248 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15251 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15254 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15257 goto pool32axf_invalid
;
15262 MIPS_INVAL("pool32axf");
15263 generate_exception_end(ctx
, EXCP_RI
);
15268 /* Values for microMIPS fmt field. Variable-width, depending on which
15269 formats the instruction supports. */
15288 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15290 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15291 uint32_t mips32_op
;
15293 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15294 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15295 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15297 switch (extension
) {
15298 case FLOAT_1BIT_FMT(CFC1
, 0):
15299 mips32_op
= OPC_CFC1
;
15301 case FLOAT_1BIT_FMT(CTC1
, 0):
15302 mips32_op
= OPC_CTC1
;
15304 case FLOAT_1BIT_FMT(MFC1
, 0):
15305 mips32_op
= OPC_MFC1
;
15307 case FLOAT_1BIT_FMT(MTC1
, 0):
15308 mips32_op
= OPC_MTC1
;
15310 case FLOAT_1BIT_FMT(MFHC1
, 0):
15311 mips32_op
= OPC_MFHC1
;
15313 case FLOAT_1BIT_FMT(MTHC1
, 0):
15314 mips32_op
= OPC_MTHC1
;
15316 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15319 /* Reciprocal square root */
15320 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15321 mips32_op
= OPC_RSQRT_S
;
15323 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15324 mips32_op
= OPC_RSQRT_D
;
15328 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15329 mips32_op
= OPC_SQRT_S
;
15331 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15332 mips32_op
= OPC_SQRT_D
;
15336 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15337 mips32_op
= OPC_RECIP_S
;
15339 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15340 mips32_op
= OPC_RECIP_D
;
15344 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15345 mips32_op
= OPC_FLOOR_L_S
;
15347 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15348 mips32_op
= OPC_FLOOR_L_D
;
15350 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15351 mips32_op
= OPC_FLOOR_W_S
;
15353 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15354 mips32_op
= OPC_FLOOR_W_D
;
15358 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15359 mips32_op
= OPC_CEIL_L_S
;
15361 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15362 mips32_op
= OPC_CEIL_L_D
;
15364 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15365 mips32_op
= OPC_CEIL_W_S
;
15367 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15368 mips32_op
= OPC_CEIL_W_D
;
15372 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15373 mips32_op
= OPC_TRUNC_L_S
;
15375 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15376 mips32_op
= OPC_TRUNC_L_D
;
15378 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15379 mips32_op
= OPC_TRUNC_W_S
;
15381 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15382 mips32_op
= OPC_TRUNC_W_D
;
15386 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15387 mips32_op
= OPC_ROUND_L_S
;
15389 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15390 mips32_op
= OPC_ROUND_L_D
;
15392 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15393 mips32_op
= OPC_ROUND_W_S
;
15395 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15396 mips32_op
= OPC_ROUND_W_D
;
15399 /* Integer to floating-point conversion */
15400 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15401 mips32_op
= OPC_CVT_L_S
;
15403 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15404 mips32_op
= OPC_CVT_L_D
;
15406 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15407 mips32_op
= OPC_CVT_W_S
;
15409 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15410 mips32_op
= OPC_CVT_W_D
;
15413 /* Paired-foo conversions */
15414 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15415 mips32_op
= OPC_CVT_S_PL
;
15417 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15418 mips32_op
= OPC_CVT_S_PU
;
15420 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15421 mips32_op
= OPC_CVT_PW_PS
;
15423 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15424 mips32_op
= OPC_CVT_PS_PW
;
15427 /* Floating-point moves */
15428 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15429 mips32_op
= OPC_MOV_S
;
15431 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15432 mips32_op
= OPC_MOV_D
;
15434 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15435 mips32_op
= OPC_MOV_PS
;
15438 /* Absolute value */
15439 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15440 mips32_op
= OPC_ABS_S
;
15442 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15443 mips32_op
= OPC_ABS_D
;
15445 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15446 mips32_op
= OPC_ABS_PS
;
15450 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15451 mips32_op
= OPC_NEG_S
;
15453 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15454 mips32_op
= OPC_NEG_D
;
15456 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15457 mips32_op
= OPC_NEG_PS
;
15460 /* Reciprocal square root step */
15461 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15462 mips32_op
= OPC_RSQRT1_S
;
15464 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15465 mips32_op
= OPC_RSQRT1_D
;
15467 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15468 mips32_op
= OPC_RSQRT1_PS
;
15471 /* Reciprocal step */
15472 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15473 mips32_op
= OPC_RECIP1_S
;
15475 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15476 mips32_op
= OPC_RECIP1_S
;
15478 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15479 mips32_op
= OPC_RECIP1_PS
;
15482 /* Conversions from double */
15483 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15484 mips32_op
= OPC_CVT_D_S
;
15486 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15487 mips32_op
= OPC_CVT_D_W
;
15489 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15490 mips32_op
= OPC_CVT_D_L
;
15493 /* Conversions from single */
15494 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15495 mips32_op
= OPC_CVT_S_D
;
15497 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15498 mips32_op
= OPC_CVT_S_W
;
15500 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15501 mips32_op
= OPC_CVT_S_L
;
15503 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15506 /* Conditional moves on floating-point codes */
15507 case COND_FLOAT_MOV(MOVT
, 0):
15508 case COND_FLOAT_MOV(MOVT
, 1):
15509 case COND_FLOAT_MOV(MOVT
, 2):
15510 case COND_FLOAT_MOV(MOVT
, 3):
15511 case COND_FLOAT_MOV(MOVT
, 4):
15512 case COND_FLOAT_MOV(MOVT
, 5):
15513 case COND_FLOAT_MOV(MOVT
, 6):
15514 case COND_FLOAT_MOV(MOVT
, 7):
15515 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15516 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15518 case COND_FLOAT_MOV(MOVF
, 0):
15519 case COND_FLOAT_MOV(MOVF
, 1):
15520 case COND_FLOAT_MOV(MOVF
, 2):
15521 case COND_FLOAT_MOV(MOVF
, 3):
15522 case COND_FLOAT_MOV(MOVF
, 4):
15523 case COND_FLOAT_MOV(MOVF
, 5):
15524 case COND_FLOAT_MOV(MOVF
, 6):
15525 case COND_FLOAT_MOV(MOVF
, 7):
15526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15527 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15530 MIPS_INVAL("pool32fxf");
15531 generate_exception_end(ctx
, EXCP_RI
);
15536 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15540 int rt
, rs
, rd
, rr
;
15542 uint32_t op
, minor
, minor2
, mips32_op
;
15543 uint32_t cond
, fmt
, cc
;
15545 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15546 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15548 rt
= (ctx
->opcode
>> 21) & 0x1f;
15549 rs
= (ctx
->opcode
>> 16) & 0x1f;
15550 rd
= (ctx
->opcode
>> 11) & 0x1f;
15551 rr
= (ctx
->opcode
>> 6) & 0x1f;
15552 imm
= (int16_t) ctx
->opcode
;
15554 op
= (ctx
->opcode
>> 26) & 0x3f;
15557 minor
= ctx
->opcode
& 0x3f;
15560 minor
= (ctx
->opcode
>> 6) & 0xf;
15563 mips32_op
= OPC_SLL
;
15566 mips32_op
= OPC_SRA
;
15569 mips32_op
= OPC_SRL
;
15572 mips32_op
= OPC_ROTR
;
15574 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15577 check_insn(ctx
, ISA_MIPS32R6
);
15578 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15581 check_insn(ctx
, ISA_MIPS32R6
);
15582 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15585 check_insn(ctx
, ISA_MIPS32R6
);
15586 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15589 goto pool32a_invalid
;
15593 minor
= (ctx
->opcode
>> 6) & 0xf;
15597 mips32_op
= OPC_ADD
;
15600 mips32_op
= OPC_ADDU
;
15603 mips32_op
= OPC_SUB
;
15606 mips32_op
= OPC_SUBU
;
15609 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15610 mips32_op
= OPC_MUL
;
15612 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15616 mips32_op
= OPC_SLLV
;
15619 mips32_op
= OPC_SRLV
;
15622 mips32_op
= OPC_SRAV
;
15625 mips32_op
= OPC_ROTRV
;
15627 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15629 /* Logical operations */
15631 mips32_op
= OPC_AND
;
15634 mips32_op
= OPC_OR
;
15637 mips32_op
= OPC_NOR
;
15640 mips32_op
= OPC_XOR
;
15642 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15644 /* Set less than */
15646 mips32_op
= OPC_SLT
;
15649 mips32_op
= OPC_SLTU
;
15651 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15654 goto pool32a_invalid
;
15658 minor
= (ctx
->opcode
>> 6) & 0xf;
15660 /* Conditional moves */
15661 case MOVN
: /* MUL */
15662 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15664 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15667 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15670 case MOVZ
: /* MUH */
15671 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15673 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15676 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15680 check_insn(ctx
, ISA_MIPS32R6
);
15681 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15684 check_insn(ctx
, ISA_MIPS32R6
);
15685 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15687 case LWXS
: /* DIV */
15688 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15690 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15693 gen_ldxs(ctx
, rs
, rt
, rd
);
15697 check_insn(ctx
, ISA_MIPS32R6
);
15698 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15701 check_insn(ctx
, ISA_MIPS32R6
);
15702 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15705 check_insn(ctx
, ISA_MIPS32R6
);
15706 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15709 goto pool32a_invalid
;
15713 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15716 check_insn(ctx
, ISA_MIPS32R6
);
15717 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15718 extract32(ctx
->opcode
, 9, 2));
15721 check_insn(ctx
, ISA_MIPS32R6
);
15722 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15725 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15728 gen_pool32axf(env
, ctx
, rt
, rs
);
15731 generate_exception_end(ctx
, EXCP_BREAK
);
15734 check_insn(ctx
, ISA_MIPS32R6
);
15735 generate_exception_end(ctx
, EXCP_RI
);
15739 MIPS_INVAL("pool32a");
15740 generate_exception_end(ctx
, EXCP_RI
);
15745 minor
= (ctx
->opcode
>> 12) & 0xf;
15748 check_cp0_enabled(ctx
);
15749 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15750 gen_cache_operation(ctx
, rt
, rs
, imm
);
15755 /* COP2: Not implemented. */
15756 generate_exception_err(ctx
, EXCP_CpU
, 2);
15758 #ifdef TARGET_MIPS64
15761 check_insn(ctx
, ISA_MIPS3
);
15762 check_mips_64(ctx
);
15767 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15769 #ifdef TARGET_MIPS64
15772 check_insn(ctx
, ISA_MIPS3
);
15773 check_mips_64(ctx
);
15778 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15781 MIPS_INVAL("pool32b");
15782 generate_exception_end(ctx
, EXCP_RI
);
15787 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15788 minor
= ctx
->opcode
& 0x3f;
15789 check_cp1_enabled(ctx
);
15792 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15793 mips32_op
= OPC_ALNV_PS
;
15796 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15797 mips32_op
= OPC_MADD_S
;
15800 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15801 mips32_op
= OPC_MADD_D
;
15804 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15805 mips32_op
= OPC_MADD_PS
;
15808 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15809 mips32_op
= OPC_MSUB_S
;
15812 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15813 mips32_op
= OPC_MSUB_D
;
15816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15817 mips32_op
= OPC_MSUB_PS
;
15820 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15821 mips32_op
= OPC_NMADD_S
;
15824 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15825 mips32_op
= OPC_NMADD_D
;
15828 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15829 mips32_op
= OPC_NMADD_PS
;
15832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15833 mips32_op
= OPC_NMSUB_S
;
15836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15837 mips32_op
= OPC_NMSUB_D
;
15840 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15841 mips32_op
= OPC_NMSUB_PS
;
15843 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15845 case CABS_COND_FMT
:
15846 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15847 cond
= (ctx
->opcode
>> 6) & 0xf;
15848 cc
= (ctx
->opcode
>> 13) & 0x7;
15849 fmt
= (ctx
->opcode
>> 10) & 0x3;
15852 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15855 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15858 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15861 goto pool32f_invalid
;
15865 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15866 cond
= (ctx
->opcode
>> 6) & 0xf;
15867 cc
= (ctx
->opcode
>> 13) & 0x7;
15868 fmt
= (ctx
->opcode
>> 10) & 0x3;
15871 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15874 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15877 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15880 goto pool32f_invalid
;
15884 check_insn(ctx
, ISA_MIPS32R6
);
15885 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15888 check_insn(ctx
, ISA_MIPS32R6
);
15889 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15892 gen_pool32fxf(ctx
, rt
, rs
);
15896 switch ((ctx
->opcode
>> 6) & 0x7) {
15898 mips32_op
= OPC_PLL_PS
;
15901 mips32_op
= OPC_PLU_PS
;
15904 mips32_op
= OPC_PUL_PS
;
15907 mips32_op
= OPC_PUU_PS
;
15910 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15911 mips32_op
= OPC_CVT_PS_S
;
15913 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15916 goto pool32f_invalid
;
15920 check_insn(ctx
, ISA_MIPS32R6
);
15921 switch ((ctx
->opcode
>> 9) & 0x3) {
15923 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15926 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15929 goto pool32f_invalid
;
15934 switch ((ctx
->opcode
>> 6) & 0x7) {
15936 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15937 mips32_op
= OPC_LWXC1
;
15940 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15941 mips32_op
= OPC_SWXC1
;
15944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15945 mips32_op
= OPC_LDXC1
;
15948 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15949 mips32_op
= OPC_SDXC1
;
15952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15953 mips32_op
= OPC_LUXC1
;
15956 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15957 mips32_op
= OPC_SUXC1
;
15959 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
15962 goto pool32f_invalid
;
15966 check_insn(ctx
, ISA_MIPS32R6
);
15967 switch ((ctx
->opcode
>> 9) & 0x3) {
15969 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
15972 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
15975 goto pool32f_invalid
;
15980 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15981 fmt
= (ctx
->opcode
>> 9) & 0x3;
15982 switch ((ctx
->opcode
>> 6) & 0x7) {
15986 mips32_op
= OPC_RSQRT2_S
;
15989 mips32_op
= OPC_RSQRT2_D
;
15992 mips32_op
= OPC_RSQRT2_PS
;
15995 goto pool32f_invalid
;
16001 mips32_op
= OPC_RECIP2_S
;
16004 mips32_op
= OPC_RECIP2_D
;
16007 mips32_op
= OPC_RECIP2_PS
;
16010 goto pool32f_invalid
;
16014 mips32_op
= OPC_ADDR_PS
;
16017 mips32_op
= OPC_MULR_PS
;
16019 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16022 goto pool32f_invalid
;
16026 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16027 cc
= (ctx
->opcode
>> 13) & 0x7;
16028 fmt
= (ctx
->opcode
>> 9) & 0x3;
16029 switch ((ctx
->opcode
>> 6) & 0x7) {
16030 case MOVF_FMT
: /* RINT_FMT */
16031 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16035 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16038 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16041 goto pool32f_invalid
;
16047 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16050 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16054 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16057 goto pool32f_invalid
;
16061 case MOVT_FMT
: /* CLASS_FMT */
16062 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16066 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16069 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16072 goto pool32f_invalid
;
16078 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16081 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16085 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16088 goto pool32f_invalid
;
16093 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16096 goto pool32f_invalid
;
16099 #define FINSN_3ARG_SDPS(prfx) \
16100 switch ((ctx->opcode >> 8) & 0x3) { \
16102 mips32_op = OPC_##prfx##_S; \
16105 mips32_op = OPC_##prfx##_D; \
16107 case FMT_SDPS_PS: \
16109 mips32_op = OPC_##prfx##_PS; \
16112 goto pool32f_invalid; \
16115 check_insn(ctx
, ISA_MIPS32R6
);
16116 switch ((ctx
->opcode
>> 9) & 0x3) {
16118 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16121 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16124 goto pool32f_invalid
;
16128 check_insn(ctx
, ISA_MIPS32R6
);
16129 switch ((ctx
->opcode
>> 9) & 0x3) {
16131 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16134 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16137 goto pool32f_invalid
;
16141 /* regular FP ops */
16142 switch ((ctx
->opcode
>> 6) & 0x3) {
16144 FINSN_3ARG_SDPS(ADD
);
16147 FINSN_3ARG_SDPS(SUB
);
16150 FINSN_3ARG_SDPS(MUL
);
16153 fmt
= (ctx
->opcode
>> 8) & 0x3;
16155 mips32_op
= OPC_DIV_D
;
16156 } else if (fmt
== 0) {
16157 mips32_op
= OPC_DIV_S
;
16159 goto pool32f_invalid
;
16163 goto pool32f_invalid
;
16168 switch ((ctx
->opcode
>> 6) & 0x7) {
16169 case MOVN_FMT
: /* SELEQZ_FMT */
16170 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16172 switch ((ctx
->opcode
>> 9) & 0x3) {
16174 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16177 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16180 goto pool32f_invalid
;
16184 FINSN_3ARG_SDPS(MOVN
);
16188 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16189 FINSN_3ARG_SDPS(MOVN
);
16191 case MOVZ_FMT
: /* SELNEZ_FMT */
16192 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16194 switch ((ctx
->opcode
>> 9) & 0x3) {
16196 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16199 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16202 goto pool32f_invalid
;
16206 FINSN_3ARG_SDPS(MOVZ
);
16210 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16211 FINSN_3ARG_SDPS(MOVZ
);
16214 check_insn(ctx
, ISA_MIPS32R6
);
16215 switch ((ctx
->opcode
>> 9) & 0x3) {
16217 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16220 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16223 goto pool32f_invalid
;
16227 check_insn(ctx
, ISA_MIPS32R6
);
16228 switch ((ctx
->opcode
>> 9) & 0x3) {
16230 mips32_op
= OPC_MADDF_S
;
16233 mips32_op
= OPC_MADDF_D
;
16236 goto pool32f_invalid
;
16240 check_insn(ctx
, ISA_MIPS32R6
);
16241 switch ((ctx
->opcode
>> 9) & 0x3) {
16243 mips32_op
= OPC_MSUBF_S
;
16246 mips32_op
= OPC_MSUBF_D
;
16249 goto pool32f_invalid
;
16253 goto pool32f_invalid
;
16257 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16261 MIPS_INVAL("pool32f");
16262 generate_exception_end(ctx
, EXCP_RI
);
16266 generate_exception_err(ctx
, EXCP_CpU
, 1);
16270 minor
= (ctx
->opcode
>> 21) & 0x1f;
16273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16274 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16277 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16278 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16279 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16282 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16283 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16284 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16288 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16291 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16292 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16293 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16297 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16298 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16301 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16302 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16306 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16310 case TLTI
: /* BC1EQZC */
16311 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16313 check_cp1_enabled(ctx
);
16314 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16317 mips32_op
= OPC_TLTI
;
16321 case TGEI
: /* BC1NEZC */
16322 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16324 check_cp1_enabled(ctx
);
16325 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16328 mips32_op
= OPC_TGEI
;
16333 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16334 mips32_op
= OPC_TLTIU
;
16337 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16338 mips32_op
= OPC_TGEIU
;
16340 case TNEI
: /* SYNCI */
16341 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16343                 /* Break the TB to be able to sync copied instructions
                         immediately */
16345 ctx
->base
.is_jmp
= DISAS_STOP
;
16348 mips32_op
= OPC_TNEI
;
16353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16354 mips32_op
= OPC_TEQI
;
16356 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16362 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16363 4, rs
, 0, imm
<< 1, 0);
16364         /* Compact branches don't have a delay slot, so just let
16365            the normal delay slot handling take us to the branch
               target. */
16369 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16370 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16373 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16374         /* Break the TB to be able to sync copied instructions
                 immediately */
16376 ctx
->base
.is_jmp
= DISAS_STOP
;
16380 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16381 /* COP2: Not implemented. */
16382 generate_exception_err(ctx
, EXCP_CpU
, 2);
16385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16386 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16389 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16390 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16393 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16394 mips32_op
= OPC_BC1FANY4
;
16397 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16398 mips32_op
= OPC_BC1TANY4
;
16401 check_insn(ctx
, ASE_MIPS3D
);
16404 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16405 check_cp1_enabled(ctx
);
16406 gen_compute_branch1(ctx
, mips32_op
,
16407 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16409 generate_exception_err(ctx
, EXCP_CpU
, 1);
16414 /* MIPS DSP: not implemented */
16417 MIPS_INVAL("pool32i");
16418 generate_exception_end(ctx
, EXCP_RI
);
16423 minor
= (ctx
->opcode
>> 12) & 0xf;
16424 offset
= sextract32(ctx
->opcode
, 0,
16425 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16428 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16429 mips32_op
= OPC_LWL
;
16432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16433 mips32_op
= OPC_SWL
;
16436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16437 mips32_op
= OPC_LWR
;
16440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16441 mips32_op
= OPC_SWR
;
16443 #if defined(TARGET_MIPS64)
16445 check_insn(ctx
, ISA_MIPS3
);
16446 check_mips_64(ctx
);
16447 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16448 mips32_op
= OPC_LDL
;
16451 check_insn(ctx
, ISA_MIPS3
);
16452 check_mips_64(ctx
);
16453 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16454 mips32_op
= OPC_SDL
;
16457 check_insn(ctx
, ISA_MIPS3
);
16458 check_mips_64(ctx
);
16459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16460 mips32_op
= OPC_LDR
;
16463 check_insn(ctx
, ISA_MIPS3
);
16464 check_mips_64(ctx
);
16465 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16466 mips32_op
= OPC_SDR
;
16469 check_insn(ctx
, ISA_MIPS3
);
16470 check_mips_64(ctx
);
16471 mips32_op
= OPC_LWU
;
16474 check_insn(ctx
, ISA_MIPS3
);
16475 check_mips_64(ctx
);
16476 mips32_op
= OPC_LLD
;
16480 mips32_op
= OPC_LL
;
16483 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16486 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16489 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16491 #if defined(TARGET_MIPS64)
16493 check_insn(ctx
, ISA_MIPS3
);
16494 check_mips_64(ctx
);
16495 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16500 MIPS_INVAL("pool32c ld-eva");
16501 generate_exception_end(ctx
, EXCP_RI
);
16504 check_cp0_enabled(ctx
);
16506 minor2
= (ctx
->opcode
>> 9) & 0x7;
16507 offset
= sextract32(ctx
->opcode
, 0, 9);
16510 mips32_op
= OPC_LBUE
;
16513 mips32_op
= OPC_LHUE
;
16516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16517 mips32_op
= OPC_LWLE
;
16520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16521 mips32_op
= OPC_LWRE
;
16524 mips32_op
= OPC_LBE
;
16527 mips32_op
= OPC_LHE
;
16530 mips32_op
= OPC_LLE
;
16533 mips32_op
= OPC_LWE
;
16539 MIPS_INVAL("pool32c st-eva");
16540 generate_exception_end(ctx
, EXCP_RI
);
16543 check_cp0_enabled(ctx
);
16545 minor2
= (ctx
->opcode
>> 9) & 0x7;
16546 offset
= sextract32(ctx
->opcode
, 0, 9);
16549 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16550 mips32_op
= OPC_SWLE
;
16553 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16554 mips32_op
= OPC_SWRE
;
16557 /* Treat as no-op */
16558 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16559 /* hint codes 24-31 are reserved and signal RI */
16560 generate_exception(ctx
, EXCP_RI
);
16564 /* Treat as no-op */
16565 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16566 gen_cache_operation(ctx
, rt
, rs
, offset
);
16570 mips32_op
= OPC_SBE
;
16573 mips32_op
= OPC_SHE
;
16576 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16579 mips32_op
= OPC_SWE
;
16584 /* Treat as no-op */
16585 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16586 /* hint codes 24-31 are reserved and signal RI */
16587 generate_exception(ctx
, EXCP_RI
);
16591 MIPS_INVAL("pool32c");
16592 generate_exception_end(ctx
, EXCP_RI
);
16596 case ADDI32
: /* AUI, LUI */
16597 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16599 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16602 mips32_op
= OPC_ADDI
;
16607 mips32_op
= OPC_ADDIU
;
16609 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16612 /* Logical operations */
16614 mips32_op
= OPC_ORI
;
16617 mips32_op
= OPC_XORI
;
16620 mips32_op
= OPC_ANDI
;
16622 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16625 /* Set less than immediate */
16627 mips32_op
= OPC_SLTI
;
16630 mips32_op
= OPC_SLTIU
;
16632 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16635 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16636 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16637 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16638 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16640 case JALS32
: /* BOVC, BEQC, BEQZALC */
16641 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16644 mips32_op
= OPC_BOVC
;
16645 } else if (rs
< rt
&& rs
== 0) {
16647 mips32_op
= OPC_BEQZALC
;
16650 mips32_op
= OPC_BEQC
;
16652 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16655 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16656 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16657 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16660 case BEQ32
: /* BC */
16661 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16663 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16664 sextract32(ctx
->opcode
<< 1, 0, 27));
16667 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16670 case BNE32
: /* BALC */
16671 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16673 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16674 sextract32(ctx
->opcode
<< 1, 0, 27));
16677 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16680 case J32
: /* BGTZC, BLTZC, BLTC */
16681 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16682 if (rs
== 0 && rt
!= 0) {
16684 mips32_op
= OPC_BGTZC
;
16685 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16687 mips32_op
= OPC_BLTZC
;
16690 mips32_op
= OPC_BLTC
;
16692 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16695 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16696 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16699 case JAL32
: /* BLEZC, BGEZC, BGEC */
16700 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16701 if (rs
== 0 && rt
!= 0) {
16703 mips32_op
= OPC_BLEZC
;
16704 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16706 mips32_op
= OPC_BGEZC
;
16709 mips32_op
= OPC_BGEC
;
16711 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16714 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16715 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16716 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16719 /* Floating point (COP1) */
16721 mips32_op
= OPC_LWC1
;
16724 mips32_op
= OPC_LDC1
;
16727 mips32_op
= OPC_SWC1
;
16730 mips32_op
= OPC_SDC1
;
16732 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16734 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16735 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16736 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16737 switch ((ctx
->opcode
>> 16) & 0x1f) {
16746 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16749 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16752 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16762 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16765 generate_exception(ctx
, EXCP_RI
);
16770 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16771 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16773 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16776 case BNVC
: /* BNEC, BNEZALC */
16777 check_insn(ctx
, ISA_MIPS32R6
);
16780 mips32_op
= OPC_BNVC
;
16781 } else if (rs
< rt
&& rs
== 0) {
16783 mips32_op
= OPC_BNEZALC
;
16786 mips32_op
= OPC_BNEC
;
16788 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16790 case R6_BNEZC
: /* JIALC */
16791 check_insn(ctx
, ISA_MIPS32R6
);
16794 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16795 sextract32(ctx
->opcode
<< 1, 0, 22));
16798 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16801 case R6_BEQZC
: /* JIC */
16802 check_insn(ctx
, ISA_MIPS32R6
);
16805 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16806 sextract32(ctx
->opcode
<< 1, 0, 22));
16809 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16812 case BLEZALC
: /* BGEZALC, BGEUC */
16813 check_insn(ctx
, ISA_MIPS32R6
);
16814 if (rs
== 0 && rt
!= 0) {
16816 mips32_op
= OPC_BLEZALC
;
16817 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16819 mips32_op
= OPC_BGEZALC
;
16822 mips32_op
= OPC_BGEUC
;
16824 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16826 case BGTZALC
: /* BLTZALC, BLTUC */
16827 check_insn(ctx
, ISA_MIPS32R6
);
16828 if (rs
== 0 && rt
!= 0) {
16830 mips32_op
= OPC_BGTZALC
;
16831 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16833 mips32_op
= OPC_BLTZALC
;
16836 mips32_op
= OPC_BLTUC
;
16838 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16840 /* Loads and stores */
16842 mips32_op
= OPC_LB
;
16845 mips32_op
= OPC_LBU
;
16848 mips32_op
= OPC_LH
;
16851 mips32_op
= OPC_LHU
;
16854 mips32_op
= OPC_LW
;
16856 #ifdef TARGET_MIPS64
16858 check_insn(ctx
, ISA_MIPS3
);
16859 check_mips_64(ctx
);
16860 mips32_op
= OPC_LD
;
16863 check_insn(ctx
, ISA_MIPS3
);
16864 check_mips_64(ctx
);
16865 mips32_op
= OPC_SD
;
16869 mips32_op
= OPC_SB
;
16872 mips32_op
= OPC_SH
;
16875 mips32_op
= OPC_SW
;
16878 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16881 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16884 generate_exception_end(ctx
, EXCP_RI
);
16889 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16893 /* make sure instructions are on a halfword boundary */
16894 if (ctx
->base
.pc_next
& 0x1) {
16895 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16896 generate_exception_end(ctx
, EXCP_AdEL
);
16900 op
= (ctx
->opcode
>> 10) & 0x3f;
16901 /* Enforce properly-sized instructions in a delay slot */
16902 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16903 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16905 /* POOL32A, POOL32B, POOL32I, POOL32C */
16907 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16909 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16911 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16913 /* LB32, LH32, LWC132, LDC132, LW32 */
16914 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16915 generate_exception_end(ctx
, EXCP_RI
);
16920 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16922 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16924 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16925 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16926 generate_exception_end(ctx
, EXCP_RI
);
16936 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16937 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16938 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
16941 switch (ctx
->opcode
& 0x1) {
16949 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16950 /* In the Release 6 the register number location in
16951 * the instruction encoding has changed.
16953 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
16955 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
16961 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16962 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
16963 int amount
= (ctx
->opcode
>> 1) & 0x7;
16965 amount
= amount
== 0 ? 8 : amount
;
16967 switch (ctx
->opcode
& 0x1) {
16976 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
16980 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16981 gen_pool16c_r6_insn(ctx
);
16983 gen_pool16c_insn(ctx
);
16988 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16989 int rb
= 28; /* GP */
16990 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
16992 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
16996 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16997 if (ctx
->opcode
& 1) {
16998 generate_exception_end(ctx
, EXCP_RI
);
17001 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17002 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17003 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17004 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17009 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17010 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17011 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17012 offset
= (offset
== 0xf ? -1 : offset
);
17014 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17019 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17020 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17021 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17023 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17028 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17029 int rb
= 29; /* SP */
17030 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17032 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17037 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17038 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17039 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17041 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17046 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17047 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17048 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17050 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17055 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17056 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17057 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17059 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17064 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17065 int rb
= 29; /* SP */
17066 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17068 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17073 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17074 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17075 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17077 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17082 int rd
= uMIPS_RD5(ctx
->opcode
);
17083 int rs
= uMIPS_RS5(ctx
->opcode
);
17085 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17092 switch (ctx
->opcode
& 0x1) {
17102 switch (ctx
->opcode
& 0x1) {
17107 gen_addiur1sp(ctx
);
17111 case B16
: /* BC16 */
17112 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17113 sextract32(ctx
->opcode
, 0, 10) << 1,
17114 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17116 case BNEZ16
: /* BNEZC16 */
17117 case BEQZ16
: /* BEQZC16 */
17118 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17119 mmreg(uMIPS_RD(ctx
->opcode
)),
17120 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17121 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17126 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17127 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17129 imm
= (imm
== 0x7f ? -1 : imm
);
17130 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17136 generate_exception_end(ctx
, EXCP_RI
);
17139 decode_micromips32_opc(env
, ctx
);
17152 /* MAJOR, P16, and P32 pools opcodes */
17156 NM_MOVE_BALC
= 0x02,
17164 NM_P16_SHIFT
= 0x0c,
17182 NM_P_LS_U12
= 0x21,
17192 NM_P16_ADDU
= 0x2c,
17206 NM_MOVEPREV
= 0x3f,
17209 /* POOL32A instruction pool */
17211 NM_POOL32A0
= 0x00,
17212 NM_SPECIAL2
= 0x01,
17215 NM_POOL32A5
= 0x05,
17216 NM_POOL32A7
= 0x07,
17219 /* P.GP.W instruction pool */
17221 NM_ADDIUGP_W
= 0x00,
17226 /* P48I instruction pool */
17230 NM_ADDIUGP48
= 0x02,
17231 NM_ADDIUPC48
= 0x03,
17236 /* P.U12 instruction pool */
17245 NM_ADDIUNEG
= 0x08,
17252 /* POOL32F instruction pool */
17254 NM_POOL32F_0
= 0x00,
17255 NM_POOL32F_3
= 0x03,
17256 NM_POOL32F_5
= 0x05,
17259 /* POOL32S instruction pool */
17261 NM_POOL32S_0
= 0x00,
17262 NM_POOL32S_4
= 0x04,
17265 /* P.LUI instruction pool */
17271 /* P.GP.BH instruction pool */
17276 NM_ADDIUGP_B
= 0x03,
17279 NM_P_GP_CP1
= 0x06,
17282 /* P.LS.U12 instruction pool */
17287 NM_P_PREFU12
= 0x03,
17300 /* P.LS.S9 instruction pool */
17306 NM_P_LS_UAWM
= 0x05,
17309 /* P.BAL instruction pool */
17315 /* P.J instruction pool */
17318 NM_JALRC_HB
= 0x01,
17319 NM_P_BALRSC
= 0x08,
17322 /* P.BR1 instruction pool */
17330 /* P.BR2 instruction pool */
17337 /* P.BRI instruction pool */
17349 /* P16.SHIFT instruction pool */
17355 /* POOL16C instruction pool */
17357 NM_POOL16C_0
= 0x00,
17361 /* P16.A1 instruction pool */
17363 NM_ADDIUR1SP
= 0x01,
17366 /* P16.A2 instruction pool */
17369 NM_P_ADDIURS5
= 0x01,
17372 /* P16.ADDU instruction pool */
17378 /* P16.SR instruction pool */
17381 NM_RESTORE_JRC16
= 0x01,
17384 /* P16.4X4 instruction pool */
17390 /* P16.LB instruction pool */
17397 /* P16.LH instruction pool */
17404 /* P.RI instruction pool */
17407 NM_P_SYSCALL
= 0x01,
17412 /* POOL32A0 instruction pool */
17447 NM_D_E_MT_VPE
= 0x56,
17455 /* POOL32A5 instruction pool */
17457 NM_CMP_EQ_PH
= 0x00,
17458 NM_CMP_LT_PH
= 0x08,
17459 NM_CMP_LE_PH
= 0x10,
17460 NM_CMPGU_EQ_QB
= 0x18,
17461 NM_CMPGU_LT_QB
= 0x20,
17462 NM_CMPGU_LE_QB
= 0x28,
17463 NM_CMPGDU_EQ_QB
= 0x30,
17464 NM_CMPGDU_LT_QB
= 0x38,
17465 NM_CMPGDU_LE_QB
= 0x40,
17466 NM_CMPU_EQ_QB
= 0x48,
17467 NM_CMPU_LT_QB
= 0x50,
17468 NM_CMPU_LE_QB
= 0x58,
17469 NM_ADDQ_S_W
= 0x60,
17470 NM_SUBQ_S_W
= 0x68,
17474 NM_ADDQ_S_PH
= 0x01,
17475 NM_ADDQH_R_PH
= 0x09,
17476 NM_ADDQH_R_W
= 0x11,
17477 NM_ADDU_S_QB
= 0x19,
17478 NM_ADDU_S_PH
= 0x21,
17479 NM_ADDUH_R_QB
= 0x29,
17480 NM_SHRAV_R_PH
= 0x31,
17481 NM_SHRAV_R_QB
= 0x39,
17482 NM_SUBQ_S_PH
= 0x41,
17483 NM_SUBQH_R_PH
= 0x49,
17484 NM_SUBQH_R_W
= 0x51,
17485 NM_SUBU_S_QB
= 0x59,
17486 NM_SUBU_S_PH
= 0x61,
17487 NM_SUBUH_R_QB
= 0x69,
17488 NM_SHLLV_S_PH
= 0x71,
17489 NM_PRECR_SRA_R_PH_W
= 0x79,
17491 NM_MULEU_S_PH_QBL
= 0x12,
17492 NM_MULEU_S_PH_QBR
= 0x1a,
17493 NM_MULQ_RS_PH
= 0x22,
17494 NM_MULQ_S_PH
= 0x2a,
17495 NM_MULQ_RS_W
= 0x32,
17496 NM_MULQ_S_W
= 0x3a,
17499 NM_SHRAV_R_W
= 0x5a,
17500 NM_SHRLV_PH
= 0x62,
17501 NM_SHRLV_QB
= 0x6a,
17502 NM_SHLLV_QB
= 0x72,
17503 NM_SHLLV_S_W
= 0x7a,
17507 NM_MULEQ_S_W_PHL
= 0x04,
17508 NM_MULEQ_S_W_PHR
= 0x0c,
17510 NM_MUL_S_PH
= 0x05,
17511 NM_PRECR_QB_PH
= 0x0d,
17512 NM_PRECRQ_QB_PH
= 0x15,
17513 NM_PRECRQ_PH_W
= 0x1d,
17514 NM_PRECRQ_RS_PH_W
= 0x25,
17515 NM_PRECRQU_S_QB_PH
= 0x2d,
17516 NM_PACKRL_PH
= 0x35,
17520 NM_SHRA_R_W
= 0x5e,
17521 NM_SHRA_R_PH
= 0x66,
17522 NM_SHLL_S_PH
= 0x76,
17523 NM_SHLL_S_W
= 0x7e,
17528 /* POOL32A7 instruction pool */
17533 NM_POOL32AXF
= 0x07,
17536 /* P.SR instruction pool */
17542 /* P.SHIFT instruction pool */
17550 /* P.ROTX instruction pool */
17555 /* P.INS instruction pool */
17560 /* P.EXT instruction pool */
17565 /* POOL32F_0 (fmt) instruction pool */
17570 NM_SELEQZ_S
= 0x07,
17571 NM_SELEQZ_D
= 0x47,
17575 NM_SELNEZ_S
= 0x0f,
17576 NM_SELNEZ_D
= 0x4f,
17591 /* POOL32F_3 instruction pool */
17595 NM_MINA_FMT
= 0x04,
17596 NM_MAXA_FMT
= 0x05,
17597 NM_POOL32FXF
= 0x07,
17600 /* POOL32F_5 instruction pool */
17602 NM_CMP_CONDN_S
= 0x00,
17603 NM_CMP_CONDN_D
= 0x02,
17606 /* P.GP.LH instruction pool */
17612 /* P.GP.SH instruction pool */
17617 /* P.GP.CP1 instruction pool */
17625 /* P.LS.S0 instruction pool */
17642 NM_P_PREFS9
= 0x03,
17648 /* P.LS.S1 instruction pool */
17650 NM_ASET_ACLR
= 0x02,
17658 /* P.LS.E0 instruction pool */
17674 /* P.PREFE instruction pool */
17680 /* P.LLE instruction pool */
17686 /* P.SCE instruction pool */
17692 /* P.LS.WM instruction pool */
17698 /* P.LS.UAWM instruction pool */
17704 /* P.BR3A instruction pool */
17710 NM_BPOSGE32C
= 0x04,
17713 /* P16.RI instruction pool */
17715 NM_P16_SYSCALL
= 0x01,
17720 /* POOL16C_0 instruction pool */
17722 NM_POOL16C_00
= 0x00,
17725 /* P16.JRC instruction pool */
17731 /* P.SYSCALL instruction pool */
17737 /* P.TRAP instruction pool */
17743 /* P.CMOVE instruction pool */
17749 /* POOL32Axf instruction pool */
17751 NM_POOL32AXF_1
= 0x01,
17752 NM_POOL32AXF_2
= 0x02,
17753 NM_POOL32AXF_4
= 0x04,
17754 NM_POOL32AXF_5
= 0x05,
17755 NM_POOL32AXF_7
= 0x07,
17758 /* POOL32Axf_1 instruction pool */
17760 NM_POOL32AXF_1_0
= 0x00,
17761 NM_POOL32AXF_1_1
= 0x01,
17762 NM_POOL32AXF_1_3
= 0x03,
17763 NM_POOL32AXF_1_4
= 0x04,
17764 NM_POOL32AXF_1_5
= 0x05,
17765 NM_POOL32AXF_1_7
= 0x07,
17768 /* POOL32Axf_2 instruction pool */
17770 NM_POOL32AXF_2_0_7
= 0x00,
17771 NM_POOL32AXF_2_8_15
= 0x01,
17772 NM_POOL32AXF_2_16_23
= 0x02,
17773 NM_POOL32AXF_2_24_31
= 0x03,
17776 /* POOL32Axf_7 instruction pool */
17778 NM_SHRA_R_QB
= 0x0,
17783 /* POOL32Axf_1_0 instruction pool */
17791 /* POOL32Axf_1_1 instruction pool */
17797 /* POOL32Axf_1_3 instruction pool */
17805 /* POOL32Axf_1_4 instruction pool */
17811 /* POOL32Axf_1_5 instruction pool */
17813 NM_MAQ_S_W_PHR
= 0x0,
17814 NM_MAQ_S_W_PHL
= 0x1,
17815 NM_MAQ_SA_W_PHR
= 0x2,
17816 NM_MAQ_SA_W_PHL
= 0x3,
17819 /* POOL32Axf_1_7 instruction pool */
17823 NM_EXTR_RS_W
= 0x2,
17827 /* POOL32Axf_2_0_7 instruction pool */
17830 NM_DPAQ_S_W_PH
= 0x1,
17832 NM_DPSQ_S_W_PH
= 0x3,
17839 /* POOL32Axf_2_8_15 instruction pool */
17841 NM_DPAX_W_PH
= 0x0,
17842 NM_DPAQ_SA_L_W
= 0x1,
17843 NM_DPSX_W_PH
= 0x2,
17844 NM_DPSQ_SA_L_W
= 0x3,
17847 NM_EXTRV_R_W
= 0x7,
17850 /* POOL32Axf_2_16_23 instruction pool */
17852 NM_DPAU_H_QBL
= 0x0,
17853 NM_DPAQX_S_W_PH
= 0x1,
17854 NM_DPSU_H_QBL
= 0x2,
17855 NM_DPSQX_S_W_PH
= 0x3,
17858 NM_MULSA_W_PH
= 0x6,
17859 NM_EXTRV_RS_W
= 0x7,
17862 /* POOL32Axf_2_24_31 instruction pool */
17864 NM_DPAU_H_QBR
= 0x0,
17865 NM_DPAQX_SA_W_PH
= 0x1,
17866 NM_DPSU_H_QBR
= 0x2,
17867 NM_DPSQX_SA_W_PH
= 0x3,
17870 NM_MULSAQ_S_W_PH
= 0x6,
17871 NM_EXTRV_S_H
= 0x7,
17874 /* POOL32Axf_{4, 5} instruction pool */
17893 /* nanoMIPS DSP instructions */
17894 NM_ABSQ_S_QB
= 0x00,
17895 NM_ABSQ_S_PH
= 0x08,
17896 NM_ABSQ_S_W
= 0x10,
17897 NM_PRECEQ_W_PHL
= 0x28,
17898 NM_PRECEQ_W_PHR
= 0x30,
17899 NM_PRECEQU_PH_QBL
= 0x38,
17900 NM_PRECEQU_PH_QBR
= 0x48,
17901 NM_PRECEU_PH_QBL
= 0x58,
17902 NM_PRECEU_PH_QBR
= 0x68,
17903 NM_PRECEQU_PH_QBLA
= 0x39,
17904 NM_PRECEQU_PH_QBRA
= 0x49,
17905 NM_PRECEU_PH_QBLA
= 0x59,
17906 NM_PRECEU_PH_QBRA
= 0x69,
17907 NM_REPLV_PH
= 0x01,
17908 NM_REPLV_QB
= 0x09,
17911 NM_RADDU_W_QB
= 0x78,
17917 /* PP.SR instruction pool */
17921 NM_RESTORE_JRC
= 0x03,
17924 /* P.SR.F instruction pool */
17927 NM_RESTOREF
= 0x01,
17930 /* P16.SYSCALL instruction pool */
17932 NM_SYSCALL16
= 0x00,
17933 NM_HYPCALL16
= 0x01,
17936 /* POOL16C_00 instruction pool */
17944 /* PP.LSX and PP.LSXS instruction pool */
17982 /* ERETx instruction pool */
17988 /* POOL32FxF_{0, 1} insturction pool */
17997 NM_CVT_S_PL
= 0x84,
17998 NM_CVT_S_PU
= 0xa4,
18000 NM_CVT_L_S
= 0x004,
18001 NM_CVT_L_D
= 0x104,
18002 NM_CVT_W_S
= 0x024,
18003 NM_CVT_W_D
= 0x124,
18005 NM_RSQRT_S
= 0x008,
18006 NM_RSQRT_D
= 0x108,
18011 NM_RECIP_S
= 0x048,
18012 NM_RECIP_D
= 0x148,
18014 NM_FLOOR_L_S
= 0x00c,
18015 NM_FLOOR_L_D
= 0x10c,
18017 NM_FLOOR_W_S
= 0x02c,
18018 NM_FLOOR_W_D
= 0x12c,
18020 NM_CEIL_L_S
= 0x04c,
18021 NM_CEIL_L_D
= 0x14c,
18022 NM_CEIL_W_S
= 0x06c,
18023 NM_CEIL_W_D
= 0x16c,
18024 NM_TRUNC_L_S
= 0x08c,
18025 NM_TRUNC_L_D
= 0x18c,
18026 NM_TRUNC_W_S
= 0x0ac,
18027 NM_TRUNC_W_D
= 0x1ac,
18028 NM_ROUND_L_S
= 0x0cc,
18029 NM_ROUND_L_D
= 0x1cc,
18030 NM_ROUND_W_S
= 0x0ec,
18031 NM_ROUND_W_D
= 0x1ec,
18039 NM_CVT_D_S
= 0x04d,
18040 NM_CVT_D_W
= 0x0cd,
18041 NM_CVT_D_L
= 0x14d,
18042 NM_CVT_S_D
= 0x06d,
18043 NM_CVT_S_W
= 0x0ed,
18044 NM_CVT_S_L
= 0x16d,
18047 /* P.LL instruction pool */
18053 /* P.SC instruction pool */
18059 /* P.DVP instruction pool */
18068 * nanoMIPS decoding engine
18073 /* extraction utilities */
18075 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18076 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18077 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18078 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18079 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18080 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * Maps a 3-bit encoded register field to the architectural GPR number:
 * encodings 0-3 select s0-s3 (16-19), encodings 4-7 select a0-a3 (4-7).
 */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    /* Mask to 3 bits so any caller-supplied value indexes safely. */
    return map[r & 0x7];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Same as the 'gpr3' mapping except encoding 0 selects $zero (0) instead
 * of s0, matching the store-source register convention.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    /* Mask to 3 bits so any caller-supplied value indexes safely. */
    return map[r & 0x7];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * Maps a 4-bit encoded register field to the architectural GPR number:
 * encodings 0-3 select t0-t3 (8-11), 4-7 select a0-a3 (4-7),
 * 8-15 select s0-s7 (16-23).
 */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = { 8, 9, 10, 11, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    /* Mask to 4 bits so any caller-supplied value indexes safely. */
    return map[r & 0xf];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Same as the 'gpr4' mapping except encoding 3 selects $zero (0) instead
 * of t3 (11).
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = { 8, 9, 10, 0, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    /* Mask to 4 bits so any caller-supplied value indexes safely. */
    return map[r & 0xf];
}
18117 /* extraction utilities */
18119 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18120 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18121 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18122 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18123 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18124 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18127 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18129 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18132 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18133 uint8_t gp
, uint16_t u
)
18136 TCGv va
= tcg_temp_new();
18137 TCGv t0
= tcg_temp_new();
18139 while (counter
!= count
) {
18140 bool use_gp
= gp
&& (counter
== count
- 1);
18141 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18142 int this_offset
= -((counter
+ 1) << 2);
18143 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18144 gen_load_gpr(t0
, this_rt
);
18145 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18146 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18150 /* adjust stack pointer */
18151 gen_adjust_sp(ctx
, -u
);
18157 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18158 uint8_t gp
, uint16_t u
)
18161 TCGv va
= tcg_temp_new();
18162 TCGv t0
= tcg_temp_new();
18164 while (counter
!= count
) {
18165 bool use_gp
= gp
&& (counter
== count
- 1);
18166 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18167 int this_offset
= u
- ((counter
+ 1) << 2);
18168 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18169 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18170 ctx
->default_tcg_memop_mask
);
18171 tcg_gen_ext32s_tl(t0
, t0
);
18172 gen_store_gpr(t0
, this_rt
);
18176 /* adjust stack pointer */
18177 gen_adjust_sp(ctx
, u
);
18183 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18185 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18186 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
18188 switch (extract32(ctx
->opcode
, 2, 2)) {
18190 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18193 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18196 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18199 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18204 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18206 int rt
= extract32(ctx
->opcode
, 21, 5);
18207 int rs
= extract32(ctx
->opcode
, 16, 5);
18208 int rd
= extract32(ctx
->opcode
, 11, 5);
18210 switch (extract32(ctx
->opcode
, 3, 7)) {
18212 switch (extract32(ctx
->opcode
, 10, 1)) {
18215 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18219 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18225 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18229 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18232 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18235 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18238 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18241 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18244 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18247 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18250 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18254 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18257 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18260 switch (extract32(ctx
->opcode
, 10, 1)) {
18262 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18265 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18270 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18273 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18276 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18279 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18282 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18287 #ifndef CONFIG_USER_ONLY
18288 TCGv t0
= tcg_temp_new();
18289 switch (extract32(ctx
->opcode
, 10, 1)) {
18292 check_cp0_enabled(ctx
);
18293 gen_helper_dvp(t0
, cpu_env
);
18294 gen_store_gpr(t0
, rt
);
18299 check_cp0_enabled(ctx
);
18300 gen_helper_evp(t0
, cpu_env
);
18301 gen_store_gpr(t0
, rt
);
18308 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18313 TCGv t0
= tcg_temp_new();
18314 TCGv t1
= tcg_temp_new();
18315 TCGv t2
= tcg_temp_new();
18317 gen_load_gpr(t1
, rs
);
18318 gen_load_gpr(t2
, rt
);
18319 tcg_gen_add_tl(t0
, t1
, t2
);
18320 tcg_gen_ext32s_tl(t0
, t0
);
18321 tcg_gen_xor_tl(t1
, t1
, t2
);
18322 tcg_gen_xor_tl(t2
, t0
, t2
);
18323 tcg_gen_andc_tl(t1
, t2
, t1
);
18325 /* operands of same sign, result different sign */
18326 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18327 gen_store_gpr(t0
, rd
);
18335 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18338 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18341 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18344 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18347 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18350 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18353 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18356 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18358 #ifndef CONFIG_USER_ONLY
18360 check_cp0_enabled(ctx
);
18362 /* Treat as NOP. */
18365 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18368 check_cp0_enabled(ctx
);
18370 TCGv t0
= tcg_temp_new();
18372 gen_load_gpr(t0
, rt
);
18373 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18377 case NM_D_E_MT_VPE
:
18379 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18380 TCGv t0
= tcg_temp_new();
18387 gen_helper_dmt(t0
);
18388 gen_store_gpr(t0
, rt
);
18389 } else if (rs
== 0) {
18392 gen_helper_dvpe(t0
, cpu_env
);
18393 gen_store_gpr(t0
, rt
);
18395 generate_exception_end(ctx
, EXCP_RI
);
18402 gen_helper_emt(t0
);
18403 gen_store_gpr(t0
, rt
);
18404 } else if (rs
== 0) {
18407 gen_helper_evpe(t0
, cpu_env
);
18408 gen_store_gpr(t0
, rt
);
18410 generate_exception_end(ctx
, EXCP_RI
);
18421 TCGv t0
= tcg_temp_new();
18422 TCGv t1
= tcg_temp_new();
18424 gen_load_gpr(t0
, rt
);
18425 gen_load_gpr(t1
, rs
);
18426 gen_helper_fork(t0
, t1
);
18433 check_cp0_enabled(ctx
);
18435 /* Treat as NOP. */
18438 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18439 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18443 check_cp0_enabled(ctx
);
18444 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18445 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18450 TCGv t0
= tcg_temp_new();
18452 gen_load_gpr(t0
, rs
);
18453 gen_helper_yield(t0
, cpu_env
, t0
);
18454 gen_store_gpr(t0
, rt
);
18460 generate_exception_end(ctx
, EXCP_RI
);
/*
 * NM_POOL32AXF_1_5 pool: nanoMIPS DSP multiply-accumulate (MAQ.*) group.
 * Dispatches on `opc` to one of four gen_helper_maq_* helpers; t0 carries
 * the DSP accumulator index (v2 >> 3), v0_t/v1_t the two GPR operands.
 *
 * NOTE(review): this text is a lossy extraction — the switch header,
 * `break;` statements and closing braces are missing from the visible
 * lines. Only comments have been added; code is reproduced verbatim.
 */
18466 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18467 int ret
, int v1
, int v2
)
/* accumulator selector (i32) and the two GPR operand temporaries */
18473 t0
= tcg_temp_new_i32();
18475 v0_t
= tcg_temp_new();
18476 v1_t
= tcg_temp_new();
/* v2 >> 3 selects one of the DSP accumulators */
18478 tcg_gen_movi_i32(t0
, v2
>> 3);
18480 gen_load_gpr(v0_t
, ret
);
18481 gen_load_gpr(v1_t
, v1
);
/* each arm calls the matching DSP helper; the helper updates the
 * accumulator selected by t0 (helpers take cpu_env for DSPControl) */
18484 case NM_MAQ_S_W_PHR
:
18486 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18488 case NM_MAQ_S_W_PHL
:
18490 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18492 case NM_MAQ_SA_W_PHR
:
18494 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18496 case NM_MAQ_SA_W_PHL
:
18498 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
/* unrecognized sub-opcode: Reserved Instruction exception */
18501 generate_exception_end(ctx
, EXCP_RI
);
/* release all temporaries */
18505 tcg_temp_free_i32(t0
);
18507 tcg_temp_free(v0_t
);
18508 tcg_temp_free(v1_t
);
18512 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18513 int ret
, int v1
, int v2
)
18516 TCGv t0
= tcg_temp_new();
18517 TCGv t1
= tcg_temp_new();
18518 TCGv v0_t
= tcg_temp_new();
18520 gen_load_gpr(v0_t
, v1
);
18523 case NM_POOL32AXF_1_0
:
18525 switch (extract32(ctx
->opcode
, 12, 2)) {
18527 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18530 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18533 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18536 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18540 case NM_POOL32AXF_1_1
:
18542 switch (extract32(ctx
->opcode
, 12, 2)) {
18544 tcg_gen_movi_tl(t0
, v2
);
18545 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18548 tcg_gen_movi_tl(t0
, v2
>> 3);
18549 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18552 generate_exception_end(ctx
, EXCP_RI
);
18556 case NM_POOL32AXF_1_3
:
18558 imm
= extract32(ctx
->opcode
, 14, 7);
18559 switch (extract32(ctx
->opcode
, 12, 2)) {
18561 tcg_gen_movi_tl(t0
, imm
);
18562 gen_helper_rddsp(t0
, t0
, cpu_env
);
18563 gen_store_gpr(t0
, ret
);
18566 gen_load_gpr(t0
, ret
);
18567 tcg_gen_movi_tl(t1
, imm
);
18568 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18571 tcg_gen_movi_tl(t0
, v2
>> 3);
18572 tcg_gen_movi_tl(t1
, v1
);
18573 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18574 gen_store_gpr(t0
, ret
);
18577 tcg_gen_movi_tl(t0
, v2
>> 3);
18578 tcg_gen_movi_tl(t1
, v1
);
18579 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18580 gen_store_gpr(t0
, ret
);
18584 case NM_POOL32AXF_1_4
:
18586 tcg_gen_movi_tl(t0
, v2
>> 2);
18587 switch (extract32(ctx
->opcode
, 12, 1)) {
18589 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18590 gen_store_gpr(t0
, ret
);
18593 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18594 gen_store_gpr(t0
, ret
);
18598 case NM_POOL32AXF_1_5
:
18599 opc
= extract32(ctx
->opcode
, 12, 2);
18600 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18602 case NM_POOL32AXF_1_7
:
18604 tcg_gen_movi_tl(t0
, v2
>> 3);
18605 tcg_gen_movi_tl(t1
, v1
);
18606 switch (extract32(ctx
->opcode
, 12, 2)) {
18608 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18609 gen_store_gpr(t0
, ret
);
18612 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18613 gen_store_gpr(t0
, ret
);
18616 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18617 gen_store_gpr(t0
, ret
);
18620 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18621 gen_store_gpr(t0
, ret
);
18626 generate_exception_end(ctx
, EXCP_RI
);
18632 tcg_temp_free(v0_t
);
/*
 * Shared DSP dot-product / multiply-subtract dispatcher for the four
 * NM_POOL32AXF_2_* sub-pools. The outer switch (header not visible in
 * this extraction) selects the sub-pool via `opc`; the inner switches
 * decode opcode bits [11:9]. t0 carries the accumulator index (rd >> 3);
 * v0/v1 are the already-loaded GPR operand temporaries.
 *
 * NOTE(review): lossy extraction — outer switch header, some `case`
 * labels and all `break;` lines are missing. Code reproduced verbatim;
 * note the first two sub-pools pass operands as (v1, v0) vs. (v0, v1)
 * in the 8_15/16_23 pools — operand order follows the visible calls.
 */
18635 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18636 TCGv v0
, TCGv v1
, int rd
)
18640 t0
= tcg_temp_new_i32();
/* rd >> 3 selects the DSP accumulator the helpers update */
18642 tcg_gen_movi_i32(t0
, rd
>> 3);
18645 case NM_POOL32AXF_2_0_7
:
18646 switch (extract32(ctx
->opcode
, 9, 3)) {
18649 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18651 case NM_DPAQ_S_W_PH
:
18653 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18657 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18659 case NM_DPSQ_S_W_PH
:
18661 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18664 generate_exception_end(ctx
, EXCP_RI
);
18668 case NM_POOL32AXF_2_8_15
:
18669 switch (extract32(ctx
->opcode
, 9, 3)) {
18672 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18674 case NM_DPAQ_SA_L_W
:
18676 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18680 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18682 case NM_DPSQ_SA_L_W
:
18684 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18687 generate_exception_end(ctx
, EXCP_RI
);
18691 case NM_POOL32AXF_2_16_23
:
18692 switch (extract32(ctx
->opcode
, 9, 3)) {
18693 case NM_DPAU_H_QBL
:
18695 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18697 case NM_DPAQX_S_W_PH
:
18699 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18701 case NM_DPSU_H_QBL
:
18703 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18705 case NM_DPSQX_S_W_PH
:
18707 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18709 case NM_MULSA_W_PH
:
18711 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18714 generate_exception_end(ctx
, EXCP_RI
);
18718 case NM_POOL32AXF_2_24_31
:
18719 switch (extract32(ctx
->opcode
, 9, 3)) {
18720 case NM_DPAU_H_QBR
:
18722 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18724 case NM_DPAQX_SA_W_PH
:
18726 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18728 case NM_DPSU_H_QBR
:
18730 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18732 case NM_DPSQX_SA_W_PH
:
18734 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18736 case NM_MULSAQ_S_W_PH
:
18738 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18741 generate_exception_end(ctx
, EXCP_RI
);
/* unknown sub-pool in `opc`: Reserved Instruction */
18746 generate_exception_end(ctx
, EXCP_RI
);
18750 tcg_temp_free_i32(t0
);
18753 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18754 int rt
, int rs
, int rd
)
18757 TCGv t0
= tcg_temp_new();
18758 TCGv t1
= tcg_temp_new();
18759 TCGv v0_t
= tcg_temp_new();
18760 TCGv v1_t
= tcg_temp_new();
18762 gen_load_gpr(v0_t
, rt
);
18763 gen_load_gpr(v1_t
, rs
);
18766 case NM_POOL32AXF_2_0_7
:
18767 switch (extract32(ctx
->opcode
, 9, 3)) {
18769 case NM_DPAQ_S_W_PH
:
18771 case NM_DPSQ_S_W_PH
:
18772 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18777 gen_load_gpr(t0
, rs
);
18779 if (rd
!= 0 && rd
!= 2) {
18780 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18781 tcg_gen_ext32u_tl(t0
, t0
);
18782 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18783 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18785 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18791 int acc
= extract32(ctx
->opcode
, 14, 2);
18792 TCGv_i64 t2
= tcg_temp_new_i64();
18793 TCGv_i64 t3
= tcg_temp_new_i64();
18795 gen_load_gpr(t0
, rt
);
18796 gen_load_gpr(t1
, rs
);
18797 tcg_gen_ext_tl_i64(t2
, t0
);
18798 tcg_gen_ext_tl_i64(t3
, t1
);
18799 tcg_gen_mul_i64(t2
, t2
, t3
);
18800 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18801 tcg_gen_add_i64(t2
, t2
, t3
);
18802 tcg_temp_free_i64(t3
);
18803 gen_move_low32(cpu_LO
[acc
], t2
);
18804 gen_move_high32(cpu_HI
[acc
], t2
);
18805 tcg_temp_free_i64(t2
);
18811 int acc
= extract32(ctx
->opcode
, 14, 2);
18812 TCGv_i32 t2
= tcg_temp_new_i32();
18813 TCGv_i32 t3
= tcg_temp_new_i32();
18815 gen_load_gpr(t0
, rs
);
18816 gen_load_gpr(t1
, rt
);
18817 tcg_gen_trunc_tl_i32(t2
, t0
);
18818 tcg_gen_trunc_tl_i32(t3
, t1
);
18819 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18820 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18821 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18822 tcg_temp_free_i32(t2
);
18823 tcg_temp_free_i32(t3
);
18828 gen_load_gpr(v1_t
, rs
);
18829 tcg_gen_movi_tl(t0
, rd
>> 3);
18830 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18831 gen_store_gpr(t0
, ret
);
18835 case NM_POOL32AXF_2_8_15
:
18836 switch (extract32(ctx
->opcode
, 9, 3)) {
18838 case NM_DPAQ_SA_L_W
:
18840 case NM_DPSQ_SA_L_W
:
18841 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18846 int acc
= extract32(ctx
->opcode
, 14, 2);
18847 TCGv_i64 t2
= tcg_temp_new_i64();
18848 TCGv_i64 t3
= tcg_temp_new_i64();
18850 gen_load_gpr(t0
, rs
);
18851 gen_load_gpr(t1
, rt
);
18852 tcg_gen_ext32u_tl(t0
, t0
);
18853 tcg_gen_ext32u_tl(t1
, t1
);
18854 tcg_gen_extu_tl_i64(t2
, t0
);
18855 tcg_gen_extu_tl_i64(t3
, t1
);
18856 tcg_gen_mul_i64(t2
, t2
, t3
);
18857 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18858 tcg_gen_add_i64(t2
, t2
, t3
);
18859 tcg_temp_free_i64(t3
);
18860 gen_move_low32(cpu_LO
[acc
], t2
);
18861 gen_move_high32(cpu_HI
[acc
], t2
);
18862 tcg_temp_free_i64(t2
);
18868 int acc
= extract32(ctx
->opcode
, 14, 2);
18869 TCGv_i32 t2
= tcg_temp_new_i32();
18870 TCGv_i32 t3
= tcg_temp_new_i32();
18872 gen_load_gpr(t0
, rs
);
18873 gen_load_gpr(t1
, rt
);
18874 tcg_gen_trunc_tl_i32(t2
, t0
);
18875 tcg_gen_trunc_tl_i32(t3
, t1
);
18876 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18877 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18878 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18879 tcg_temp_free_i32(t2
);
18880 tcg_temp_free_i32(t3
);
18885 tcg_gen_movi_tl(t0
, rd
>> 3);
18886 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18887 gen_store_gpr(t0
, ret
);
18890 generate_exception_end(ctx
, EXCP_RI
);
18894 case NM_POOL32AXF_2_16_23
:
18895 switch (extract32(ctx
->opcode
, 9, 3)) {
18896 case NM_DPAU_H_QBL
:
18897 case NM_DPAQX_S_W_PH
:
18898 case NM_DPSU_H_QBL
:
18899 case NM_DPSQX_S_W_PH
:
18900 case NM_MULSA_W_PH
:
18901 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18905 tcg_gen_movi_tl(t0
, rd
>> 3);
18906 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18907 gen_store_gpr(t0
, ret
);
18912 int acc
= extract32(ctx
->opcode
, 14, 2);
18913 TCGv_i64 t2
= tcg_temp_new_i64();
18914 TCGv_i64 t3
= tcg_temp_new_i64();
18916 gen_load_gpr(t0
, rs
);
18917 gen_load_gpr(t1
, rt
);
18918 tcg_gen_ext_tl_i64(t2
, t0
);
18919 tcg_gen_ext_tl_i64(t3
, t1
);
18920 tcg_gen_mul_i64(t2
, t2
, t3
);
18921 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18922 tcg_gen_sub_i64(t2
, t3
, t2
);
18923 tcg_temp_free_i64(t3
);
18924 gen_move_low32(cpu_LO
[acc
], t2
);
18925 gen_move_high32(cpu_HI
[acc
], t2
);
18926 tcg_temp_free_i64(t2
);
18929 case NM_EXTRV_RS_W
:
18931 tcg_gen_movi_tl(t0
, rd
>> 3);
18932 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
18933 gen_store_gpr(t0
, ret
);
18937 case NM_POOL32AXF_2_24_31
:
18938 switch (extract32(ctx
->opcode
, 9, 3)) {
18939 case NM_DPAU_H_QBR
:
18940 case NM_DPAQX_SA_W_PH
:
18941 case NM_DPSU_H_QBR
:
18942 case NM_DPSQX_SA_W_PH
:
18943 case NM_MULSAQ_S_W_PH
:
18944 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18948 tcg_gen_movi_tl(t0
, rd
>> 3);
18949 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
18950 gen_store_gpr(t0
, ret
);
18955 int acc
= extract32(ctx
->opcode
, 14, 2);
18956 TCGv_i64 t2
= tcg_temp_new_i64();
18957 TCGv_i64 t3
= tcg_temp_new_i64();
18959 gen_load_gpr(t0
, rs
);
18960 gen_load_gpr(t1
, rt
);
18961 tcg_gen_ext32u_tl(t0
, t0
);
18962 tcg_gen_ext32u_tl(t1
, t1
);
18963 tcg_gen_extu_tl_i64(t2
, t0
);
18964 tcg_gen_extu_tl_i64(t3
, t1
);
18965 tcg_gen_mul_i64(t2
, t2
, t3
);
18966 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18967 tcg_gen_sub_i64(t2
, t3
, t2
);
18968 tcg_temp_free_i64(t3
);
18969 gen_move_low32(cpu_LO
[acc
], t2
);
18970 gen_move_high32(cpu_HI
[acc
], t2
);
18971 tcg_temp_free_i64(t2
);
18976 tcg_gen_movi_tl(t0
, rd
>> 3);
18977 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
18978 gen_store_gpr(t0
, ret
);
18983 generate_exception_end(ctx
, EXCP_RI
);
18990 tcg_temp_free(v0_t
);
18991 tcg_temp_free(v1_t
);
18994 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18998 TCGv t0
= tcg_temp_new();
18999 TCGv v0_t
= tcg_temp_new();
19001 gen_load_gpr(v0_t
, rs
);
19006 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19007 gen_store_gpr(v0_t
, ret
);
19011 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19012 gen_store_gpr(v0_t
, ret
);
19016 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19017 gen_store_gpr(v0_t
, ret
);
19019 case NM_PRECEQ_W_PHL
:
19021 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19022 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19023 gen_store_gpr(v0_t
, ret
);
19025 case NM_PRECEQ_W_PHR
:
19027 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19028 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19029 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19030 gen_store_gpr(v0_t
, ret
);
19032 case NM_PRECEQU_PH_QBL
:
19034 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19035 gen_store_gpr(v0_t
, ret
);
19037 case NM_PRECEQU_PH_QBR
:
19039 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19040 gen_store_gpr(v0_t
, ret
);
19042 case NM_PRECEQU_PH_QBLA
:
19044 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19045 gen_store_gpr(v0_t
, ret
);
19047 case NM_PRECEQU_PH_QBRA
:
19049 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19050 gen_store_gpr(v0_t
, ret
);
19052 case NM_PRECEU_PH_QBL
:
19054 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19055 gen_store_gpr(v0_t
, ret
);
19057 case NM_PRECEU_PH_QBR
:
19059 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19060 gen_store_gpr(v0_t
, ret
);
19062 case NM_PRECEU_PH_QBLA
:
19064 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19065 gen_store_gpr(v0_t
, ret
);
19067 case NM_PRECEU_PH_QBRA
:
19069 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19070 gen_store_gpr(v0_t
, ret
);
19074 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19075 tcg_gen_shli_tl(t0
, v0_t
, 16);
19076 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19077 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19078 gen_store_gpr(v0_t
, ret
);
19082 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19083 tcg_gen_shli_tl(t0
, v0_t
, 8);
19084 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19085 tcg_gen_shli_tl(t0
, v0_t
, 16);
19086 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19087 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19088 gen_store_gpr(v0_t
, ret
);
19092 gen_helper_bitrev(v0_t
, v0_t
);
19093 gen_store_gpr(v0_t
, ret
);
19098 TCGv tv0
= tcg_temp_new();
19100 gen_load_gpr(tv0
, rt
);
19101 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19102 gen_store_gpr(v0_t
, ret
);
19103 tcg_temp_free(tv0
);
19106 case NM_RADDU_W_QB
:
19108 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19109 gen_store_gpr(v0_t
, ret
);
19112 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19116 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19120 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19123 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19126 generate_exception_end(ctx
, EXCP_RI
);
19130 tcg_temp_free(v0_t
);
/*
 * NM_POOL32AXF_7 pool: DSP byte/halfword shifts with immediate shift
 * amount taken from the rd field (SHRA[_R].QB, SHRL.PH) plus REPL.QB
 * (replicate an 8-bit immediate into all four byte lanes).
 *
 * NOTE(review): lossy extraction — the dispatching switch header, the
 * `case` labels for the three arms, `break;` lines and closing braces
 * are missing from the visible lines. Code reproduced verbatim.
 */
19134 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19135 int rt
, int rs
, int rd
)
19137 TCGv t0
= tcg_temp_new();
19138 TCGv rs_t
= tcg_temp_new();
19140 gen_load_gpr(rs_t
, rs
);
/* SHRA[_R].QB: shift amount = rd >> 2; bit 12 selects rounding variant */
19145 tcg_gen_movi_tl(t0
, rd
>> 2);
19146 switch (extract32(ctx
->opcode
, 12, 1)) {
19149 gen_helper_shra_qb(t0
, t0
, rs_t
);
19150 gen_store_gpr(t0
, rt
);
19154 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19155 gen_store_gpr(t0
, rt
);
/* SHRL.PH: shift amount = rd >> 1 */
19161 tcg_gen_movi_tl(t0
, rd
>> 1);
19162 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19163 gen_store_gpr(t0
, rt
);
/* REPL.QB: replicate imm (opcode bits [20:13]) into the four byte lanes.
 * NOTE(review): the final `| imm` term of the expression (original line
 * 19174) is missing from this extraction. */
19169 target_long result
;
19170 imm
= extract32(ctx
->opcode
, 13, 8);
19171 result
= (uint32_t)imm
<< 24 |
19172 (uint32_t)imm
<< 16 |
19173 (uint32_t)imm
<< 8 |
19175 result
= (int32_t)result
;
19176 tcg_gen_movi_tl(t0
, result
);
19177 gen_store_gpr(t0
, rt
);
/* unrecognized sub-opcode: Reserved Instruction */
19181 generate_exception_end(ctx
, EXCP_RI
);
19185 tcg_temp_free(rs_t
);
19189 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19191 int rt
= extract32(ctx
->opcode
, 21, 5);
19192 int rs
= extract32(ctx
->opcode
, 16, 5);
19193 int rd
= extract32(ctx
->opcode
, 11, 5);
19195 switch (extract32(ctx
->opcode
, 6, 3)) {
19196 case NM_POOL32AXF_1
:
19198 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19199 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19202 case NM_POOL32AXF_2
:
19204 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19205 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19208 case NM_POOL32AXF_4
:
19210 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19211 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19214 case NM_POOL32AXF_5
:
19215 switch (extract32(ctx
->opcode
, 9, 7)) {
19216 #ifndef CONFIG_USER_ONLY
19218 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19221 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19224 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19227 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19230 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19233 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19236 check_cp0_enabled(ctx
);
19238 TCGv t0
= tcg_temp_new();
19240 save_cpu_state(ctx
, 1);
19241 gen_helper_di(t0
, cpu_env
);
19242 gen_store_gpr(t0
, rt
);
19243 /* Stop translation as we may have switched the execution mode */
19244 ctx
->base
.is_jmp
= DISAS_STOP
;
19249 check_cp0_enabled(ctx
);
19251 TCGv t0
= tcg_temp_new();
19253 save_cpu_state(ctx
, 1);
19254 gen_helper_ei(t0
, cpu_env
);
19255 gen_store_gpr(t0
, rt
);
19256 /* Stop translation as we may have switched the execution mode */
19257 ctx
->base
.is_jmp
= DISAS_STOP
;
19262 gen_load_srsgpr(rs
, rt
);
19265 gen_store_srsgpr(rs
, rt
);
19268 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19271 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19274 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19278 generate_exception_end(ctx
, EXCP_RI
);
19282 case NM_POOL32AXF_7
:
19284 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19285 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19289 generate_exception_end(ctx
, EXCP_RI
);
19294 /* Immediate Value Compact Branches */
19295 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19296 int rt
, int32_t imm
, int32_t offset
)
19299 int bcond_compute
= 0;
19300 TCGv t0
= tcg_temp_new();
19301 TCGv t1
= tcg_temp_new();
19303 gen_load_gpr(t0
, rt
);
19304 tcg_gen_movi_tl(t1
, imm
);
19305 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19307 /* Load needed operands and calculate btarget */
19310 if (rt
== 0 && imm
== 0) {
19311 /* Unconditional branch */
19312 } else if (rt
== 0 && imm
!= 0) {
19317 cond
= TCG_COND_EQ
;
19323 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19324 generate_exception_end(ctx
, EXCP_RI
);
19326 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19327 /* Unconditional branch */
19328 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19332 tcg_gen_shri_tl(t0
, t0
, imm
);
19333 tcg_gen_andi_tl(t0
, t0
, 1);
19334 tcg_gen_movi_tl(t1
, 0);
19336 if (opc
== NM_BBEQZC
) {
19337 cond
= TCG_COND_EQ
;
19339 cond
= TCG_COND_NE
;
19344 if (rt
== 0 && imm
== 0) {
19347 } else if (rt
== 0 && imm
!= 0) {
19348 /* Unconditional branch */
19351 cond
= TCG_COND_NE
;
19355 if (rt
== 0 && imm
== 0) {
19356 /* Unconditional branch */
19359 cond
= TCG_COND_GE
;
19364 cond
= TCG_COND_LT
;
19367 if (rt
== 0 && imm
== 0) {
19368 /* Unconditional branch */
19371 cond
= TCG_COND_GEU
;
19376 cond
= TCG_COND_LTU
;
19379 MIPS_INVAL("Immediate Value Compact branch");
19380 generate_exception_end(ctx
, EXCP_RI
);
19384 if (bcond_compute
== 0) {
19385 /* Uncoditional compact branch */
19386 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19388 /* Conditional compact branch */
19389 TCGLabel
*fs
= gen_new_label();
19391 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19393 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19396 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19404 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
/*
 * Unconditional register-indirect branch: target = (GPR[rs] << 1)
 * + pc_next + 4; BALRSC additionally writes the return address to
 * GPR[rt]. Ends the TB via lookup_and_goto_ptr.
 *
 * NOTE(review): lossy extraction — the signature continuation carrying
 * the `rt` parameter (original line 19406) and the surrounding braces /
 * conditionals are missing. Code reproduced verbatim.
 */
19405 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19408 TCGv t0
= tcg_temp_new();
19409 TCGv t1
= tcg_temp_new();
19412 gen_load_gpr(t0
, rs
);
/* link: store the return address (next instruction) into GPR[rt] */
19416 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19419 /* calculate btarget */
/* target = pc_next + 4 + (rs << 1); halfword-scaled register offset */
19420 tcg_gen_shli_tl(t0
, t0
, 1);
19421 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19422 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19424 /* unconditional branch to register */
19425 tcg_gen_mov_tl(cpu_PC
, btarget
);
19426 tcg_gen_lookup_and_goto_ptr();
19432 /* nanoMIPS Branches */
19433 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19434 int rs
, int rt
, int32_t offset
)
19436 int bcond_compute
= 0;
19437 TCGv t0
= tcg_temp_new();
19438 TCGv t1
= tcg_temp_new();
19440 /* Load needed operands and calculate btarget */
19442 /* compact branch */
19445 gen_load_gpr(t0
, rs
);
19446 gen_load_gpr(t1
, rt
);
19448 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19452 if (rs
== 0 || rs
== rt
) {
19453 /* OPC_BLEZALC, OPC_BGEZALC */
19454 /* OPC_BGTZALC, OPC_BLTZALC */
19455 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19457 gen_load_gpr(t0
, rs
);
19458 gen_load_gpr(t1
, rt
);
19460 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19463 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19467 /* OPC_BEQZC, OPC_BNEZC */
19468 gen_load_gpr(t0
, rs
);
19470 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19472 /* OPC_JIC, OPC_JIALC */
19473 TCGv tbase
= tcg_temp_new();
19474 TCGv toffset
= tcg_temp_new();
19476 gen_load_gpr(tbase
, rt
);
19477 tcg_gen_movi_tl(toffset
, offset
);
19478 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19479 tcg_temp_free(tbase
);
19480 tcg_temp_free(toffset
);
19484 MIPS_INVAL("Compact branch/jump");
19485 generate_exception_end(ctx
, EXCP_RI
);
19489 if (bcond_compute
== 0) {
19490 /* Uncoditional compact branch */
19493 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19496 MIPS_INVAL("Compact branch/jump");
19497 generate_exception_end(ctx
, EXCP_RI
);
19501 /* Conditional compact branch */
19502 TCGLabel
*fs
= gen_new_label();
19506 if (rs
== 0 && rt
!= 0) {
19508 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19509 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19511 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19514 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19518 if (rs
== 0 && rt
!= 0) {
19520 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19521 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19523 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19526 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19530 if (rs
== 0 && rt
!= 0) {
19532 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19533 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19535 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19538 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19542 if (rs
== 0 && rt
!= 0) {
19544 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19545 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19547 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19550 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19554 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19557 MIPS_INVAL("Compact conditional branch/jump");
19558 generate_exception_end(ctx
, EXCP_RI
);
19562 /* Generating branch here as compact branches don't have delay slot */
19563 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19566 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19575 /* nanoMIPS CP1 Branches */
/*
 * R6-style FP compact branches that test bit 0 of FPR[ft]:
 * one arm branches when the bit is zero (t0 is XOR-inverted), the
 * other when it is non-zero (t0 used as-is). The truncated bit is
 * left in `bcond` and the target in ctx->btarget for the delay-slot
 * machinery (MIPS_HFLAG_BC).
 *
 * NOTE(review): lossy extraction — the `switch (op)` header, the
 * `case` labels for the two branch flavors, `break;` lines and closing
 * braces are missing. Code reproduced verbatim.
 */
19576 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19577 int32_t ft
, int32_t offset
)
19579 target_ulong btarget
;
19580 TCGv_i64 t0
= tcg_temp_new_i64();
/* fetch FPR[ft] and isolate its least-significant bit */
19582 gen_load_fpr64(ctx
, t0
, ft
);
19583 tcg_gen_andi_i64(t0
, t0
, 1);
19585 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* branch-if-zero flavor: invert the tested bit */
19589 tcg_gen_xori_i64(t0
, t0
, 1);
19590 ctx
->hflags
|= MIPS_HFLAG_BC
;
19593 /* t0 already set */
19594 ctx
->hflags
|= MIPS_HFLAG_BC
;
/* unrecognized op: Reserved Instruction */
19597 MIPS_INVAL("cp1 cond branch");
19598 generate_exception_end(ctx
, EXCP_RI
);
/* publish the condition and target for the branch epilogue */
19602 tcg_gen_trunc_i64_tl(bcond
, t0
);
19604 ctx
->btarget
= btarget
;
19607 tcg_temp_free_i64(t0
);
19611 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19614 t0
= tcg_temp_new();
19615 t1
= tcg_temp_new();
19617 gen_load_gpr(t0
, rs
);
19618 gen_load_gpr(t1
, rt
);
19620 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19621 /* PP.LSXS instructions require shifting */
19622 switch (extract32(ctx
->opcode
, 7, 4)) {
19627 tcg_gen_shli_tl(t0
, t0
, 1);
19634 tcg_gen_shli_tl(t0
, t0
, 2);
19638 tcg_gen_shli_tl(t0
, t0
, 3);
19642 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19644 switch (extract32(ctx
->opcode
, 7, 4)) {
19646 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19648 gen_store_gpr(t0
, rd
);
19652 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19654 gen_store_gpr(t0
, rd
);
19658 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19660 gen_store_gpr(t0
, rd
);
19663 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19665 gen_store_gpr(t0
, rd
);
19669 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19671 gen_store_gpr(t0
, rd
);
19675 gen_load_gpr(t1
, rd
);
19676 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19682 gen_load_gpr(t1
, rd
);
19683 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19689 gen_load_gpr(t1
, rd
);
19690 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19694 /*case NM_LWC1XS:*/
19696 /*case NM_LDC1XS:*/
19698 /*case NM_SWC1XS:*/
19700 /*case NM_SDC1XS:*/
19701 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19702 check_cp1_enabled(ctx
);
19703 switch (extract32(ctx
->opcode
, 7, 4)) {
19705 /*case NM_LWC1XS:*/
19706 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19709 /*case NM_LDC1XS:*/
19710 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19713 /*case NM_SWC1XS:*/
19714 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19717 /*case NM_SDC1XS:*/
19718 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19722 generate_exception_err(ctx
, EXCP_CpU
, 1);
19726 generate_exception_end(ctx
, EXCP_RI
);
19734 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19738 rt
= extract32(ctx
->opcode
, 21, 5);
19739 rs
= extract32(ctx
->opcode
, 16, 5);
19740 rd
= extract32(ctx
->opcode
, 11, 5);
19742 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19743 generate_exception_end(ctx
, EXCP_RI
);
19746 check_cp1_enabled(ctx
);
19747 switch (extract32(ctx
->opcode
, 0, 3)) {
19749 switch (extract32(ctx
->opcode
, 3, 7)) {
19751 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19754 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19757 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19760 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19763 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19766 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19769 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19772 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19775 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19778 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19781 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19784 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19787 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19790 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19793 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19796 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19799 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19802 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19805 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19808 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19811 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19814 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19817 generate_exception_end(ctx
, EXCP_RI
);
19822 switch (extract32(ctx
->opcode
, 3, 3)) {
19824 switch (extract32(ctx
->opcode
, 9, 1)) {
19826 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19829 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19834 switch (extract32(ctx
->opcode
, 9, 1)) {
19836 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19839 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19844 switch (extract32(ctx
->opcode
, 9, 1)) {
19846 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19849 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19854 switch (extract32(ctx
->opcode
, 9, 1)) {
19856 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19859 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19864 switch (extract32(ctx
->opcode
, 6, 8)) {
19866 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19869 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19872 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19875 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19878 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19881 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19884 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19887 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19890 switch (extract32(ctx
->opcode
, 6, 9)) {
19892 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19895 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19898 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19901 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19904 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19907 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19910 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19913 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19916 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19919 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19922 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19925 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19928 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
19931 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
19934 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
19937 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
19940 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
19943 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
19946 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
19949 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
19952 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
19955 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
19958 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
19961 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
19964 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
19967 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
19970 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
19973 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
19976 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
19979 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
19982 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
19985 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
19988 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
19991 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
19994 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
19997 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20000 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20003 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20006 generate_exception_end(ctx
, EXCP_RI
);
20015 switch (extract32(ctx
->opcode
, 3, 3)) {
20016 case NM_CMP_CONDN_S
:
20017 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20019 case NM_CMP_CONDN_D
:
20020 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20023 generate_exception_end(ctx
, EXCP_RI
);
20028 generate_exception_end(ctx
, EXCP_RI
);
20033 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20034 int rd
, int rs
, int rt
)
20037 TCGv t0
= tcg_temp_new();
20038 TCGv v1_t
= tcg_temp_new();
20039 TCGv v2_t
= tcg_temp_new();
20041 gen_load_gpr(v1_t
, rs
);
20042 gen_load_gpr(v2_t
, rt
);
20047 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20051 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20055 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20057 case NM_CMPU_EQ_QB
:
20059 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20061 case NM_CMPU_LT_QB
:
20063 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20065 case NM_CMPU_LE_QB
:
20067 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20069 case NM_CMPGU_EQ_QB
:
20071 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20072 gen_store_gpr(v1_t
, ret
);
20074 case NM_CMPGU_LT_QB
:
20076 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20077 gen_store_gpr(v1_t
, ret
);
20079 case NM_CMPGU_LE_QB
:
20081 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20082 gen_store_gpr(v1_t
, ret
);
20084 case NM_CMPGDU_EQ_QB
:
20086 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20087 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20088 gen_store_gpr(v1_t
, ret
);
20090 case NM_CMPGDU_LT_QB
:
20092 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20093 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20094 gen_store_gpr(v1_t
, ret
);
20096 case NM_CMPGDU_LE_QB
:
20098 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20099 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20100 gen_store_gpr(v1_t
, ret
);
20104 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20105 gen_store_gpr(v1_t
, ret
);
20109 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20110 gen_store_gpr(v1_t
, ret
);
20114 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20115 gen_store_gpr(v1_t
, ret
);
20119 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20120 gen_store_gpr(v1_t
, ret
);
20124 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20125 gen_store_gpr(v1_t
, ret
);
20129 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20130 gen_store_gpr(v1_t
, ret
);
20134 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20135 gen_store_gpr(v1_t
, ret
);
20139 switch (extract32(ctx
->opcode
, 10, 1)) {
20142 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20143 gen_store_gpr(v1_t
, ret
);
20147 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20148 gen_store_gpr(v1_t
, ret
);
20152 case NM_ADDQH_R_PH
:
20154 switch (extract32(ctx
->opcode
, 10, 1)) {
20157 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20158 gen_store_gpr(v1_t
, ret
);
20162 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20163 gen_store_gpr(v1_t
, ret
);
20169 switch (extract32(ctx
->opcode
, 10, 1)) {
20172 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20173 gen_store_gpr(v1_t
, ret
);
20177 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20178 gen_store_gpr(v1_t
, ret
);
20184 switch (extract32(ctx
->opcode
, 10, 1)) {
20187 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20188 gen_store_gpr(v1_t
, ret
);
20192 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20193 gen_store_gpr(v1_t
, ret
);
20199 switch (extract32(ctx
->opcode
, 10, 1)) {
20202 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20203 gen_store_gpr(v1_t
, ret
);
20207 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20208 gen_store_gpr(v1_t
, ret
);
20212 case NM_ADDUH_R_QB
:
20214 switch (extract32(ctx
->opcode
, 10, 1)) {
20217 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20218 gen_store_gpr(v1_t
, ret
);
20222 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20223 gen_store_gpr(v1_t
, ret
);
20227 case NM_SHRAV_R_PH
:
20229 switch (extract32(ctx
->opcode
, 10, 1)) {
20232 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20233 gen_store_gpr(v1_t
, ret
);
20237 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20238 gen_store_gpr(v1_t
, ret
);
20242 case NM_SHRAV_R_QB
:
20244 switch (extract32(ctx
->opcode
, 10, 1)) {
20247 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20248 gen_store_gpr(v1_t
, ret
);
20252 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20253 gen_store_gpr(v1_t
, ret
);
20259 switch (extract32(ctx
->opcode
, 10, 1)) {
20262 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20263 gen_store_gpr(v1_t
, ret
);
20267 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20268 gen_store_gpr(v1_t
, ret
);
20272 case NM_SUBQH_R_PH
:
20274 switch (extract32(ctx
->opcode
, 10, 1)) {
20277 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20278 gen_store_gpr(v1_t
, ret
);
20282 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20283 gen_store_gpr(v1_t
, ret
);
20289 switch (extract32(ctx
->opcode
, 10, 1)) {
20292 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20293 gen_store_gpr(v1_t
, ret
);
20297 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20298 gen_store_gpr(v1_t
, ret
);
20304 switch (extract32(ctx
->opcode
, 10, 1)) {
20307 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20308 gen_store_gpr(v1_t
, ret
);
20312 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20313 gen_store_gpr(v1_t
, ret
);
20319 switch (extract32(ctx
->opcode
, 10, 1)) {
20322 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20323 gen_store_gpr(v1_t
, ret
);
20327 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20328 gen_store_gpr(v1_t
, ret
);
20332 case NM_SUBUH_R_QB
:
20334 switch (extract32(ctx
->opcode
, 10, 1)) {
20337 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20338 gen_store_gpr(v1_t
, ret
);
20342 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20343 gen_store_gpr(v1_t
, ret
);
20347 case NM_SHLLV_S_PH
:
20349 switch (extract32(ctx
->opcode
, 10, 1)) {
20352 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20353 gen_store_gpr(v1_t
, ret
);
20357 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20358 gen_store_gpr(v1_t
, ret
);
20362 case NM_PRECR_SRA_R_PH_W
:
20364 switch (extract32(ctx
->opcode
, 10, 1)) {
20366 /* PRECR_SRA_PH_W */
20368 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20369 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20371 gen_store_gpr(v1_t
, rt
);
20372 tcg_temp_free_i32(sa_t
);
20376 /* PRECR_SRA_R_PH_W */
20378 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20379 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20381 gen_store_gpr(v1_t
, rt
);
20382 tcg_temp_free_i32(sa_t
);
20387 case NM_MULEU_S_PH_QBL
:
20389 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20390 gen_store_gpr(v1_t
, ret
);
20392 case NM_MULEU_S_PH_QBR
:
20394 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20395 gen_store_gpr(v1_t
, ret
);
20397 case NM_MULQ_RS_PH
:
20399 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20400 gen_store_gpr(v1_t
, ret
);
20404 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20405 gen_store_gpr(v1_t
, ret
);
20409 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20410 gen_store_gpr(v1_t
, ret
);
20414 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20415 gen_store_gpr(v1_t
, ret
);
20419 gen_load_gpr(t0
, rs
);
20421 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20423 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20427 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20428 gen_store_gpr(v1_t
, ret
);
20432 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20433 gen_store_gpr(v1_t
, ret
);
20437 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20438 gen_store_gpr(v1_t
, ret
);
20442 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20443 gen_store_gpr(v1_t
, ret
);
20447 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20448 gen_store_gpr(v1_t
, ret
);
20452 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20453 gen_store_gpr(v1_t
, ret
);
20458 TCGv tv0
= tcg_temp_new();
20459 TCGv tv1
= tcg_temp_new();
20460 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20462 tcg_gen_movi_tl(tv0
, rd
>> 3);
20463 tcg_gen_movi_tl(tv1
, imm
);
20464 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20467 case NM_MULEQ_S_W_PHL
:
20469 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20470 gen_store_gpr(v1_t
, ret
);
20472 case NM_MULEQ_S_W_PHR
:
20474 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20475 gen_store_gpr(v1_t
, ret
);
20479 switch (extract32(ctx
->opcode
, 10, 1)) {
20482 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20483 gen_store_gpr(v1_t
, ret
);
20487 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20488 gen_store_gpr(v1_t
, ret
);
20492 case NM_PRECR_QB_PH
:
20494 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20495 gen_store_gpr(v1_t
, ret
);
20497 case NM_PRECRQ_QB_PH
:
20499 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20500 gen_store_gpr(v1_t
, ret
);
20502 case NM_PRECRQ_PH_W
:
20504 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20505 gen_store_gpr(v1_t
, ret
);
20507 case NM_PRECRQ_RS_PH_W
:
20509 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20510 gen_store_gpr(v1_t
, ret
);
20512 case NM_PRECRQU_S_QB_PH
:
20514 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20515 gen_store_gpr(v1_t
, ret
);
20519 tcg_gen_movi_tl(t0
, rd
);
20520 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20521 gen_store_gpr(v1_t
, rt
);
20525 tcg_gen_movi_tl(t0
, rd
>> 1);
20526 switch (extract32(ctx
->opcode
, 10, 1)) {
20529 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20530 gen_store_gpr(v1_t
, rt
);
20534 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20535 gen_store_gpr(v1_t
, rt
);
20541 tcg_gen_movi_tl(t0
, rd
>> 1);
20542 switch (extract32(ctx
->opcode
, 10, 2)) {
20545 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20546 gen_store_gpr(v1_t
, rt
);
20550 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20551 gen_store_gpr(v1_t
, rt
);
20554 generate_exception_end(ctx
, EXCP_RI
);
20560 tcg_gen_movi_tl(t0
, rd
);
20561 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20562 gen_store_gpr(v1_t
, rt
);
20568 imm
= sextract32(ctx
->opcode
, 11, 11);
20569 imm
= (int16_t)(imm
<< 6) >> 6;
20571 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20576 generate_exception_end(ctx
, EXCP_RI
);
20581 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20589 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20590 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20592 rt
= extract32(ctx
->opcode
, 21, 5);
20593 rs
= extract32(ctx
->opcode
, 16, 5);
20594 rd
= extract32(ctx
->opcode
, 11, 5);
20596 op
= extract32(ctx
->opcode
, 26, 6);
20601 switch (extract32(ctx
->opcode
, 19, 2)) {
20604 generate_exception_end(ctx
, EXCP_RI
);
20607 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20608 generate_exception_end(ctx
, EXCP_SYSCALL
);
20610 generate_exception_end(ctx
, EXCP_RI
);
20614 generate_exception_end(ctx
, EXCP_BREAK
);
20617 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20618 gen_helper_do_semihosting(cpu_env
);
20620 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20621 generate_exception_end(ctx
, EXCP_RI
);
20623 generate_exception_end(ctx
, EXCP_DBp
);
20630 imm
= extract32(ctx
->opcode
, 0, 16);
20632 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20634 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20636 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20641 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20642 extract32(ctx
->opcode
, 1, 20) << 1;
20643 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20644 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20648 switch (ctx
->opcode
& 0x07) {
20650 gen_pool32a0_nanomips_insn(env
, ctx
);
20654 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20655 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20659 switch (extract32(ctx
->opcode
, 3, 3)) {
20661 gen_p_lsx(ctx
, rd
, rs
, rt
);
20664 /* In nanoMIPS, the shift field directly encodes the shift
20665 * amount, meaning that the supported shift values are in
20666 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20667 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20668 extract32(ctx
->opcode
, 9, 2) - 1);
20671 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20674 gen_pool32axf_nanomips_insn(env
, ctx
);
20677 generate_exception_end(ctx
, EXCP_RI
);
20682 generate_exception_end(ctx
, EXCP_RI
);
20687 switch (ctx
->opcode
& 0x03) {
20690 offset
= extract32(ctx
->opcode
, 0, 21);
20691 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20695 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20698 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20701 generate_exception_end(ctx
, EXCP_RI
);
20707 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20708 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20709 switch (extract32(ctx
->opcode
, 16, 5)) {
20713 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20719 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20720 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20726 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20732 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20735 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20742 t0
= tcg_temp_new();
20744 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20747 tcg_gen_movi_tl(t0
, addr
);
20748 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20756 t0
= tcg_temp_new();
20757 t1
= tcg_temp_new();
20759 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20762 tcg_gen_movi_tl(t0
, addr
);
20763 gen_load_gpr(t1
, rt
);
20765 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20772 generate_exception_end(ctx
, EXCP_RI
);
20778 switch (extract32(ctx
->opcode
, 12, 4)) {
20780 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20783 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20786 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20789 switch (extract32(ctx
->opcode
, 20, 1)) {
20791 switch (ctx
->opcode
& 3) {
20793 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20794 extract32(ctx
->opcode
, 2, 1),
20795 extract32(ctx
->opcode
, 3, 9) << 3);
20798 case NM_RESTORE_JRC
:
20799 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20800 extract32(ctx
->opcode
, 2, 1),
20801 extract32(ctx
->opcode
, 3, 9) << 3);
20802 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20803 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20807 generate_exception_end(ctx
, EXCP_RI
);
20812 generate_exception_end(ctx
, EXCP_RI
);
20817 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20820 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20824 TCGv t0
= tcg_temp_new();
20826 imm
= extract32(ctx
->opcode
, 0, 12);
20827 gen_load_gpr(t0
, rs
);
20828 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20829 gen_store_gpr(t0
, rt
);
20835 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20836 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20840 int shift
= extract32(ctx
->opcode
, 0, 5);
20841 switch (extract32(ctx
->opcode
, 5, 4)) {
20843 if (rt
== 0 && shift
== 0) {
20845 } else if (rt
== 0 && shift
== 3) {
20846 /* EHB - treat as NOP */
20847 } else if (rt
== 0 && shift
== 5) {
20848 /* PAUSE - treat as NOP */
20849 } else if (rt
== 0 && shift
== 6) {
20851 gen_sync(extract32(ctx
->opcode
, 16, 5));
20854 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20855 extract32(ctx
->opcode
, 0, 5));
20859 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20860 extract32(ctx
->opcode
, 0, 5));
20863 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20864 extract32(ctx
->opcode
, 0, 5));
20867 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20868 extract32(ctx
->opcode
, 0, 5));
20876 TCGv t0
= tcg_temp_new();
20877 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20878 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20880 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20882 gen_load_gpr(t0
, rs
);
20883 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20886 tcg_temp_free_i32(shift
);
20887 tcg_temp_free_i32(shiftx
);
20888 tcg_temp_free_i32(stripe
);
20892 switch (((ctx
->opcode
>> 10) & 2) |
20893 (extract32(ctx
->opcode
, 5, 1))) {
20896 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20897 extract32(ctx
->opcode
, 6, 5));
20900 generate_exception_end(ctx
, EXCP_RI
);
20905 switch (((ctx
->opcode
>> 10) & 2) |
20906 (extract32(ctx
->opcode
, 5, 1))) {
20909 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20910 extract32(ctx
->opcode
, 6, 5));
20913 generate_exception_end(ctx
, EXCP_RI
);
20918 generate_exception_end(ctx
, EXCP_RI
);
20923 gen_pool32f_nanomips_insn(ctx
);
20928 switch (extract32(ctx
->opcode
, 1, 1)) {
20931 tcg_gen_movi_tl(cpu_gpr
[rt
],
20932 sextract32(ctx
->opcode
, 0, 1) << 31 |
20933 extract32(ctx
->opcode
, 2, 10) << 21 |
20934 extract32(ctx
->opcode
, 12, 9) << 12);
20939 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
20940 extract32(ctx
->opcode
, 2, 10) << 21 |
20941 extract32(ctx
->opcode
, 12, 9) << 12;
20943 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20944 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20951 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
20953 switch (extract32(ctx
->opcode
, 18, 3)) {
20955 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
20958 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
20961 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
20965 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
20970 switch (ctx
->opcode
& 1) {
20972 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
20975 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
20981 switch (ctx
->opcode
& 1) {
20983 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
20986 generate_exception_end(ctx
, EXCP_RI
);
20992 switch (ctx
->opcode
& 0x3) {
20994 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
20997 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21000 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21003 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21008 generate_exception_end(ctx
, EXCP_RI
);
21015 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21017 switch (extract32(ctx
->opcode
, 12, 4)) {
21021 /* Break the TB to be able to sync copied instructions
21023 ctx
->base
.is_jmp
= DISAS_STOP
;
21026 /* Treat as NOP. */
21030 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21033 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21036 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21039 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21042 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21045 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21048 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21051 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21054 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21057 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21060 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21063 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21066 generate_exception_end(ctx
, EXCP_RI
);
21073 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21074 extract32(ctx
->opcode
, 0, 8);
21076 switch (extract32(ctx
->opcode
, 8, 3)) {
21078 switch (extract32(ctx
->opcode
, 11, 4)) {
21080 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21083 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21086 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21089 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21092 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21095 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21098 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21101 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21104 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21107 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21110 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21113 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21118 /* Break the TB to be able to sync copied instructions
21120 ctx
->base
.is_jmp
= DISAS_STOP
;
21123 /* Treat as NOP. */
21127 generate_exception_end(ctx
, EXCP_RI
);
21132 switch (extract32(ctx
->opcode
, 11, 4)) {
21137 TCGv t0
= tcg_temp_new();
21138 TCGv t1
= tcg_temp_new();
21140 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21142 switch (extract32(ctx
->opcode
, 11, 4)) {
21144 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21146 gen_store_gpr(t0
, rt
);
21149 gen_load_gpr(t1
, rt
);
21150 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21159 switch (ctx
->opcode
& 0x03) {
21161 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21165 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21170 switch (ctx
->opcode
& 0x03) {
21172 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21176 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21181 check_cp0_enabled(ctx
);
21182 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21183 gen_cache_operation(ctx
, rt
, rs
, s
);
21192 int count
= extract32(ctx
->opcode
, 12, 3);
21195 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21196 extract32(ctx
->opcode
, 0, 8);
21197 TCGv va
= tcg_temp_new();
21198 TCGv t1
= tcg_temp_new();
21199 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21200 NM_P_LS_UAWM
? MO_UNALN
: 0;
21202 count
= (count
== 0) ? 8 : count
;
21203 while (counter
!= count
) {
21204 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21205 int this_offset
= offset
+ (counter
<< 2);
21207 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21209 switch (extract32(ctx
->opcode
, 11, 1)) {
21211 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21213 gen_store_gpr(t1
, this_rt
);
21214 if ((this_rt
== rs
) &&
21215 (counter
!= (count
- 1))) {
21216 /* UNPREDICTABLE */
21220 this_rt
= (rt
== 0) ? 0 : this_rt
;
21221 gen_load_gpr(t1
, this_rt
);
21222 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21233 generate_exception_end(ctx
, EXCP_RI
);
21241 TCGv t0
= tcg_temp_new();
21242 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21243 extract32(ctx
->opcode
, 1, 20) << 1;
21244 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21245 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21246 extract32(ctx
->opcode
, 21, 3));
21247 gen_load_gpr(t0
, rt
);
21248 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21249 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21255 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21256 extract32(ctx
->opcode
, 1, 24) << 1;
21258 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21260 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21263 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21268 switch (extract32(ctx
->opcode
, 12, 4)) {
21271 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21274 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21277 generate_exception_end(ctx
, EXCP_RI
);
21283 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21284 extract32(ctx
->opcode
, 1, 13) << 1;
21285 switch (extract32(ctx
->opcode
, 14, 2)) {
21288 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21291 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21292 extract32(ctx
->opcode
, 1, 13) << 1;
21293 check_cp1_enabled(ctx
);
21294 switch (extract32(ctx
->opcode
, 16, 5)) {
21296 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21299 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21304 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21305 extract32(ctx
->opcode
, 0, 1) << 13;
21307 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21312 generate_exception_end(ctx
, EXCP_RI
);
21318 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21320 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21324 if (rs
== rt
|| rt
== 0) {
21325 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21326 } else if (rs
== 0) {
21327 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21329 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21337 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21338 extract32(ctx
->opcode
, 1, 13) << 1;
21339 switch (extract32(ctx
->opcode
, 14, 2)) {
21342 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21345 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21347 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21349 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21353 if (rs
== 0 || rs
== rt
) {
21355 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21357 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21361 generate_exception_end(ctx
, EXCP_RI
);
21368 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21369 extract32(ctx
->opcode
, 1, 10) << 1;
21370 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21372 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21377 generate_exception_end(ctx
, EXCP_RI
);
21383 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21386 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21387 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21388 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21392 /* make sure instructions are on a halfword boundary */
21393 if (ctx
->base
.pc_next
& 0x1) {
21394 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21395 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21396 tcg_temp_free(tmp
);
21397 generate_exception_end(ctx
, EXCP_AdEL
);
21401 op
= extract32(ctx
->opcode
, 10, 6);
21404 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21407 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21408 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21411 switch (extract32(ctx
->opcode
, 3, 2)) {
21412 case NM_P16_SYSCALL
:
21413 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21414 generate_exception_end(ctx
, EXCP_SYSCALL
);
21416 generate_exception_end(ctx
, EXCP_RI
);
21420 generate_exception_end(ctx
, EXCP_BREAK
);
21423 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21424 gen_helper_do_semihosting(cpu_env
);
21426 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21427 generate_exception_end(ctx
, EXCP_RI
);
21429 generate_exception_end(ctx
, EXCP_DBp
);
21434 generate_exception_end(ctx
, EXCP_RI
);
21441 int shift
= extract32(ctx
->opcode
, 0, 3);
21443 shift
= (shift
== 0) ? 8 : shift
;
21445 switch (extract32(ctx
->opcode
, 3, 1)) {
21453 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21457 switch (ctx
->opcode
& 1) {
21459 gen_pool16c_nanomips_insn(ctx
);
21462 gen_ldxs(ctx
, rt
, rs
, rd
);
21467 switch (extract32(ctx
->opcode
, 6, 1)) {
21469 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21470 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21473 generate_exception_end(ctx
, EXCP_RI
);
21478 switch (extract32(ctx
->opcode
, 3, 1)) {
21480 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21481 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21483 case NM_P_ADDIURS5
:
21484 rt
= extract32(ctx
->opcode
, 5, 5);
21486 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21487 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21488 (extract32(ctx
->opcode
, 0, 3));
21489 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21495 switch (ctx
->opcode
& 0x1) {
21497 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21500 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21505 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21506 extract32(ctx
->opcode
, 5, 3);
21507 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21508 extract32(ctx
->opcode
, 0, 3);
21509 rt
= decode_gpr_gpr4(rt
);
21510 rs
= decode_gpr_gpr4(rs
);
21511 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21512 (extract32(ctx
->opcode
, 3, 1))) {
21515 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21519 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21522 generate_exception_end(ctx
, EXCP_RI
);
21528 int imm
= extract32(ctx
->opcode
, 0, 7);
21529 imm
= (imm
== 0x7f ? -1 : imm
);
21531 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21537 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21538 u
= (u
== 12) ? 0xff :
21539 (u
== 13) ? 0xffff : u
;
21540 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21544 offset
= extract32(ctx
->opcode
, 0, 2);
21545 switch (extract32(ctx
->opcode
, 2, 2)) {
21547 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21550 rt
= decode_gpr_gpr3_src_store(
21551 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21552 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21555 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21558 generate_exception_end(ctx
, EXCP_RI
);
21563 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21564 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21566 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21569 rt
= decode_gpr_gpr3_src_store(
21570 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21571 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21574 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21577 generate_exception_end(ctx
, EXCP_RI
);
21582 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21583 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21586 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21587 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21588 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21592 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21593 extract32(ctx
->opcode
, 5, 3);
21594 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21595 extract32(ctx
->opcode
, 0, 3);
21596 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21597 (extract32(ctx
->opcode
, 8, 1) << 2);
21598 rt
= decode_gpr_gpr4(rt
);
21599 rs
= decode_gpr_gpr4(rs
);
21600 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21604 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21605 extract32(ctx
->opcode
, 5, 3);
21606 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21607 extract32(ctx
->opcode
, 0, 3);
21608 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21609 (extract32(ctx
->opcode
, 8, 1) << 2);
21610 rt
= decode_gpr_gpr4_zero(rt
);
21611 rs
= decode_gpr_gpr4(rs
);
21612 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21615 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21616 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21619 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21620 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21621 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21624 rt
= decode_gpr_gpr3_src_store(
21625 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21626 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21627 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21628 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21631 rt
= decode_gpr_gpr3_src_store(
21632 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21633 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21634 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21637 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21638 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21639 (extract32(ctx
->opcode
, 1, 9) << 1));
21642 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21643 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21644 (extract32(ctx
->opcode
, 1, 9) << 1));
21647 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21648 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21649 (extract32(ctx
->opcode
, 1, 6) << 1));
21652 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21653 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21654 (extract32(ctx
->opcode
, 1, 6) << 1));
21657 switch (ctx
->opcode
& 0xf) {
21660 switch (extract32(ctx
->opcode
, 4, 1)) {
21662 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21663 extract32(ctx
->opcode
, 5, 5), 0, 0);
21666 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21667 extract32(ctx
->opcode
, 5, 5), 31, 0);
21674 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21675 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21676 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21677 extract32(ctx
->opcode
, 0, 4) << 1);
21684 int count
= extract32(ctx
->opcode
, 0, 4);
21685 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21687 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21688 switch (extract32(ctx
->opcode
, 8, 1)) {
21690 gen_save(ctx
, rt
, count
, 0, u
);
21692 case NM_RESTORE_JRC16
:
21693 gen_restore(ctx
, rt
, count
, 0, u
);
21694 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21703 static const int gpr2reg1
[] = {4, 5, 6, 7};
21704 static const int gpr2reg2
[] = {5, 6, 7, 8};
21706 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21707 extract32(ctx
->opcode
, 8, 1);
21708 int r1
= gpr2reg1
[rd2
];
21709 int r2
= gpr2reg2
[rd2
];
21710 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21711 extract32(ctx
->opcode
, 0, 3);
21712 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21713 extract32(ctx
->opcode
, 5, 3);
21714 TCGv t0
= tcg_temp_new();
21715 TCGv t1
= tcg_temp_new();
21716 if (op
== NM_MOVEP
) {
21719 rs
= decode_gpr_gpr4_zero(r3
);
21720 rt
= decode_gpr_gpr4_zero(r4
);
21722 rd
= decode_gpr_gpr4(r3
);
21723 re
= decode_gpr_gpr4(r4
);
21727 gen_load_gpr(t0
, rs
);
21728 gen_load_gpr(t1
, rt
);
21729 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21730 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21736 return decode_nanomips_32_48_opc(env
, ctx
);
21743 /* SmartMIPS extension to MIPS32 */
21745 #if defined(TARGET_MIPS64)
21747 /* MDMX extension to MIPS64 */
21751 /* MIPSDSP functions. */
21752 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21753 int rd
, int base
, int offset
)
21758 t0
= tcg_temp_new();
21761 gen_load_gpr(t0
, offset
);
21762 } else if (offset
== 0) {
21763 gen_load_gpr(t0
, base
);
21765 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21770 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21771 gen_store_gpr(t0
, rd
);
21774 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21775 gen_store_gpr(t0
, rd
);
21778 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21779 gen_store_gpr(t0
, rd
);
21781 #if defined(TARGET_MIPS64)
21783 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21784 gen_store_gpr(t0
, rd
);
21791 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21792 int ret
, int v1
, int v2
)
21798 /* Treat as NOP. */
21802 v1_t
= tcg_temp_new();
21803 v2_t
= tcg_temp_new();
21805 gen_load_gpr(v1_t
, v1
);
21806 gen_load_gpr(v2_t
, v2
);
21809 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21810 case OPC_MULT_G_2E
:
21814 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21816 case OPC_ADDUH_R_QB
:
21817 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21820 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21822 case OPC_ADDQH_R_PH
:
21823 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21826 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21828 case OPC_ADDQH_R_W
:
21829 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21832 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21834 case OPC_SUBUH_R_QB
:
21835 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21838 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21840 case OPC_SUBQH_R_PH
:
21841 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21844 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21846 case OPC_SUBQH_R_W
:
21847 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21851 case OPC_ABSQ_S_PH_DSP
:
21853 case OPC_ABSQ_S_QB
:
21855 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
21857 case OPC_ABSQ_S_PH
:
21859 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
21863 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
21865 case OPC_PRECEQ_W_PHL
:
21867 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
21868 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21870 case OPC_PRECEQ_W_PHR
:
21872 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
21873 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
21874 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21876 case OPC_PRECEQU_PH_QBL
:
21878 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
21880 case OPC_PRECEQU_PH_QBR
:
21882 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
21884 case OPC_PRECEQU_PH_QBLA
:
21886 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
21888 case OPC_PRECEQU_PH_QBRA
:
21890 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
21892 case OPC_PRECEU_PH_QBL
:
21894 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
21896 case OPC_PRECEU_PH_QBR
:
21898 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
21900 case OPC_PRECEU_PH_QBLA
:
21902 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
21904 case OPC_PRECEU_PH_QBRA
:
21906 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
21910 case OPC_ADDU_QB_DSP
:
21914 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21916 case OPC_ADDQ_S_PH
:
21918 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21922 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21926 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21928 case OPC_ADDU_S_QB
:
21930 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21934 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21936 case OPC_ADDU_S_PH
:
21938 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21942 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21944 case OPC_SUBQ_S_PH
:
21946 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21950 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21954 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21956 case OPC_SUBU_S_QB
:
21958 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21962 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21964 case OPC_SUBU_S_PH
:
21966 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21970 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21974 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21978 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
21980 case OPC_RADDU_W_QB
:
21982 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
21986 case OPC_CMPU_EQ_QB_DSP
:
21988 case OPC_PRECR_QB_PH
:
21990 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21992 case OPC_PRECRQ_QB_PH
:
21994 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21996 case OPC_PRECR_SRA_PH_W
:
21999 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22000 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22002 tcg_temp_free_i32(sa_t
);
22005 case OPC_PRECR_SRA_R_PH_W
:
22008 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22009 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22011 tcg_temp_free_i32(sa_t
);
22014 case OPC_PRECRQ_PH_W
:
22016 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22018 case OPC_PRECRQ_RS_PH_W
:
22020 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22022 case OPC_PRECRQU_S_QB_PH
:
22024 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22028 #ifdef TARGET_MIPS64
22029 case OPC_ABSQ_S_QH_DSP
:
22031 case OPC_PRECEQ_L_PWL
:
22033 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22035 case OPC_PRECEQ_L_PWR
:
22037 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22039 case OPC_PRECEQ_PW_QHL
:
22041 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22043 case OPC_PRECEQ_PW_QHR
:
22045 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22047 case OPC_PRECEQ_PW_QHLA
:
22049 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22051 case OPC_PRECEQ_PW_QHRA
:
22053 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22055 case OPC_PRECEQU_QH_OBL
:
22057 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22059 case OPC_PRECEQU_QH_OBR
:
22061 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22063 case OPC_PRECEQU_QH_OBLA
:
22065 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22067 case OPC_PRECEQU_QH_OBRA
:
22069 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22071 case OPC_PRECEU_QH_OBL
:
22073 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22075 case OPC_PRECEU_QH_OBR
:
22077 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22079 case OPC_PRECEU_QH_OBLA
:
22081 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22083 case OPC_PRECEU_QH_OBRA
:
22085 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22087 case OPC_ABSQ_S_OB
:
22089 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22091 case OPC_ABSQ_S_PW
:
22093 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22095 case OPC_ABSQ_S_QH
:
22097 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22101 case OPC_ADDU_OB_DSP
:
22103 case OPC_RADDU_L_OB
:
22105 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22109 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22111 case OPC_SUBQ_S_PW
:
22113 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22117 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22119 case OPC_SUBQ_S_QH
:
22121 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22125 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22127 case OPC_SUBU_S_OB
:
22129 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22133 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22135 case OPC_SUBU_S_QH
:
22137 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22141 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22143 case OPC_SUBUH_R_OB
:
22145 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22149 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22151 case OPC_ADDQ_S_PW
:
22153 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22157 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22159 case OPC_ADDQ_S_QH
:
22161 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22165 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22167 case OPC_ADDU_S_OB
:
22169 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22173 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22175 case OPC_ADDU_S_QH
:
22177 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22181 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22183 case OPC_ADDUH_R_OB
:
22185 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22189 case OPC_CMPU_EQ_OB_DSP
:
22191 case OPC_PRECR_OB_QH
:
22193 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22195 case OPC_PRECR_SRA_QH_PW
:
22198 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22199 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22200 tcg_temp_free_i32(ret_t
);
22203 case OPC_PRECR_SRA_R_QH_PW
:
22206 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22207 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22208 tcg_temp_free_i32(sa_v
);
22211 case OPC_PRECRQ_OB_QH
:
22213 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22215 case OPC_PRECRQ_PW_L
:
22217 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22219 case OPC_PRECRQ_QH_PW
:
22221 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22223 case OPC_PRECRQ_RS_QH_PW
:
22225 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22227 case OPC_PRECRQU_S_OB_QH
:
22229 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22236 tcg_temp_free(v1_t
);
22237 tcg_temp_free(v2_t
);
22240 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22241 int ret
, int v1
, int v2
)
22249 /* Treat as NOP. */
22253 t0
= tcg_temp_new();
22254 v1_t
= tcg_temp_new();
22255 v2_t
= tcg_temp_new();
22257 tcg_gen_movi_tl(t0
, v1
);
22258 gen_load_gpr(v1_t
, v1
);
22259 gen_load_gpr(v2_t
, v2
);
22262 case OPC_SHLL_QB_DSP
:
22264 op2
= MASK_SHLL_QB(ctx
->opcode
);
22268 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22272 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22276 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22280 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22282 case OPC_SHLL_S_PH
:
22284 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22286 case OPC_SHLLV_S_PH
:
22288 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22292 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22294 case OPC_SHLLV_S_W
:
22296 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22300 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22304 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22308 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22312 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22316 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22318 case OPC_SHRA_R_QB
:
22320 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22324 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22326 case OPC_SHRAV_R_QB
:
22328 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22332 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22334 case OPC_SHRA_R_PH
:
22336 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22340 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22342 case OPC_SHRAV_R_PH
:
22344 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22348 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22350 case OPC_SHRAV_R_W
:
22352 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22354 default: /* Invalid */
22355 MIPS_INVAL("MASK SHLL.QB");
22356 generate_exception_end(ctx
, EXCP_RI
);
22361 #ifdef TARGET_MIPS64
22362 case OPC_SHLL_OB_DSP
:
22363 op2
= MASK_SHLL_OB(ctx
->opcode
);
22367 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22371 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22373 case OPC_SHLL_S_PW
:
22375 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22377 case OPC_SHLLV_S_PW
:
22379 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22383 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22387 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22391 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22395 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22397 case OPC_SHLL_S_QH
:
22399 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22401 case OPC_SHLLV_S_QH
:
22403 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22407 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22411 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22413 case OPC_SHRA_R_OB
:
22415 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22417 case OPC_SHRAV_R_OB
:
22419 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22423 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22427 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22429 case OPC_SHRA_R_PW
:
22431 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22433 case OPC_SHRAV_R_PW
:
22435 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22439 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22443 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22445 case OPC_SHRA_R_QH
:
22447 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22449 case OPC_SHRAV_R_QH
:
22451 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22455 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22459 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22463 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22467 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22469 default: /* Invalid */
22470 MIPS_INVAL("MASK SHLL.OB");
22471 generate_exception_end(ctx
, EXCP_RI
);
22479 tcg_temp_free(v1_t
);
22480 tcg_temp_free(v2_t
);
22483 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22484 int ret
, int v1
, int v2
, int check_ret
)
22490 if ((ret
== 0) && (check_ret
== 1)) {
22491 /* Treat as NOP. */
22495 t0
= tcg_temp_new_i32();
22496 v1_t
= tcg_temp_new();
22497 v2_t
= tcg_temp_new();
22499 tcg_gen_movi_i32(t0
, ret
);
22500 gen_load_gpr(v1_t
, v1
);
22501 gen_load_gpr(v2_t
, v2
);
22504 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22505 * the same mask and op1. */
22506 case OPC_MULT_G_2E
:
22510 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22513 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22516 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22518 case OPC_MULQ_RS_W
:
22519 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22523 case OPC_DPA_W_PH_DSP
:
22525 case OPC_DPAU_H_QBL
:
22527 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22529 case OPC_DPAU_H_QBR
:
22531 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22533 case OPC_DPSU_H_QBL
:
22535 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22537 case OPC_DPSU_H_QBR
:
22539 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22543 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22545 case OPC_DPAX_W_PH
:
22547 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22549 case OPC_DPAQ_S_W_PH
:
22551 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22553 case OPC_DPAQX_S_W_PH
:
22555 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22557 case OPC_DPAQX_SA_W_PH
:
22559 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22563 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22565 case OPC_DPSX_W_PH
:
22567 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22569 case OPC_DPSQ_S_W_PH
:
22571 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22573 case OPC_DPSQX_S_W_PH
:
22575 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22577 case OPC_DPSQX_SA_W_PH
:
22579 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22581 case OPC_MULSAQ_S_W_PH
:
22583 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22585 case OPC_DPAQ_SA_L_W
:
22587 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22589 case OPC_DPSQ_SA_L_W
:
22591 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22593 case OPC_MAQ_S_W_PHL
:
22595 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22597 case OPC_MAQ_S_W_PHR
:
22599 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22601 case OPC_MAQ_SA_W_PHL
:
22603 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22605 case OPC_MAQ_SA_W_PHR
:
22607 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22609 case OPC_MULSA_W_PH
:
22611 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22615 #ifdef TARGET_MIPS64
22616 case OPC_DPAQ_W_QH_DSP
:
22618 int ac
= ret
& 0x03;
22619 tcg_gen_movi_i32(t0
, ac
);
22624 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22628 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22632 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22636 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22640 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22642 case OPC_DPAQ_S_W_QH
:
22644 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22646 case OPC_DPAQ_SA_L_PW
:
22648 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22650 case OPC_DPAU_H_OBL
:
22652 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22654 case OPC_DPAU_H_OBR
:
22656 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22660 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22662 case OPC_DPSQ_S_W_QH
:
22664 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22666 case OPC_DPSQ_SA_L_PW
:
22668 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22670 case OPC_DPSU_H_OBL
:
22672 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22674 case OPC_DPSU_H_OBR
:
22676 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22678 case OPC_MAQ_S_L_PWL
:
22680 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22682 case OPC_MAQ_S_L_PWR
:
22684 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22686 case OPC_MAQ_S_W_QHLL
:
22688 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22690 case OPC_MAQ_SA_W_QHLL
:
22692 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22694 case OPC_MAQ_S_W_QHLR
:
22696 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22698 case OPC_MAQ_SA_W_QHLR
:
22700 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22702 case OPC_MAQ_S_W_QHRL
:
22704 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22706 case OPC_MAQ_SA_W_QHRL
:
22708 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22710 case OPC_MAQ_S_W_QHRR
:
22712 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22714 case OPC_MAQ_SA_W_QHRR
:
22716 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22718 case OPC_MULSAQ_S_L_PW
:
22720 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22722 case OPC_MULSAQ_S_W_QH
:
22724 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22730 case OPC_ADDU_QB_DSP
:
22732 case OPC_MULEU_S_PH_QBL
:
22734 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22736 case OPC_MULEU_S_PH_QBR
:
22738 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22740 case OPC_MULQ_RS_PH
:
22742 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22744 case OPC_MULEQ_S_W_PHL
:
22746 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22748 case OPC_MULEQ_S_W_PHR
:
22750 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22752 case OPC_MULQ_S_PH
:
22754 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22758 #ifdef TARGET_MIPS64
22759 case OPC_ADDU_OB_DSP
:
22761 case OPC_MULEQ_S_PW_QHL
:
22763 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22765 case OPC_MULEQ_S_PW_QHR
:
22767 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22769 case OPC_MULEU_S_QH_OBL
:
22771 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22773 case OPC_MULEU_S_QH_OBR
:
22775 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22777 case OPC_MULQ_RS_QH
:
22779 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22786 tcg_temp_free_i32(t0
);
22787 tcg_temp_free(v1_t
);
22788 tcg_temp_free(v2_t
);
22791 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22799 /* Treat as NOP. */
22803 t0
= tcg_temp_new();
22804 val_t
= tcg_temp_new();
22805 gen_load_gpr(val_t
, val
);
22808 case OPC_ABSQ_S_PH_DSP
:
22812 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22817 target_long result
;
22818 imm
= (ctx
->opcode
>> 16) & 0xFF;
22819 result
= (uint32_t)imm
<< 24 |
22820 (uint32_t)imm
<< 16 |
22821 (uint32_t)imm
<< 8 |
22823 result
= (int32_t)result
;
22824 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
22829 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22830 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22831 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22832 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22833 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22834 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22839 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22840 imm
= (int16_t)(imm
<< 6) >> 6;
22841 tcg_gen_movi_tl(cpu_gpr
[ret
], \
22842 (target_long
)((int32_t)imm
<< 16 | \
22848 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22849 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22850 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22851 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22855 #ifdef TARGET_MIPS64
22856 case OPC_ABSQ_S_QH_DSP
:
22863 imm
= (ctx
->opcode
>> 16) & 0xFF;
22864 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
22865 temp
= (temp
<< 16) | temp
;
22866 temp
= (temp
<< 32) | temp
;
22867 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22875 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22876 imm
= (int16_t)(imm
<< 6) >> 6;
22877 temp
= ((target_long
)imm
<< 32) \
22878 | ((target_long
)imm
& 0xFFFFFFFF);
22879 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22887 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22888 imm
= (int16_t)(imm
<< 6) >> 6;
22890 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
22891 ((uint64_t)(uint16_t)imm
<< 32) |
22892 ((uint64_t)(uint16_t)imm
<< 16) |
22893 (uint64_t)(uint16_t)imm
;
22894 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22899 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22900 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22901 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22902 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22903 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22904 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22905 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22909 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
22910 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22911 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22915 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22916 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22917 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22918 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22919 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22926 tcg_temp_free(val_t
);
22929 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
22930 uint32_t op1
, uint32_t op2
,
22931 int ret
, int v1
, int v2
, int check_ret
)
22937 if ((ret
== 0) && (check_ret
== 1)) {
22938 /* Treat as NOP. */
22942 t1
= tcg_temp_new();
22943 v1_t
= tcg_temp_new();
22944 v2_t
= tcg_temp_new();
22946 gen_load_gpr(v1_t
, v1
);
22947 gen_load_gpr(v2_t
, v2
);
22950 case OPC_CMPU_EQ_QB_DSP
:
22952 case OPC_CMPU_EQ_QB
:
22954 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
22956 case OPC_CMPU_LT_QB
:
22958 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
22960 case OPC_CMPU_LE_QB
:
22962 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
22964 case OPC_CMPGU_EQ_QB
:
22966 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22968 case OPC_CMPGU_LT_QB
:
22970 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22972 case OPC_CMPGU_LE_QB
:
22974 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22976 case OPC_CMPGDU_EQ_QB
:
22978 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
22979 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22980 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22981 tcg_gen_shli_tl(t1
, t1
, 24);
22982 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22984 case OPC_CMPGDU_LT_QB
:
22986 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
22987 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22988 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22989 tcg_gen_shli_tl(t1
, t1
, 24);
22990 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22992 case OPC_CMPGDU_LE_QB
:
22994 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
22995 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22996 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22997 tcg_gen_shli_tl(t1
, t1
, 24);
22998 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23000 case OPC_CMP_EQ_PH
:
23002 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23004 case OPC_CMP_LT_PH
:
23006 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23008 case OPC_CMP_LE_PH
:
23010 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23014 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23018 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23020 case OPC_PACKRL_PH
:
23022 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23026 #ifdef TARGET_MIPS64
23027 case OPC_CMPU_EQ_OB_DSP
:
23029 case OPC_CMP_EQ_PW
:
23031 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23033 case OPC_CMP_LT_PW
:
23035 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23037 case OPC_CMP_LE_PW
:
23039 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23041 case OPC_CMP_EQ_QH
:
23043 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23045 case OPC_CMP_LT_QH
:
23047 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23049 case OPC_CMP_LE_QH
:
23051 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23053 case OPC_CMPGDU_EQ_OB
:
23055 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23057 case OPC_CMPGDU_LT_OB
:
23059 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23061 case OPC_CMPGDU_LE_OB
:
23063 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23065 case OPC_CMPGU_EQ_OB
:
23067 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23069 case OPC_CMPGU_LT_OB
:
23071 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23073 case OPC_CMPGU_LE_OB
:
23075 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23077 case OPC_CMPU_EQ_OB
:
23079 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23081 case OPC_CMPU_LT_OB
:
23083 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23085 case OPC_CMPU_LE_OB
:
23087 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23089 case OPC_PACKRL_PW
:
23091 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23095 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23099 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23103 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23111 tcg_temp_free(v1_t
);
23112 tcg_temp_free(v2_t
);
23115 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23116 uint32_t op1
, int rt
, int rs
, int sa
)
23123 /* Treat as NOP. */
23127 t0
= tcg_temp_new();
23128 gen_load_gpr(t0
, rs
);
23131 case OPC_APPEND_DSP
:
23132 switch (MASK_APPEND(ctx
->opcode
)) {
23135 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23137 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23141 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23142 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23143 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23144 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23146 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23150 if (sa
!= 0 && sa
!= 2) {
23151 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23152 tcg_gen_ext32u_tl(t0
, t0
);
23153 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23154 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23156 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23158 default: /* Invalid */
23159 MIPS_INVAL("MASK APPEND");
23160 generate_exception_end(ctx
, EXCP_RI
);
23164 #ifdef TARGET_MIPS64
23165 case OPC_DAPPEND_DSP
:
23166 switch (MASK_DAPPEND(ctx
->opcode
)) {
23169 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23173 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23174 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23175 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23179 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23180 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23181 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23186 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23187 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23188 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23189 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23192 default: /* Invalid */
23193 MIPS_INVAL("MASK DAPPEND");
23194 generate_exception_end(ctx
, EXCP_RI
);
23203 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23204 int ret
, int v1
, int v2
, int check_ret
)
23213 if ((ret
== 0) && (check_ret
== 1)) {
23214 /* Treat as NOP. */
23218 t0
= tcg_temp_new();
23219 t1
= tcg_temp_new();
23220 v1_t
= tcg_temp_new();
23221 v2_t
= tcg_temp_new();
23223 gen_load_gpr(v1_t
, v1
);
23224 gen_load_gpr(v2_t
, v2
);
23227 case OPC_EXTR_W_DSP
:
23231 tcg_gen_movi_tl(t0
, v2
);
23232 tcg_gen_movi_tl(t1
, v1
);
23233 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23236 tcg_gen_movi_tl(t0
, v2
);
23237 tcg_gen_movi_tl(t1
, v1
);
23238 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23240 case OPC_EXTR_RS_W
:
23241 tcg_gen_movi_tl(t0
, v2
);
23242 tcg_gen_movi_tl(t1
, v1
);
23243 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23246 tcg_gen_movi_tl(t0
, v2
);
23247 tcg_gen_movi_tl(t1
, v1
);
23248 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23250 case OPC_EXTRV_S_H
:
23251 tcg_gen_movi_tl(t0
, v2
);
23252 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23255 tcg_gen_movi_tl(t0
, v2
);
23256 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23258 case OPC_EXTRV_R_W
:
23259 tcg_gen_movi_tl(t0
, v2
);
23260 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23262 case OPC_EXTRV_RS_W
:
23263 tcg_gen_movi_tl(t0
, v2
);
23264 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23267 tcg_gen_movi_tl(t0
, v2
);
23268 tcg_gen_movi_tl(t1
, v1
);
23269 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23272 tcg_gen_movi_tl(t0
, v2
);
23273 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23276 tcg_gen_movi_tl(t0
, v2
);
23277 tcg_gen_movi_tl(t1
, v1
);
23278 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23281 tcg_gen_movi_tl(t0
, v2
);
23282 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23285 imm
= (ctx
->opcode
>> 20) & 0x3F;
23286 tcg_gen_movi_tl(t0
, ret
);
23287 tcg_gen_movi_tl(t1
, imm
);
23288 gen_helper_shilo(t0
, t1
, cpu_env
);
23291 tcg_gen_movi_tl(t0
, ret
);
23292 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23295 tcg_gen_movi_tl(t0
, ret
);
23296 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23299 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23300 tcg_gen_movi_tl(t0
, imm
);
23301 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23304 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23305 tcg_gen_movi_tl(t0
, imm
);
23306 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23310 #ifdef TARGET_MIPS64
23311 case OPC_DEXTR_W_DSP
:
23315 tcg_gen_movi_tl(t0
, ret
);
23316 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23320 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23321 int ac
= (ctx
->opcode
>> 11) & 0x03;
23322 tcg_gen_movi_tl(t0
, shift
);
23323 tcg_gen_movi_tl(t1
, ac
);
23324 gen_helper_dshilo(t0
, t1
, cpu_env
);
23329 int ac
= (ctx
->opcode
>> 11) & 0x03;
23330 tcg_gen_movi_tl(t0
, ac
);
23331 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23335 tcg_gen_movi_tl(t0
, v2
);
23336 tcg_gen_movi_tl(t1
, v1
);
23338 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23341 tcg_gen_movi_tl(t0
, v2
);
23342 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23345 tcg_gen_movi_tl(t0
, v2
);
23346 tcg_gen_movi_tl(t1
, v1
);
23347 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23350 tcg_gen_movi_tl(t0
, v2
);
23351 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23354 tcg_gen_movi_tl(t0
, v2
);
23355 tcg_gen_movi_tl(t1
, v1
);
23356 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23358 case OPC_DEXTR_R_L
:
23359 tcg_gen_movi_tl(t0
, v2
);
23360 tcg_gen_movi_tl(t1
, v1
);
23361 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23363 case OPC_DEXTR_RS_L
:
23364 tcg_gen_movi_tl(t0
, v2
);
23365 tcg_gen_movi_tl(t1
, v1
);
23366 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23369 tcg_gen_movi_tl(t0
, v2
);
23370 tcg_gen_movi_tl(t1
, v1
);
23371 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23373 case OPC_DEXTR_R_W
:
23374 tcg_gen_movi_tl(t0
, v2
);
23375 tcg_gen_movi_tl(t1
, v1
);
23376 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23378 case OPC_DEXTR_RS_W
:
23379 tcg_gen_movi_tl(t0
, v2
);
23380 tcg_gen_movi_tl(t1
, v1
);
23381 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23383 case OPC_DEXTR_S_H
:
23384 tcg_gen_movi_tl(t0
, v2
);
23385 tcg_gen_movi_tl(t1
, v1
);
23386 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23388 case OPC_DEXTRV_S_H
:
23389 tcg_gen_movi_tl(t0
, v2
);
23390 tcg_gen_movi_tl(t1
, v1
);
23391 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23394 tcg_gen_movi_tl(t0
, v2
);
23395 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23397 case OPC_DEXTRV_R_L
:
23398 tcg_gen_movi_tl(t0
, v2
);
23399 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23401 case OPC_DEXTRV_RS_L
:
23402 tcg_gen_movi_tl(t0
, v2
);
23403 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23406 tcg_gen_movi_tl(t0
, v2
);
23407 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23409 case OPC_DEXTRV_R_W
:
23410 tcg_gen_movi_tl(t0
, v2
);
23411 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23413 case OPC_DEXTRV_RS_W
:
23414 tcg_gen_movi_tl(t0
, v2
);
23415 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23424 tcg_temp_free(v1_t
);
23425 tcg_temp_free(v2_t
);
23428 /* End MIPSDSP functions. */
23430 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23432 int rs
, rt
, rd
, sa
;
23435 rs
= (ctx
->opcode
>> 21) & 0x1f;
23436 rt
= (ctx
->opcode
>> 16) & 0x1f;
23437 rd
= (ctx
->opcode
>> 11) & 0x1f;
23438 sa
= (ctx
->opcode
>> 6) & 0x1f;
23440 op1
= MASK_SPECIAL(ctx
->opcode
);
23443 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23449 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23459 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23462 MIPS_INVAL("special_r6 muldiv");
23463 generate_exception_end(ctx
, EXCP_RI
);
23469 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23473 if (rt
== 0 && sa
== 1) {
23474 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23475 We need additionally to check other fields */
23476 gen_cl(ctx
, op1
, rd
, rs
);
23478 generate_exception_end(ctx
, EXCP_RI
);
23482 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23483 gen_helper_do_semihosting(cpu_env
);
23485 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23486 generate_exception_end(ctx
, EXCP_RI
);
23488 generate_exception_end(ctx
, EXCP_DBp
);
23492 #if defined(TARGET_MIPS64)
23494 check_mips_64(ctx
);
23495 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23499 if (rt
== 0 && sa
== 1) {
23500 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23501 We need additionally to check other fields */
23502 check_mips_64(ctx
);
23503 gen_cl(ctx
, op1
, rd
, rs
);
23505 generate_exception_end(ctx
, EXCP_RI
);
23513 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23523 check_mips_64(ctx
);
23524 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23527 MIPS_INVAL("special_r6 muldiv");
23528 generate_exception_end(ctx
, EXCP_RI
);
23533 default: /* Invalid */
23534 MIPS_INVAL("special_r6");
23535 generate_exception_end(ctx
, EXCP_RI
);
23540 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23542 int rs
, rt
, rd
, sa
;
23545 rs
= (ctx
->opcode
>> 21) & 0x1f;
23546 rt
= (ctx
->opcode
>> 16) & 0x1f;
23547 rd
= (ctx
->opcode
>> 11) & 0x1f;
23548 sa
= (ctx
->opcode
>> 6) & 0x1f;
23550 op1
= MASK_SPECIAL(ctx
->opcode
);
23552 case OPC_MOVN
: /* Conditional move */
23554 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23555 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
23556 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23558 case OPC_MFHI
: /* Move from HI/LO */
23560 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23563 case OPC_MTLO
: /* Move to HI/LO */
23564 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23567 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23568 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23569 check_cp1_enabled(ctx
);
23570 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23571 (ctx
->opcode
>> 16) & 1);
23573 generate_exception_err(ctx
, EXCP_CpU
, 1);
23579 check_insn(ctx
, INSN_VR54XX
);
23580 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23581 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23582 } else if (ctx
->insn_flags
& INSN_R5900
) {
23583 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23585 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23590 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23592 #if defined(TARGET_MIPS64)
23597 check_insn(ctx
, ISA_MIPS3
);
23598 check_mips_64(ctx
);
23599 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23603 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23606 #ifdef MIPS_STRICT_STANDARD
23607 MIPS_INVAL("SPIM");
23608 generate_exception_end(ctx
, EXCP_RI
);
23610 /* Implemented as RI exception for now. */
23611 MIPS_INVAL("spim (unofficial)");
23612 generate_exception_end(ctx
, EXCP_RI
);
23615 default: /* Invalid */
23616 MIPS_INVAL("special_legacy");
23617 generate_exception_end(ctx
, EXCP_RI
);
23622 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23624 int rs
, rt
, rd
, sa
;
23627 rs
= (ctx
->opcode
>> 21) & 0x1f;
23628 rt
= (ctx
->opcode
>> 16) & 0x1f;
23629 rd
= (ctx
->opcode
>> 11) & 0x1f;
23630 sa
= (ctx
->opcode
>> 6) & 0x1f;
23632 op1
= MASK_SPECIAL(ctx
->opcode
);
23634 case OPC_SLL
: /* Shift with immediate */
23635 if (sa
== 5 && rd
== 0 &&
23636 rs
== 0 && rt
== 0) { /* PAUSE */
23637 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23638 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23639 generate_exception_end(ctx
, EXCP_RI
);
23645 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23648 switch ((ctx
->opcode
>> 21) & 0x1f) {
23650 /* rotr is decoded as srl on non-R2 CPUs */
23651 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23656 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23659 generate_exception_end(ctx
, EXCP_RI
);
23667 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23669 case OPC_SLLV
: /* Shifts */
23671 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23674 switch ((ctx
->opcode
>> 6) & 0x1f) {
23676 /* rotrv is decoded as srlv on non-R2 CPUs */
23677 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23682 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23685 generate_exception_end(ctx
, EXCP_RI
);
23689 case OPC_SLT
: /* Set on less than */
23691 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23693 case OPC_AND
: /* Logic*/
23697 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23700 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23702 case OPC_TGE
: /* Traps */
23708 check_insn(ctx
, ISA_MIPS2
);
23709 gen_trap(ctx
, op1
, rs
, rt
, -1);
23711 case OPC_LSA
: /* OPC_PMON */
23712 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23713 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23714 decode_opc_special_r6(env
, ctx
);
23716 /* Pmon entry point, also R4010 selsl */
23717 #ifdef MIPS_STRICT_STANDARD
23718 MIPS_INVAL("PMON / selsl");
23719 generate_exception_end(ctx
, EXCP_RI
);
23721 gen_helper_0e0i(pmon
, sa
);
23726 generate_exception_end(ctx
, EXCP_SYSCALL
);
23729 generate_exception_end(ctx
, EXCP_BREAK
);
23732 check_insn(ctx
, ISA_MIPS2
);
23733 gen_sync(extract32(ctx
->opcode
, 6, 5));
23736 #if defined(TARGET_MIPS64)
23737 /* MIPS64 specific opcodes */
23742 check_insn(ctx
, ISA_MIPS3
);
23743 check_mips_64(ctx
);
23744 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23747 switch ((ctx
->opcode
>> 21) & 0x1f) {
23749 /* drotr is decoded as dsrl on non-R2 CPUs */
23750 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23755 check_insn(ctx
, ISA_MIPS3
);
23756 check_mips_64(ctx
);
23757 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23760 generate_exception_end(ctx
, EXCP_RI
);
23765 switch ((ctx
->opcode
>> 21) & 0x1f) {
23767 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23768 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23773 check_insn(ctx
, ISA_MIPS3
);
23774 check_mips_64(ctx
);
23775 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23778 generate_exception_end(ctx
, EXCP_RI
);
23786 check_insn(ctx
, ISA_MIPS3
);
23787 check_mips_64(ctx
);
23788 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23792 check_insn(ctx
, ISA_MIPS3
);
23793 check_mips_64(ctx
);
23794 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23797 switch ((ctx
->opcode
>> 6) & 0x1f) {
23799 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23800 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23805 check_insn(ctx
, ISA_MIPS3
);
23806 check_mips_64(ctx
);
23807 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23810 generate_exception_end(ctx
, EXCP_RI
);
23815 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23816 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23817 decode_opc_special_r6(env
, ctx
);
23822 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23823 decode_opc_special_r6(env
, ctx
);
23825 decode_opc_special_legacy(env
, ctx
);
23830 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23837 rs
= (ctx
->opcode
>> 21) & 0x1f;
23838 rt
= (ctx
->opcode
>> 16) & 0x1f;
23839 rd
= (ctx
->opcode
>> 11) & 0x1f;
23841 op1
= MASK_SPECIAL2(ctx
->opcode
);
23843 case OPC_MADD
: /* Multiply and add/sub */
23847 check_insn(ctx
, ISA_MIPS32
);
23848 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23851 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23854 case OPC_DIVU_G_2F
:
23855 case OPC_MULT_G_2F
:
23856 case OPC_MULTU_G_2F
:
23858 case OPC_MODU_G_2F
:
23859 check_insn(ctx
, INSN_LOONGSON2F
);
23860 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23864 check_insn(ctx
, ISA_MIPS32
);
23865 gen_cl(ctx
, op1
, rd
, rs
);
23868 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23869 gen_helper_do_semihosting(cpu_env
);
23871 /* XXX: not clear which exception should be raised
23872 * when in debug mode...
23874 check_insn(ctx
, ISA_MIPS32
);
23875 generate_exception_end(ctx
, EXCP_DBp
);
23878 #if defined(TARGET_MIPS64)
23881 check_insn(ctx
, ISA_MIPS64
);
23882 check_mips_64(ctx
);
23883 gen_cl(ctx
, op1
, rd
, rs
);
23885 case OPC_DMULT_G_2F
:
23886 case OPC_DMULTU_G_2F
:
23887 case OPC_DDIV_G_2F
:
23888 case OPC_DDIVU_G_2F
:
23889 case OPC_DMOD_G_2F
:
23890 case OPC_DMODU_G_2F
:
23891 check_insn(ctx
, INSN_LOONGSON2F
);
23892 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23895 default: /* Invalid */
23896 MIPS_INVAL("special2_legacy");
23897 generate_exception_end(ctx
, EXCP_RI
);
23902 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23904 int rs
, rt
, rd
, sa
;
23908 rs
= (ctx
->opcode
>> 21) & 0x1f;
23909 rt
= (ctx
->opcode
>> 16) & 0x1f;
23910 rd
= (ctx
->opcode
>> 11) & 0x1f;
23911 sa
= (ctx
->opcode
>> 6) & 0x1f;
23912 imm
= (int16_t)ctx
->opcode
>> 7;
23914 op1
= MASK_SPECIAL3(ctx
->opcode
);
23918 /* hint codes 24-31 are reserved and signal RI */
23919 generate_exception_end(ctx
, EXCP_RI
);
23921 /* Treat as NOP. */
23924 check_cp0_enabled(ctx
);
23925 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23926 gen_cache_operation(ctx
, rt
, rs
, imm
);
23930 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23933 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23938 /* Treat as NOP. */
23941 op2
= MASK_BSHFL(ctx
->opcode
);
23944 case OPC_ALIGN_END
:
23945 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
23948 gen_bitswap(ctx
, op2
, rd
, rt
);
23953 #if defined(TARGET_MIPS64)
23955 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23958 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23961 check_mips_64(ctx
);
23964 /* Treat as NOP. */
23967 op2
= MASK_DBSHFL(ctx
->opcode
);
23970 case OPC_DALIGN_END
:
23971 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
23974 gen_bitswap(ctx
, op2
, rd
, rt
);
23981 default: /* Invalid */
23982 MIPS_INVAL("special3_r6");
23983 generate_exception_end(ctx
, EXCP_RI
);
23988 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23993 rs
= (ctx
->opcode
>> 21) & 0x1f;
23994 rt
= (ctx
->opcode
>> 16) & 0x1f;
23995 rd
= (ctx
->opcode
>> 11) & 0x1f;
23997 op1
= MASK_SPECIAL3(ctx
->opcode
);
24000 case OPC_DIVU_G_2E
:
24002 case OPC_MODU_G_2E
:
24003 case OPC_MULT_G_2E
:
24004 case OPC_MULTU_G_2E
:
24005 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
24006 * the same mask and op1. */
24007 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
24008 op2
= MASK_ADDUH_QB(ctx
->opcode
);
24011 case OPC_ADDUH_R_QB
:
24013 case OPC_ADDQH_R_PH
:
24015 case OPC_ADDQH_R_W
:
24017 case OPC_SUBUH_R_QB
:
24019 case OPC_SUBQH_R_PH
:
24021 case OPC_SUBQH_R_W
:
24022 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24027 case OPC_MULQ_RS_W
:
24028 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24031 MIPS_INVAL("MASK ADDUH.QB");
24032 generate_exception_end(ctx
, EXCP_RI
);
24035 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
24036 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24038 generate_exception_end(ctx
, EXCP_RI
);
24042 op2
= MASK_LX(ctx
->opcode
);
24044 #if defined(TARGET_MIPS64)
24050 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
24052 default: /* Invalid */
24053 MIPS_INVAL("MASK LX");
24054 generate_exception_end(ctx
, EXCP_RI
);
24058 case OPC_ABSQ_S_PH_DSP
:
24059 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
24061 case OPC_ABSQ_S_QB
:
24062 case OPC_ABSQ_S_PH
:
24064 case OPC_PRECEQ_W_PHL
:
24065 case OPC_PRECEQ_W_PHR
:
24066 case OPC_PRECEQU_PH_QBL
:
24067 case OPC_PRECEQU_PH_QBR
:
24068 case OPC_PRECEQU_PH_QBLA
:
24069 case OPC_PRECEQU_PH_QBRA
:
24070 case OPC_PRECEU_PH_QBL
:
24071 case OPC_PRECEU_PH_QBR
:
24072 case OPC_PRECEU_PH_QBLA
:
24073 case OPC_PRECEU_PH_QBRA
:
24074 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24081 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24084 MIPS_INVAL("MASK ABSQ_S.PH");
24085 generate_exception_end(ctx
, EXCP_RI
);
24089 case OPC_ADDU_QB_DSP
:
24090 op2
= MASK_ADDU_QB(ctx
->opcode
);
24093 case OPC_ADDQ_S_PH
:
24096 case OPC_ADDU_S_QB
:
24098 case OPC_ADDU_S_PH
:
24100 case OPC_SUBQ_S_PH
:
24103 case OPC_SUBU_S_QB
:
24105 case OPC_SUBU_S_PH
:
24109 case OPC_RADDU_W_QB
:
24110 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24112 case OPC_MULEU_S_PH_QBL
:
24113 case OPC_MULEU_S_PH_QBR
:
24114 case OPC_MULQ_RS_PH
:
24115 case OPC_MULEQ_S_W_PHL
:
24116 case OPC_MULEQ_S_W_PHR
:
24117 case OPC_MULQ_S_PH
:
24118 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24120 default: /* Invalid */
24121 MIPS_INVAL("MASK ADDU.QB");
24122 generate_exception_end(ctx
, EXCP_RI
);
24127 case OPC_CMPU_EQ_QB_DSP
:
24128 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
24130 case OPC_PRECR_SRA_PH_W
:
24131 case OPC_PRECR_SRA_R_PH_W
:
24132 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24134 case OPC_PRECR_QB_PH
:
24135 case OPC_PRECRQ_QB_PH
:
24136 case OPC_PRECRQ_PH_W
:
24137 case OPC_PRECRQ_RS_PH_W
:
24138 case OPC_PRECRQU_S_QB_PH
:
24139 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24141 case OPC_CMPU_EQ_QB
:
24142 case OPC_CMPU_LT_QB
:
24143 case OPC_CMPU_LE_QB
:
24144 case OPC_CMP_EQ_PH
:
24145 case OPC_CMP_LT_PH
:
24146 case OPC_CMP_LE_PH
:
24147 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24149 case OPC_CMPGU_EQ_QB
:
24150 case OPC_CMPGU_LT_QB
:
24151 case OPC_CMPGU_LE_QB
:
24152 case OPC_CMPGDU_EQ_QB
:
24153 case OPC_CMPGDU_LT_QB
:
24154 case OPC_CMPGDU_LE_QB
:
24157 case OPC_PACKRL_PH
:
24158 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24160 default: /* Invalid */
24161 MIPS_INVAL("MASK CMPU.EQ.QB");
24162 generate_exception_end(ctx
, EXCP_RI
);
24166 case OPC_SHLL_QB_DSP
:
24167 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24169 case OPC_DPA_W_PH_DSP
:
24170 op2
= MASK_DPA_W_PH(ctx
->opcode
);
24172 case OPC_DPAU_H_QBL
:
24173 case OPC_DPAU_H_QBR
:
24174 case OPC_DPSU_H_QBL
:
24175 case OPC_DPSU_H_QBR
:
24177 case OPC_DPAX_W_PH
:
24178 case OPC_DPAQ_S_W_PH
:
24179 case OPC_DPAQX_S_W_PH
:
24180 case OPC_DPAQX_SA_W_PH
:
24182 case OPC_DPSX_W_PH
:
24183 case OPC_DPSQ_S_W_PH
:
24184 case OPC_DPSQX_S_W_PH
:
24185 case OPC_DPSQX_SA_W_PH
:
24186 case OPC_MULSAQ_S_W_PH
:
24187 case OPC_DPAQ_SA_L_W
:
24188 case OPC_DPSQ_SA_L_W
:
24189 case OPC_MAQ_S_W_PHL
:
24190 case OPC_MAQ_S_W_PHR
:
24191 case OPC_MAQ_SA_W_PHL
:
24192 case OPC_MAQ_SA_W_PHR
:
24193 case OPC_MULSA_W_PH
:
24194 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24196 default: /* Invalid */
24197 MIPS_INVAL("MASK DPAW.PH");
24198 generate_exception_end(ctx
, EXCP_RI
);
24203 op2
= MASK_INSV(ctx
->opcode
);
24214 t0
= tcg_temp_new();
24215 t1
= tcg_temp_new();
24217 gen_load_gpr(t0
, rt
);
24218 gen_load_gpr(t1
, rs
);
24220 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24226 default: /* Invalid */
24227 MIPS_INVAL("MASK INSV");
24228 generate_exception_end(ctx
, EXCP_RI
);
24232 case OPC_APPEND_DSP
:
24233 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24235 case OPC_EXTR_W_DSP
:
24236 op2
= MASK_EXTR_W(ctx
->opcode
);
24240 case OPC_EXTR_RS_W
:
24242 case OPC_EXTRV_S_H
:
24244 case OPC_EXTRV_R_W
:
24245 case OPC_EXTRV_RS_W
:
24250 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24253 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24259 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24261 default: /* Invalid */
24262 MIPS_INVAL("MASK EXTR.W");
24263 generate_exception_end(ctx
, EXCP_RI
);
24267 #if defined(TARGET_MIPS64)
24268 case OPC_DDIV_G_2E
:
24269 case OPC_DDIVU_G_2E
:
24270 case OPC_DMULT_G_2E
:
24271 case OPC_DMULTU_G_2E
:
24272 case OPC_DMOD_G_2E
:
24273 case OPC_DMODU_G_2E
:
24274 check_insn(ctx
, INSN_LOONGSON2E
);
24275 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24277 case OPC_ABSQ_S_QH_DSP
:
24278 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
24280 case OPC_PRECEQ_L_PWL
:
24281 case OPC_PRECEQ_L_PWR
:
24282 case OPC_PRECEQ_PW_QHL
:
24283 case OPC_PRECEQ_PW_QHR
:
24284 case OPC_PRECEQ_PW_QHLA
:
24285 case OPC_PRECEQ_PW_QHRA
:
24286 case OPC_PRECEQU_QH_OBL
:
24287 case OPC_PRECEQU_QH_OBR
:
24288 case OPC_PRECEQU_QH_OBLA
:
24289 case OPC_PRECEQU_QH_OBRA
:
24290 case OPC_PRECEU_QH_OBL
:
24291 case OPC_PRECEU_QH_OBR
:
24292 case OPC_PRECEU_QH_OBLA
:
24293 case OPC_PRECEU_QH_OBRA
:
24294 case OPC_ABSQ_S_OB
:
24295 case OPC_ABSQ_S_PW
:
24296 case OPC_ABSQ_S_QH
:
24297 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24305 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24307 default: /* Invalid */
24308 MIPS_INVAL("MASK ABSQ_S.QH");
24309 generate_exception_end(ctx
, EXCP_RI
);
24313 case OPC_ADDU_OB_DSP
:
24314 op2
= MASK_ADDU_OB(ctx
->opcode
);
24316 case OPC_RADDU_L_OB
:
24318 case OPC_SUBQ_S_PW
:
24320 case OPC_SUBQ_S_QH
:
24322 case OPC_SUBU_S_OB
:
24324 case OPC_SUBU_S_QH
:
24326 case OPC_SUBUH_R_OB
:
24328 case OPC_ADDQ_S_PW
:
24330 case OPC_ADDQ_S_QH
:
24332 case OPC_ADDU_S_OB
:
24334 case OPC_ADDU_S_QH
:
24336 case OPC_ADDUH_R_OB
:
24337 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24339 case OPC_MULEQ_S_PW_QHL
:
24340 case OPC_MULEQ_S_PW_QHR
:
24341 case OPC_MULEU_S_QH_OBL
:
24342 case OPC_MULEU_S_QH_OBR
:
24343 case OPC_MULQ_RS_QH
:
24344 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24346 default: /* Invalid */
24347 MIPS_INVAL("MASK ADDU.OB");
24348 generate_exception_end(ctx
, EXCP_RI
);
24352 case OPC_CMPU_EQ_OB_DSP
:
24353 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
24355 case OPC_PRECR_SRA_QH_PW
:
24356 case OPC_PRECR_SRA_R_QH_PW
:
24357 /* Return value is rt. */
24358 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24360 case OPC_PRECR_OB_QH
:
24361 case OPC_PRECRQ_OB_QH
:
24362 case OPC_PRECRQ_PW_L
:
24363 case OPC_PRECRQ_QH_PW
:
24364 case OPC_PRECRQ_RS_QH_PW
:
24365 case OPC_PRECRQU_S_OB_QH
:
24366 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24368 case OPC_CMPU_EQ_OB
:
24369 case OPC_CMPU_LT_OB
:
24370 case OPC_CMPU_LE_OB
:
24371 case OPC_CMP_EQ_QH
:
24372 case OPC_CMP_LT_QH
:
24373 case OPC_CMP_LE_QH
:
24374 case OPC_CMP_EQ_PW
:
24375 case OPC_CMP_LT_PW
:
24376 case OPC_CMP_LE_PW
:
24377 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24379 case OPC_CMPGDU_EQ_OB
:
24380 case OPC_CMPGDU_LT_OB
:
24381 case OPC_CMPGDU_LE_OB
:
24382 case OPC_CMPGU_EQ_OB
:
24383 case OPC_CMPGU_LT_OB
:
24384 case OPC_CMPGU_LE_OB
:
24385 case OPC_PACKRL_PW
:
24389 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24391 default: /* Invalid */
24392 MIPS_INVAL("MASK CMPU_EQ.OB");
24393 generate_exception_end(ctx
, EXCP_RI
);
24397 case OPC_DAPPEND_DSP
:
24398 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24400 case OPC_DEXTR_W_DSP
:
24401 op2
= MASK_DEXTR_W(ctx
->opcode
);
24408 case OPC_DEXTR_R_L
:
24409 case OPC_DEXTR_RS_L
:
24411 case OPC_DEXTR_R_W
:
24412 case OPC_DEXTR_RS_W
:
24413 case OPC_DEXTR_S_H
:
24415 case OPC_DEXTRV_R_L
:
24416 case OPC_DEXTRV_RS_L
:
24417 case OPC_DEXTRV_S_H
:
24419 case OPC_DEXTRV_R_W
:
24420 case OPC_DEXTRV_RS_W
:
24421 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24426 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24428 default: /* Invalid */
24429 MIPS_INVAL("MASK EXTR.W");
24430 generate_exception_end(ctx
, EXCP_RI
);
24434 case OPC_DPAQ_W_QH_DSP
:
24435 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
24437 case OPC_DPAU_H_OBL
:
24438 case OPC_DPAU_H_OBR
:
24439 case OPC_DPSU_H_OBL
:
24440 case OPC_DPSU_H_OBR
:
24442 case OPC_DPAQ_S_W_QH
:
24444 case OPC_DPSQ_S_W_QH
:
24445 case OPC_MULSAQ_S_W_QH
:
24446 case OPC_DPAQ_SA_L_PW
:
24447 case OPC_DPSQ_SA_L_PW
:
24448 case OPC_MULSAQ_S_L_PW
:
24449 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24451 case OPC_MAQ_S_W_QHLL
:
24452 case OPC_MAQ_S_W_QHLR
:
24453 case OPC_MAQ_S_W_QHRL
:
24454 case OPC_MAQ_S_W_QHRR
:
24455 case OPC_MAQ_SA_W_QHLL
:
24456 case OPC_MAQ_SA_W_QHLR
:
24457 case OPC_MAQ_SA_W_QHRL
:
24458 case OPC_MAQ_SA_W_QHRR
:
24459 case OPC_MAQ_S_L_PWL
:
24460 case OPC_MAQ_S_L_PWR
:
24465 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24467 default: /* Invalid */
24468 MIPS_INVAL("MASK DPAQ.W.QH");
24469 generate_exception_end(ctx
, EXCP_RI
);
24473 case OPC_DINSV_DSP
:
24474 op2
= MASK_INSV(ctx
->opcode
);
24485 t0
= tcg_temp_new();
24486 t1
= tcg_temp_new();
24488 gen_load_gpr(t0
, rt
);
24489 gen_load_gpr(t1
, rs
);
24491 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24497 default: /* Invalid */
24498 MIPS_INVAL("MASK DINSV");
24499 generate_exception_end(ctx
, EXCP_RI
);
24503 case OPC_SHLL_OB_DSP
:
24504 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24507 default: /* Invalid */
24508 MIPS_INVAL("special3_legacy");
24509 generate_exception_end(ctx
, EXCP_RI
);
24514 static void decode_tx79_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
24516 uint32_t opc
= MASK_TX79_MMI0(ctx
->opcode
);
24519 case TX79_MMI0_PADDW
: /* TODO: TX79_MMI0_PADDW */
24520 case TX79_MMI0_PSUBW
: /* TODO: TX79_MMI0_PSUBW */
24521 case TX79_MMI0_PCGTW
: /* TODO: TX79_MMI0_PCGTW */
24522 case TX79_MMI0_PMAXW
: /* TODO: TX79_MMI0_PMAXW */
24523 case TX79_MMI0_PADDH
: /* TODO: TX79_MMI0_PADDH */
24524 case TX79_MMI0_PSUBH
: /* TODO: TX79_MMI0_PSUBH */
24525 case TX79_MMI0_PCGTH
: /* TODO: TX79_MMI0_PCGTH */
24526 case TX79_MMI0_PMAXH
: /* TODO: TX79_MMI0_PMAXH */
24527 case TX79_MMI0_PADDB
: /* TODO: TX79_MMI0_PADDB */
24528 case TX79_MMI0_PSUBB
: /* TODO: TX79_MMI0_PSUBB */
24529 case TX79_MMI0_PCGTB
: /* TODO: TX79_MMI0_PCGTB */
24530 case TX79_MMI0_PADDSW
: /* TODO: TX79_MMI0_PADDSW */
24531 case TX79_MMI0_PSUBSW
: /* TODO: TX79_MMI0_PSUBSW */
24532 case TX79_MMI0_PEXTLW
: /* TODO: TX79_MMI0_PEXTLW */
24533 case TX79_MMI0_PPACW
: /* TODO: TX79_MMI0_PPACW */
24534 case TX79_MMI0_PADDSH
: /* TODO: TX79_MMI0_PADDSH */
24535 case TX79_MMI0_PSUBSH
: /* TODO: TX79_MMI0_PSUBSH */
24536 case TX79_MMI0_PEXTLH
: /* TODO: TX79_MMI0_PEXTLH */
24537 case TX79_MMI0_PPACH
: /* TODO: TX79_MMI0_PPACH */
24538 case TX79_MMI0_PADDSB
: /* TODO: TX79_MMI0_PADDSB */
24539 case TX79_MMI0_PSUBSB
: /* TODO: TX79_MMI0_PSUBSB */
24540 case TX79_MMI0_PEXTLB
: /* TODO: TX79_MMI0_PEXTLB */
24541 case TX79_MMI0_PPACB
: /* TODO: TX79_MMI0_PPACB */
24542 case TX79_MMI0_PEXT5
: /* TODO: TX79_MMI0_PEXT5 */
24543 case TX79_MMI0_PPAC5
: /* TODO: TX79_MMI0_PPAC5 */
24544 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI0 */
24547 MIPS_INVAL("TX79 MMI class MMI0");
24548 generate_exception_end(ctx
, EXCP_RI
);
24553 static void decode_tx79_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
24555 uint32_t opc
= MASK_TX79_MMI1(ctx
->opcode
);
24558 case TX79_MMI1_PABSW
: /* TODO: TX79_MMI1_PABSW */
24559 case TX79_MMI1_PCEQW
: /* TODO: TX79_MMI1_PCEQW */
24560 case TX79_MMI1_PMINW
: /* TODO: TX79_MMI1_PMINW */
24561 case TX79_MMI1_PADSBH
: /* TODO: TX79_MMI1_PADSBH */
24562 case TX79_MMI1_PABSH
: /* TODO: TX79_MMI1_PABSH */
24563 case TX79_MMI1_PCEQH
: /* TODO: TX79_MMI1_PCEQH */
24564 case TX79_MMI1_PMINH
: /* TODO: TX79_MMI1_PMINH */
24565 case TX79_MMI1_PCEQB
: /* TODO: TX79_MMI1_PCEQB */
24566 case TX79_MMI1_PADDUW
: /* TODO: TX79_MMI1_PADDUW */
24567 case TX79_MMI1_PSUBUW
: /* TODO: TX79_MMI1_PSUBUW */
24568 case TX79_MMI1_PEXTUW
: /* TODO: TX79_MMI1_PEXTUW */
24569 case TX79_MMI1_PADDUH
: /* TODO: TX79_MMI1_PADDUH */
24570 case TX79_MMI1_PSUBUH
: /* TODO: TX79_MMI1_PSUBUH */
24571 case TX79_MMI1_PEXTUH
: /* TODO: TX79_MMI1_PEXTUH */
24572 case TX79_MMI1_PADDUB
: /* TODO: TX79_MMI1_PADDUB */
24573 case TX79_MMI1_PSUBUB
: /* TODO: TX79_MMI1_PSUBUB */
24574 case TX79_MMI1_PEXTUB
: /* TODO: TX79_MMI1_PEXTUB */
24575 case TX79_MMI1_QFSRV
: /* TODO: TX79_MMI1_QFSRV */
24576 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI1 */
24579 MIPS_INVAL("TX79 MMI class MMI1");
24580 generate_exception_end(ctx
, EXCP_RI
);
24585 static void decode_tx79_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
24587 uint32_t opc
= MASK_TX79_MMI2(ctx
->opcode
);
24590 case TX79_MMI2_PMADDW
: /* TODO: TX79_MMI2_PMADDW */
24591 case TX79_MMI2_PSLLVW
: /* TODO: TX79_MMI2_PSLLVW */
24592 case TX79_MMI2_PSRLVW
: /* TODO: TX79_MMI2_PSRLVW */
24593 case TX79_MMI2_PMSUBW
: /* TODO: TX79_MMI2_PMSUBW */
24594 case TX79_MMI2_PMFHI
: /* TODO: TX79_MMI2_PMFHI */
24595 case TX79_MMI2_PMFLO
: /* TODO: TX79_MMI2_PMFLO */
24596 case TX79_MMI2_PINTH
: /* TODO: TX79_MMI2_PINTH */
24597 case TX79_MMI2_PMULTW
: /* TODO: TX79_MMI2_PMULTW */
24598 case TX79_MMI2_PDIVW
: /* TODO: TX79_MMI2_PDIVW */
24599 case TX79_MMI2_PCPYLD
: /* TODO: TX79_MMI2_PCPYLD */
24600 case TX79_MMI2_PMADDH
: /* TODO: TX79_MMI2_PMADDH */
24601 case TX79_MMI2_PHMADH
: /* TODO: TX79_MMI2_PHMADH */
24602 case TX79_MMI2_PAND
: /* TODO: TX79_MMI2_PAND */
24603 case TX79_MMI2_PXOR
: /* TODO: TX79_MMI2_PXOR */
24604 case TX79_MMI2_PMSUBH
: /* TODO: TX79_MMI2_PMSUBH */
24605 case TX79_MMI2_PHMSBH
: /* TODO: TX79_MMI2_PHMSBH */
24606 case TX79_MMI2_PEXEH
: /* TODO: TX79_MMI2_PEXEH */
24607 case TX79_MMI2_PREVH
: /* TODO: TX79_MMI2_PREVH */
24608 case TX79_MMI2_PMULTH
: /* TODO: TX79_MMI2_PMULTH */
24609 case TX79_MMI2_PDIVBW
: /* TODO: TX79_MMI2_PDIVBW */
24610 case TX79_MMI2_PEXEW
: /* TODO: TX79_MMI2_PEXEW */
24611 case TX79_MMI2_PROT3W
: /* TODO: TX79_MMI2_PROT3W */
24612 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI2 */
24615 MIPS_INVAL("TX79 MMI class MMI2");
24616 generate_exception_end(ctx
, EXCP_RI
);
24621 static void decode_tx79_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
24623 uint32_t opc
= MASK_TX79_MMI3(ctx
->opcode
);
24626 case TX79_MMI3_PMADDUW
: /* TODO: TX79_MMI3_PMADDUW */
24627 case TX79_MMI3_PSRAVW
: /* TODO: TX79_MMI3_PSRAVW */
24628 case TX79_MMI3_PMTHI
: /* TODO: TX79_MMI3_PMTHI */
24629 case TX79_MMI3_PMTLO
: /* TODO: TX79_MMI3_PMTLO */
24630 case TX79_MMI3_PINTEH
: /* TODO: TX79_MMI3_PINTEH */
24631 case TX79_MMI3_PMULTUW
: /* TODO: TX79_MMI3_PMULTUW */
24632 case TX79_MMI3_PDIVUW
: /* TODO: TX79_MMI3_PDIVUW */
24633 case TX79_MMI3_PCPYUD
: /* TODO: TX79_MMI3_PCPYUD */
24634 case TX79_MMI3_POR
: /* TODO: TX79_MMI3_POR */
24635 case TX79_MMI3_PNOR
: /* TODO: TX79_MMI3_PNOR */
24636 case TX79_MMI3_PEXCH
: /* TODO: TX79_MMI3_PEXCH */
24637 case TX79_MMI3_PCPYH
: /* TODO: TX79_MMI3_PCPYH */
24638 case TX79_MMI3_PEXCW
: /* TODO: TX79_MMI3_PEXCW */
24639 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI3 */
24642 MIPS_INVAL("TX79 MMI class MMI3");
24643 generate_exception_end(ctx
, EXCP_RI
);
24648 static void decode_tx79_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
24650 uint32_t opc
= MASK_TX79_MMI(ctx
->opcode
);
24651 int rs
= extract32(ctx
->opcode
, 21, 5);
24652 int rt
= extract32(ctx
->opcode
, 16, 5);
24653 int rd
= extract32(ctx
->opcode
, 11, 5);
24656 case TX79_MMI_CLASS_MMI0
:
24657 decode_tx79_mmi0(env
, ctx
);
24659 case TX79_MMI_CLASS_MMI1
:
24660 decode_tx79_mmi1(env
, ctx
);
24662 case TX79_MMI_CLASS_MMI2
:
24663 decode_tx79_mmi2(env
, ctx
);
24665 case TX79_MMI_CLASS_MMI3
:
24666 decode_tx79_mmi3(env
, ctx
);
24668 case TX79_MMI_MULT1
:
24669 case TX79_MMI_MULTU1
:
24670 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
24672 case TX79_MMI_DIV1
:
24673 case TX79_MMI_DIVU1
:
24674 gen_muldiv(ctx
, opc
, 1, rs
, rt
);
24676 case TX79_MMI_MTLO1
:
24677 case TX79_MMI_MTHI1
:
24678 gen_HILO(ctx
, opc
, 1, rs
);
24680 case TX79_MMI_MFLO1
:
24681 case TX79_MMI_MFHI1
:
24682 gen_HILO(ctx
, opc
, 1, rd
);
24684 case TX79_MMI_MADD
: /* TODO: TX79_MMI_MADD */
24685 case TX79_MMI_MADDU
: /* TODO: TX79_MMI_MADDU */
24686 case TX79_MMI_PLZCW
: /* TODO: TX79_MMI_PLZCW */
24687 case TX79_MMI_MADD1
: /* TODO: TX79_MMI_MADD1 */
24688 case TX79_MMI_MADDU1
: /* TODO: TX79_MMI_MADDU1 */
24689 case TX79_MMI_PMFHL
: /* TODO: TX79_MMI_PMFHL */
24690 case TX79_MMI_PMTHL
: /* TODO: TX79_MMI_PMTHL */
24691 case TX79_MMI_PSLLH
: /* TODO: TX79_MMI_PSLLH */
24692 case TX79_MMI_PSRLH
: /* TODO: TX79_MMI_PSRLH */
24693 case TX79_MMI_PSRAH
: /* TODO: TX79_MMI_PSRAH */
24694 case TX79_MMI_PSLLW
: /* TODO: TX79_MMI_PSLLW */
24695 case TX79_MMI_PSRLW
: /* TODO: TX79_MMI_PSRLW */
24696 case TX79_MMI_PSRAW
: /* TODO: TX79_MMI_PSRAW */
24697 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_CLASS_MMI */
24700 MIPS_INVAL("TX79 MMI class");
24701 generate_exception_end(ctx
, EXCP_RI
);
24706 static void decode_tx79_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
24708 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_LQ */
24711 static void gen_tx79_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
24713 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_SQ */
/*
 * The TX79-specific instruction Store Quadword
 *
 * +--------+-------+-------+------------------------+
 * | 011111 | base  | rt    | offset                 |  SQ
 * +--------+-------+-------+------------------------+
 *      6       5       5              16
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 * +--------+-------+-------+-------+-------+--------+
 * | 011111 | 00000 | rt    | rd    | 00000 | 111011 |  RDHWR
 * +--------+-------+-------+-------+-------+--------+
 *      6       5       5       5       5        6
 *
 * that is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
24737 static void decode_tx79_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
24739 int base
= extract32(ctx
->opcode
, 21, 5);
24740 int rt
= extract32(ctx
->opcode
, 16, 5);
24741 int offset
= extract32(ctx
->opcode
, 0, 16);
24743 #ifdef CONFIG_USER_ONLY
24744 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
24745 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
24747 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
24748 int rd
= extract32(ctx
->opcode
, 11, 5);
24750 gen_rdhwr(ctx
, rt
, rd
, 0);
24755 gen_tx79_sq(ctx
, base
, rt
, offset
);
24758 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
24760 int rs
, rt
, rd
, sa
;
24764 rs
= (ctx
->opcode
>> 21) & 0x1f;
24765 rt
= (ctx
->opcode
>> 16) & 0x1f;
24766 rd
= (ctx
->opcode
>> 11) & 0x1f;
24767 sa
= (ctx
->opcode
>> 6) & 0x1f;
24768 imm
= sextract32(ctx
->opcode
, 7, 9);
24770 op1
= MASK_SPECIAL3(ctx
->opcode
);
24773 * EVA loads and stores overlap Loongson 2E instructions decoded by
24774 * decode_opc_special3_legacy(), so be careful to allow their decoding when
24781 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24789 check_cp0_enabled(ctx
);
24790 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24794 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24799 check_cp0_enabled(ctx
);
24800 gen_st(ctx
, op1
, rt
, rs
, imm
);
24803 check_cp0_enabled(ctx
);
24804 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24807 check_cp0_enabled(ctx
);
24808 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24809 gen_cache_operation(ctx
, rt
, rs
, imm
);
24811 /* Treat as NOP. */
24814 check_cp0_enabled(ctx
);
24815 /* Treat as NOP. */
24823 check_insn(ctx
, ISA_MIPS32R2
);
24824 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24827 op2
= MASK_BSHFL(ctx
->opcode
);
24830 case OPC_ALIGN_END
:
24832 check_insn(ctx
, ISA_MIPS32R6
);
24833 decode_opc_special3_r6(env
, ctx
);
24836 check_insn(ctx
, ISA_MIPS32R2
);
24837 gen_bshfl(ctx
, op2
, rt
, rd
);
24841 #if defined(TARGET_MIPS64)
24848 check_insn(ctx
, ISA_MIPS64R2
);
24849 check_mips_64(ctx
);
24850 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24853 op2
= MASK_DBSHFL(ctx
->opcode
);
24856 case OPC_DALIGN_END
:
24858 check_insn(ctx
, ISA_MIPS32R6
);
24859 decode_opc_special3_r6(env
, ctx
);
24862 check_insn(ctx
, ISA_MIPS64R2
);
24863 check_mips_64(ctx
);
24864 op2
= MASK_DBSHFL(ctx
->opcode
);
24865 gen_bshfl(ctx
, op2
, rt
, rd
);
24871 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
24876 TCGv t0
= tcg_temp_new();
24877 TCGv t1
= tcg_temp_new();
24879 gen_load_gpr(t0
, rt
);
24880 gen_load_gpr(t1
, rs
);
24881 gen_helper_fork(t0
, t1
);
24889 TCGv t0
= tcg_temp_new();
24891 gen_load_gpr(t0
, rs
);
24892 gen_helper_yield(t0
, cpu_env
, t0
);
24893 gen_store_gpr(t0
, rd
);
24898 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24899 decode_opc_special3_r6(env
, ctx
);
24901 decode_opc_special3_legacy(env
, ctx
);
24906 /* MIPS SIMD Architecture (MSA) */
24907 static inline int check_msa_access(DisasContext
*ctx
)
24909 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
24910 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
24911 generate_exception_end(ctx
, EXCP_RI
);
24915 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
24916 if (ctx
->insn_flags
& ASE_MSA
) {
24917 generate_exception_end(ctx
, EXCP_MSADIS
);
24920 generate_exception_end(ctx
, EXCP_RI
);
24927 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
24929 /* generates tcg ops to check if any element is 0 */
24930 /* Note this function only works with MSA_WRLEN = 128 */
24931 uint64_t eval_zero_or_big
= 0;
24932 uint64_t eval_big
= 0;
24933 TCGv_i64 t0
= tcg_temp_new_i64();
24934 TCGv_i64 t1
= tcg_temp_new_i64();
24937 eval_zero_or_big
= 0x0101010101010101ULL
;
24938 eval_big
= 0x8080808080808080ULL
;
24941 eval_zero_or_big
= 0x0001000100010001ULL
;
24942 eval_big
= 0x8000800080008000ULL
;
24945 eval_zero_or_big
= 0x0000000100000001ULL
;
24946 eval_big
= 0x8000000080000000ULL
;
24949 eval_zero_or_big
= 0x0000000000000001ULL
;
24950 eval_big
= 0x8000000000000000ULL
;
24953 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
24954 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
24955 tcg_gen_andi_i64(t0
, t0
, eval_big
);
24956 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
24957 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
24958 tcg_gen_andi_i64(t1
, t1
, eval_big
);
24959 tcg_gen_or_i64(t0
, t0
, t1
);
24960 /* if all bits are zero then all elements are not zero */
24961 /* if some bit is non-zero then some element is zero */
24962 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
24963 tcg_gen_trunc_i64_tl(tresult
, t0
);
24964 tcg_temp_free_i64(t0
);
24965 tcg_temp_free_i64(t1
);
24968 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
24970 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
24971 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24972 int64_t s16
= (int16_t)ctx
->opcode
;
24974 check_msa_access(ctx
);
24976 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
24977 generate_exception_end(ctx
, EXCP_RI
);
24984 TCGv_i64 t0
= tcg_temp_new_i64();
24985 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
24986 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
24987 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
24988 tcg_gen_trunc_i64_tl(bcond
, t0
);
24989 tcg_temp_free_i64(t0
);
24996 gen_check_zero_element(bcond
, df
, wt
);
25002 gen_check_zero_element(bcond
, df
, wt
);
25003 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
25007 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
25009 ctx
->hflags
|= MIPS_HFLAG_BC
;
25010 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
25013 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
25015 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
25016 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
25017 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25018 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25020 TCGv_i32 twd
= tcg_const_i32(wd
);
25021 TCGv_i32 tws
= tcg_const_i32(ws
);
25022 TCGv_i32 ti8
= tcg_const_i32(i8
);
25024 switch (MASK_MSA_I8(ctx
->opcode
)) {
25026 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
25029 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
25032 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
25035 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
25038 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
25041 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
25044 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
25050 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
25051 if (df
== DF_DOUBLE
) {
25052 generate_exception_end(ctx
, EXCP_RI
);
25054 TCGv_i32 tdf
= tcg_const_i32(df
);
25055 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
25056 tcg_temp_free_i32(tdf
);
25061 MIPS_INVAL("MSA instruction");
25062 generate_exception_end(ctx
, EXCP_RI
);
25066 tcg_temp_free_i32(twd
);
25067 tcg_temp_free_i32(tws
);
25068 tcg_temp_free_i32(ti8
);
25071 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
25073 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25074 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25075 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
25076 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
25077 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25078 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25080 TCGv_i32 tdf
= tcg_const_i32(df
);
25081 TCGv_i32 twd
= tcg_const_i32(wd
);
25082 TCGv_i32 tws
= tcg_const_i32(ws
);
25083 TCGv_i32 timm
= tcg_temp_new_i32();
25084 tcg_gen_movi_i32(timm
, u5
);
25086 switch (MASK_MSA_I5(ctx
->opcode
)) {
25088 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25091 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25093 case OPC_MAXI_S_df
:
25094 tcg_gen_movi_i32(timm
, s5
);
25095 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25097 case OPC_MAXI_U_df
:
25098 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25100 case OPC_MINI_S_df
:
25101 tcg_gen_movi_i32(timm
, s5
);
25102 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25104 case OPC_MINI_U_df
:
25105 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25108 tcg_gen_movi_i32(timm
, s5
);
25109 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25111 case OPC_CLTI_S_df
:
25112 tcg_gen_movi_i32(timm
, s5
);
25113 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25115 case OPC_CLTI_U_df
:
25116 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25118 case OPC_CLEI_S_df
:
25119 tcg_gen_movi_i32(timm
, s5
);
25120 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25122 case OPC_CLEI_U_df
:
25123 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25127 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
25128 tcg_gen_movi_i32(timm
, s10
);
25129 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
25133 MIPS_INVAL("MSA instruction");
25134 generate_exception_end(ctx
, EXCP_RI
);
25138 tcg_temp_free_i32(tdf
);
25139 tcg_temp_free_i32(twd
);
25140 tcg_temp_free_i32(tws
);
25141 tcg_temp_free_i32(timm
);
25144 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
25146 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25147 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
25148 uint32_t df
= 0, m
= 0;
25149 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25150 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25157 if ((dfm
& 0x40) == 0x00) {
25160 } else if ((dfm
& 0x60) == 0x40) {
25163 } else if ((dfm
& 0x70) == 0x60) {
25166 } else if ((dfm
& 0x78) == 0x70) {
25170 generate_exception_end(ctx
, EXCP_RI
);
25174 tdf
= tcg_const_i32(df
);
25175 tm
= tcg_const_i32(m
);
25176 twd
= tcg_const_i32(wd
);
25177 tws
= tcg_const_i32(ws
);
25179 switch (MASK_MSA_BIT(ctx
->opcode
)) {
25181 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25184 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
25187 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25190 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25193 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
25196 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
25198 case OPC_BINSLI_df
:
25199 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25201 case OPC_BINSRI_df
:
25202 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25205 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
25208 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
25211 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
25214 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25217 MIPS_INVAL("MSA instruction");
25218 generate_exception_end(ctx
, EXCP_RI
);
25222 tcg_temp_free_i32(tdf
);
25223 tcg_temp_free_i32(tm
);
25224 tcg_temp_free_i32(twd
);
25225 tcg_temp_free_i32(tws
);
25228 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
25230 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25231 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25232 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25233 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25234 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25236 TCGv_i32 tdf
= tcg_const_i32(df
);
25237 TCGv_i32 twd
= tcg_const_i32(wd
);
25238 TCGv_i32 tws
= tcg_const_i32(ws
);
25239 TCGv_i32 twt
= tcg_const_i32(wt
);
25241 switch (MASK_MSA_3R(ctx
->opcode
)) {
25243 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
25246 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25249 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25252 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25254 case OPC_SUBS_S_df
:
25255 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25258 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25261 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
25264 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25267 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
25270 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25272 case OPC_ADDS_A_df
:
25273 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25275 case OPC_SUBS_U_df
:
25276 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25279 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25282 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
25285 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
25288 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25291 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25294 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25296 case OPC_ADDS_S_df
:
25297 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25299 case OPC_SUBSUS_U_df
:
25300 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25303 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25306 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25309 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25312 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25315 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25318 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25320 case OPC_ADDS_U_df
:
25321 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25323 case OPC_SUBSUU_S_df
:
25324 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25327 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25330 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
25333 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25336 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25339 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25341 case OPC_ASUB_S_df
:
25342 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25345 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25348 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25351 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
25354 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25357 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25360 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25362 case OPC_ASUB_U_df
:
25363 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25366 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25369 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25372 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25375 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25377 case OPC_AVER_S_df
:
25378 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25381 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25384 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25387 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25390 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25392 case OPC_AVER_U_df
:
25393 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25396 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25399 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25402 case OPC_DOTP_S_df
:
25403 case OPC_DOTP_U_df
:
25404 case OPC_DPADD_S_df
:
25405 case OPC_DPADD_U_df
:
25406 case OPC_DPSUB_S_df
:
25407 case OPC_HADD_S_df
:
25408 case OPC_DPSUB_U_df
:
25409 case OPC_HADD_U_df
:
25410 case OPC_HSUB_S_df
:
25411 case OPC_HSUB_U_df
:
25412 if (df
== DF_BYTE
) {
25413 generate_exception_end(ctx
, EXCP_RI
);
25416 switch (MASK_MSA_3R(ctx
->opcode
)) {
25417 case OPC_DOTP_S_df
:
25418 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25420 case OPC_DOTP_U_df
:
25421 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25423 case OPC_DPADD_S_df
:
25424 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25426 case OPC_DPADD_U_df
:
25427 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25429 case OPC_DPSUB_S_df
:
25430 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25432 case OPC_HADD_S_df
:
25433 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25435 case OPC_DPSUB_U_df
:
25436 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25438 case OPC_HADD_U_df
:
25439 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25441 case OPC_HSUB_S_df
:
25442 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25444 case OPC_HSUB_U_df
:
25445 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25450 MIPS_INVAL("MSA instruction");
25451 generate_exception_end(ctx
, EXCP_RI
);
25454 tcg_temp_free_i32(twd
);
25455 tcg_temp_free_i32(tws
);
25456 tcg_temp_free_i32(twt
);
25457 tcg_temp_free_i32(tdf
);
25460 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
25462 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
25463 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
25464 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
25465 TCGv telm
= tcg_temp_new();
25466 TCGv_i32 tsr
= tcg_const_i32(source
);
25467 TCGv_i32 tdt
= tcg_const_i32(dest
);
25469 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
25471 gen_load_gpr(telm
, source
);
25472 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
25475 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
25476 gen_store_gpr(telm
, dest
);
25479 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
25482 MIPS_INVAL("MSA instruction");
25483 generate_exception_end(ctx
, EXCP_RI
);
25487 tcg_temp_free(telm
);
25488 tcg_temp_free_i32(tdt
);
25489 tcg_temp_free_i32(tsr
);
25492 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
25495 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25496 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25497 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25499 TCGv_i32 tws
= tcg_const_i32(ws
);
25500 TCGv_i32 twd
= tcg_const_i32(wd
);
25501 TCGv_i32 tn
= tcg_const_i32(n
);
25502 TCGv_i32 tdf
= tcg_const_i32(df
);
25504 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25506 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
25508 case OPC_SPLATI_df
:
25509 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
25512 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
25514 case OPC_COPY_S_df
:
25515 case OPC_COPY_U_df
:
25516 case OPC_INSERT_df
:
25517 #if !defined(TARGET_MIPS64)
25518 /* Double format valid only for MIPS64 */
25519 if (df
== DF_DOUBLE
) {
25520 generate_exception_end(ctx
, EXCP_RI
);
25524 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25525 case OPC_COPY_S_df
:
25526 if (likely(wd
!= 0)) {
25527 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
25530 case OPC_COPY_U_df
:
25531 if (likely(wd
!= 0)) {
25532 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
25535 case OPC_INSERT_df
:
25536 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
25541 MIPS_INVAL("MSA instruction");
25542 generate_exception_end(ctx
, EXCP_RI
);
25544 tcg_temp_free_i32(twd
);
25545 tcg_temp_free_i32(tws
);
25546 tcg_temp_free_i32(tn
);
25547 tcg_temp_free_i32(tdf
);
25550 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
25552 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
25553 uint32_t df
= 0, n
= 0;
25555 if ((dfn
& 0x30) == 0x00) {
25558 } else if ((dfn
& 0x38) == 0x20) {
25561 } else if ((dfn
& 0x3c) == 0x30) {
25564 } else if ((dfn
& 0x3e) == 0x38) {
25567 } else if (dfn
== 0x3E) {
25568 /* CTCMSA, CFCMSA, MOVE.V */
25569 gen_msa_elm_3e(env
, ctx
);
25572 generate_exception_end(ctx
, EXCP_RI
);
25576 gen_msa_elm_df(env
, ctx
, df
, n
);
25579 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25581 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25582 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
25583 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25584 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25585 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25587 TCGv_i32 twd
= tcg_const_i32(wd
);
25588 TCGv_i32 tws
= tcg_const_i32(ws
);
25589 TCGv_i32 twt
= tcg_const_i32(wt
);
25590 TCGv_i32 tdf
= tcg_temp_new_i32();
25592 /* adjust df value for floating-point instruction */
25593 tcg_gen_movi_i32(tdf
, df
+ 2);
25595 switch (MASK_MSA_3RF(ctx
->opcode
)) {
25597 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25600 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25603 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25606 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25609 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25612 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25615 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
25618 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25621 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25624 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25627 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25630 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25633 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25636 tcg_gen_movi_i32(tdf
, df
+ 1);
25637 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25640 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25643 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25645 case OPC_MADD_Q_df
:
25646 tcg_gen_movi_i32(tdf
, df
+ 1);
25647 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25650 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25652 case OPC_MSUB_Q_df
:
25653 tcg_gen_movi_i32(tdf
, df
+ 1);
25654 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25657 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25660 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
25663 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25666 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
25669 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25672 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25675 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25678 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25681 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25684 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25687 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25690 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25693 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
25695 case OPC_MULR_Q_df
:
25696 tcg_gen_movi_i32(tdf
, df
+ 1);
25697 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25700 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25702 case OPC_FMIN_A_df
:
25703 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25705 case OPC_MADDR_Q_df
:
25706 tcg_gen_movi_i32(tdf
, df
+ 1);
25707 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25710 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25713 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
25715 case OPC_MSUBR_Q_df
:
25716 tcg_gen_movi_i32(tdf
, df
+ 1);
25717 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25720 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25722 case OPC_FMAX_A_df
:
25723 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25726 MIPS_INVAL("MSA instruction");
25727 generate_exception_end(ctx
, EXCP_RI
);
25731 tcg_temp_free_i32(twd
);
25732 tcg_temp_free_i32(tws
);
25733 tcg_temp_free_i32(twt
);
25734 tcg_temp_free_i32(tdf
);
25737 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
25739 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25740 (op & (0x7 << 18)))
25741 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25742 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25743 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25744 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
25745 TCGv_i32 twd
= tcg_const_i32(wd
);
25746 TCGv_i32 tws
= tcg_const_i32(ws
);
25747 TCGv_i32 twt
= tcg_const_i32(wt
);
25748 TCGv_i32 tdf
= tcg_const_i32(df
);
25750 switch (MASK_MSA_2R(ctx
->opcode
)) {
25752 #if !defined(TARGET_MIPS64)
25753 /* Double format valid only for MIPS64 */
25754 if (df
== DF_DOUBLE
) {
25755 generate_exception_end(ctx
, EXCP_RI
);
25759 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
25762 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
25765 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
25768 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
25771 MIPS_INVAL("MSA instruction");
25772 generate_exception_end(ctx
, EXCP_RI
);
25776 tcg_temp_free_i32(twd
);
25777 tcg_temp_free_i32(tws
);
25778 tcg_temp_free_i32(twt
);
25779 tcg_temp_free_i32(tdf
);
25782 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25784 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25785 (op & (0xf << 17)))
25786 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25787 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25788 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25789 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
25790 TCGv_i32 twd
= tcg_const_i32(wd
);
25791 TCGv_i32 tws
= tcg_const_i32(ws
);
25792 TCGv_i32 twt
= tcg_const_i32(wt
);
25793 /* adjust df value for floating-point instruction */
25794 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
25796 switch (MASK_MSA_2RF(ctx
->opcode
)) {
25797 case OPC_FCLASS_df
:
25798 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
25800 case OPC_FTRUNC_S_df
:
25801 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
25803 case OPC_FTRUNC_U_df
:
25804 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
25807 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
25809 case OPC_FRSQRT_df
:
25810 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
25813 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
25816 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
25819 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
25821 case OPC_FEXUPL_df
:
25822 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
25824 case OPC_FEXUPR_df
:
25825 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
25828 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
25831 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
25833 case OPC_FTINT_S_df
:
25834 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
25836 case OPC_FTINT_U_df
:
25837 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
25839 case OPC_FFINT_S_df
:
25840 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
25842 case OPC_FFINT_U_df
:
25843 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
25847 tcg_temp_free_i32(twd
);
25848 tcg_temp_free_i32(tws
);
25849 tcg_temp_free_i32(twt
);
25850 tcg_temp_free_i32(tdf
);
25853 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
25855 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
25856 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25857 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25858 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25859 TCGv_i32 twd
= tcg_const_i32(wd
);
25860 TCGv_i32 tws
= tcg_const_i32(ws
);
25861 TCGv_i32 twt
= tcg_const_i32(wt
);
25863 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25865 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
25868 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
25871 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
25874 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
25877 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
25880 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
25883 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
25886 MIPS_INVAL("MSA instruction");
25887 generate_exception_end(ctx
, EXCP_RI
);
25891 tcg_temp_free_i32(twd
);
25892 tcg_temp_free_i32(tws
);
25893 tcg_temp_free_i32(twt
);
25896 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
25898 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25906 gen_msa_vec_v(env
, ctx
);
25909 gen_msa_2r(env
, ctx
);
25912 gen_msa_2rf(env
, ctx
);
25915 MIPS_INVAL("MSA instruction");
25916 generate_exception_end(ctx
, EXCP_RI
);
25921 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
25923 uint32_t opcode
= ctx
->opcode
;
25924 check_insn(ctx
, ASE_MSA
);
25925 check_msa_access(ctx
);
25927 switch (MASK_MSA_MINOR(opcode
)) {
25928 case OPC_MSA_I8_00
:
25929 case OPC_MSA_I8_01
:
25930 case OPC_MSA_I8_02
:
25931 gen_msa_i8(env
, ctx
);
25933 case OPC_MSA_I5_06
:
25934 case OPC_MSA_I5_07
:
25935 gen_msa_i5(env
, ctx
);
25937 case OPC_MSA_BIT_09
:
25938 case OPC_MSA_BIT_0A
:
25939 gen_msa_bit(env
, ctx
);
25941 case OPC_MSA_3R_0D
:
25942 case OPC_MSA_3R_0E
:
25943 case OPC_MSA_3R_0F
:
25944 case OPC_MSA_3R_10
:
25945 case OPC_MSA_3R_11
:
25946 case OPC_MSA_3R_12
:
25947 case OPC_MSA_3R_13
:
25948 case OPC_MSA_3R_14
:
25949 case OPC_MSA_3R_15
:
25950 gen_msa_3r(env
, ctx
);
25953 gen_msa_elm(env
, ctx
);
25955 case OPC_MSA_3RF_1A
:
25956 case OPC_MSA_3RF_1B
:
25957 case OPC_MSA_3RF_1C
:
25958 gen_msa_3rf(env
, ctx
);
25961 gen_msa_vec(env
, ctx
);
25972 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
25973 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
25974 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25975 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
25977 TCGv_i32 twd
= tcg_const_i32(wd
);
25978 TCGv taddr
= tcg_temp_new();
25979 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
25981 switch (MASK_MSA_MINOR(opcode
)) {
25983 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
25986 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
25989 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
25992 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
25995 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
25998 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
26001 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
26004 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
26008 tcg_temp_free_i32(twd
);
26009 tcg_temp_free(taddr
);
26013 MIPS_INVAL("MSA instruction");
26014 generate_exception_end(ctx
, EXCP_RI
);
26020 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
26023 int rs
, rt
, rd
, sa
;
26027 /* make sure instructions are on a word boundary */
26028 if (ctx
->base
.pc_next
& 0x3) {
26029 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
26030 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
26034 /* Handle blikely not taken case */
26035 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
26036 TCGLabel
*l1
= gen_new_label();
26038 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
26039 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
26040 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
26044 op
= MASK_OP_MAJOR(ctx
->opcode
);
26045 rs
= (ctx
->opcode
>> 21) & 0x1f;
26046 rt
= (ctx
->opcode
>> 16) & 0x1f;
26047 rd
= (ctx
->opcode
>> 11) & 0x1f;
26048 sa
= (ctx
->opcode
>> 6) & 0x1f;
26049 imm
= (int16_t)ctx
->opcode
;
26052 decode_opc_special(env
, ctx
);
26055 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
26056 decode_tx79_mmi(env
, ctx
);
26058 decode_opc_special2_legacy(env
, ctx
);
26062 if (ctx
->insn_flags
& INSN_R5900
) {
26063 decode_tx79_sq(env
, ctx
); /* TX79_SQ */
26065 decode_opc_special3(env
, ctx
);
26069 op1
= MASK_REGIMM(ctx
->opcode
);
26071 case OPC_BLTZL
: /* REGIMM branches */
26075 check_insn(ctx
, ISA_MIPS2
);
26076 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26080 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26084 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26086 /* OPC_NAL, OPC_BAL */
26087 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
26089 generate_exception_end(ctx
, EXCP_RI
);
26092 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26095 case OPC_TGEI
: /* REGIMM traps */
26102 check_insn(ctx
, ISA_MIPS2
);
26103 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26104 gen_trap(ctx
, op1
, rs
, -1, imm
);
26107 check_insn(ctx
, ISA_MIPS32R6
);
26108 generate_exception_end(ctx
, EXCP_RI
);
26111 check_insn(ctx
, ISA_MIPS32R2
);
26112 /* Break the TB to be able to sync copied instructions
26114 ctx
->base
.is_jmp
= DISAS_STOP
;
26116 case OPC_BPOSGE32
: /* MIPS DSP branch */
26117 #if defined(TARGET_MIPS64)
26121 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
26123 #if defined(TARGET_MIPS64)
26125 check_insn(ctx
, ISA_MIPS32R6
);
26126 check_mips_64(ctx
);
26128 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
26132 check_insn(ctx
, ISA_MIPS32R6
);
26133 check_mips_64(ctx
);
26135 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
26139 default: /* Invalid */
26140 MIPS_INVAL("regimm");
26141 generate_exception_end(ctx
, EXCP_RI
);
26146 check_cp0_enabled(ctx
);
26147 op1
= MASK_CP0(ctx
->opcode
);
26155 #if defined(TARGET_MIPS64)
26159 #ifndef CONFIG_USER_ONLY
26160 gen_cp0(env
, ctx
, op1
, rt
, rd
);
26161 #endif /* !CONFIG_USER_ONLY */
26179 #ifndef CONFIG_USER_ONLY
26180 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
26181 #endif /* !CONFIG_USER_ONLY */
26184 #ifndef CONFIG_USER_ONLY
26187 TCGv t0
= tcg_temp_new();
26189 op2
= MASK_MFMC0(ctx
->opcode
);
26193 gen_helper_dmt(t0
);
26194 gen_store_gpr(t0
, rt
);
26198 gen_helper_emt(t0
);
26199 gen_store_gpr(t0
, rt
);
26203 gen_helper_dvpe(t0
, cpu_env
);
26204 gen_store_gpr(t0
, rt
);
26208 gen_helper_evpe(t0
, cpu_env
);
26209 gen_store_gpr(t0
, rt
);
26212 check_insn(ctx
, ISA_MIPS32R6
);
26214 gen_helper_dvp(t0
, cpu_env
);
26215 gen_store_gpr(t0
, rt
);
26219 check_insn(ctx
, ISA_MIPS32R6
);
26221 gen_helper_evp(t0
, cpu_env
);
26222 gen_store_gpr(t0
, rt
);
26226 check_insn(ctx
, ISA_MIPS32R2
);
26227 save_cpu_state(ctx
, 1);
26228 gen_helper_di(t0
, cpu_env
);
26229 gen_store_gpr(t0
, rt
);
26230 /* Stop translation as we may have switched
26231 the execution mode. */
26232 ctx
->base
.is_jmp
= DISAS_STOP
;
26235 check_insn(ctx
, ISA_MIPS32R2
);
26236 save_cpu_state(ctx
, 1);
26237 gen_helper_ei(t0
, cpu_env
);
26238 gen_store_gpr(t0
, rt
);
26239 /* DISAS_STOP isn't sufficient, we need to ensure we break
26240 out of translated code to check for pending interrupts */
26241 gen_save_pc(ctx
->base
.pc_next
+ 4);
26242 ctx
->base
.is_jmp
= DISAS_EXIT
;
26244 default: /* Invalid */
26245 MIPS_INVAL("mfmc0");
26246 generate_exception_end(ctx
, EXCP_RI
);
26251 #endif /* !CONFIG_USER_ONLY */
26254 check_insn(ctx
, ISA_MIPS32R2
);
26255 gen_load_srsgpr(rt
, rd
);
26258 check_insn(ctx
, ISA_MIPS32R2
);
26259 gen_store_srsgpr(rt
, rd
);
26263 generate_exception_end(ctx
, EXCP_RI
);
26267 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
26268 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26269 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
26270 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26273 /* Arithmetic with immediate opcode */
26274 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26278 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26280 case OPC_SLTI
: /* Set on less than with immediate opcode */
26282 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
26284 case OPC_ANDI
: /* Arithmetic with immediate opcode */
26285 case OPC_LUI
: /* OPC_AUI */
26288 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
26290 case OPC_J
: /* Jump */
26292 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26293 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26296 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
26297 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26299 generate_exception_end(ctx
, EXCP_RI
);
26302 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
26303 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26306 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26309 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
26310 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26312 generate_exception_end(ctx
, EXCP_RI
);
26315 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
26316 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26319 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26322 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
26325 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26327 check_insn(ctx
, ISA_MIPS32R6
);
26328 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
26329 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26332 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
26335 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26337 check_insn(ctx
, ISA_MIPS32R6
);
26338 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
26339 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26344 check_insn(ctx
, ISA_MIPS2
);
26345 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26349 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26351 case OPC_LL
: /* Load and stores */
26352 check_insn(ctx
, ISA_MIPS2
);
26356 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26364 gen_ld(ctx
, op
, rt
, rs
, imm
);
26368 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26373 gen_st(ctx
, op
, rt
, rs
, imm
);
26376 check_insn(ctx
, ISA_MIPS2
);
26377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26378 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26381 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26382 check_cp0_enabled(ctx
);
26383 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
26384 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26385 gen_cache_operation(ctx
, rt
, rs
, imm
);
26387 /* Treat as NOP. */
26390 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26391 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
26392 /* Treat as NOP. */
26395 /* Floating point (COP1). */
26400 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
26404 op1
= MASK_CP1(ctx
->opcode
);
26409 check_cp1_enabled(ctx
);
26410 check_insn(ctx
, ISA_MIPS32R2
);
26416 check_cp1_enabled(ctx
);
26417 gen_cp1(ctx
, op1
, rt
, rd
);
26419 #if defined(TARGET_MIPS64)
26422 check_cp1_enabled(ctx
);
26423 check_insn(ctx
, ISA_MIPS3
);
26424 check_mips_64(ctx
);
26425 gen_cp1(ctx
, op1
, rt
, rd
);
26428 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
26429 check_cp1_enabled(ctx
);
26430 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26432 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26437 check_insn(ctx
, ASE_MIPS3D
);
26438 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26439 (rt
>> 2) & 0x7, imm
<< 2);
26443 check_cp1_enabled(ctx
);
26444 check_insn(ctx
, ISA_MIPS32R6
);
26445 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26449 check_cp1_enabled(ctx
);
26450 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26452 check_insn(ctx
, ASE_MIPS3D
);
26455 check_cp1_enabled(ctx
);
26456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26457 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26458 (rt
>> 2) & 0x7, imm
<< 2);
26465 check_cp1_enabled(ctx
);
26466 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26472 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
26473 check_cp1_enabled(ctx
);
26474 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26476 case R6_OPC_CMP_AF_S
:
26477 case R6_OPC_CMP_UN_S
:
26478 case R6_OPC_CMP_EQ_S
:
26479 case R6_OPC_CMP_UEQ_S
:
26480 case R6_OPC_CMP_LT_S
:
26481 case R6_OPC_CMP_ULT_S
:
26482 case R6_OPC_CMP_LE_S
:
26483 case R6_OPC_CMP_ULE_S
:
26484 case R6_OPC_CMP_SAF_S
:
26485 case R6_OPC_CMP_SUN_S
:
26486 case R6_OPC_CMP_SEQ_S
:
26487 case R6_OPC_CMP_SEUQ_S
:
26488 case R6_OPC_CMP_SLT_S
:
26489 case R6_OPC_CMP_SULT_S
:
26490 case R6_OPC_CMP_SLE_S
:
26491 case R6_OPC_CMP_SULE_S
:
26492 case R6_OPC_CMP_OR_S
:
26493 case R6_OPC_CMP_UNE_S
:
26494 case R6_OPC_CMP_NE_S
:
26495 case R6_OPC_CMP_SOR_S
:
26496 case R6_OPC_CMP_SUNE_S
:
26497 case R6_OPC_CMP_SNE_S
:
26498 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26500 case R6_OPC_CMP_AF_D
:
26501 case R6_OPC_CMP_UN_D
:
26502 case R6_OPC_CMP_EQ_D
:
26503 case R6_OPC_CMP_UEQ_D
:
26504 case R6_OPC_CMP_LT_D
:
26505 case R6_OPC_CMP_ULT_D
:
26506 case R6_OPC_CMP_LE_D
:
26507 case R6_OPC_CMP_ULE_D
:
26508 case R6_OPC_CMP_SAF_D
:
26509 case R6_OPC_CMP_SUN_D
:
26510 case R6_OPC_CMP_SEQ_D
:
26511 case R6_OPC_CMP_SEUQ_D
:
26512 case R6_OPC_CMP_SLT_D
:
26513 case R6_OPC_CMP_SULT_D
:
26514 case R6_OPC_CMP_SLE_D
:
26515 case R6_OPC_CMP_SULE_D
:
26516 case R6_OPC_CMP_OR_D
:
26517 case R6_OPC_CMP_UNE_D
:
26518 case R6_OPC_CMP_NE_D
:
26519 case R6_OPC_CMP_SOR_D
:
26520 case R6_OPC_CMP_SUNE_D
:
26521 case R6_OPC_CMP_SNE_D
:
26522 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26525 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
26526 rt
, rd
, sa
, (imm
>> 8) & 0x7);
26531 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26546 check_insn(ctx
, ASE_MSA
);
26547 gen_msa_branch(env
, ctx
, op1
);
26551 generate_exception_end(ctx
, EXCP_RI
);
26556 /* Compact branches [R6] and COP2 [non-R6] */
26557 case OPC_BC
: /* OPC_LWC2 */
26558 case OPC_BALC
: /* OPC_SWC2 */
26559 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26560 /* OPC_BC, OPC_BALC */
26561 gen_compute_compact_branch(ctx
, op
, 0, 0,
26562 sextract32(ctx
->opcode
<< 2, 0, 28));
26564 /* OPC_LWC2, OPC_SWC2 */
26565 /* COP2: Not implemented. */
26566 generate_exception_err(ctx
, EXCP_CpU
, 2);
26569 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
26570 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
26571 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26573 /* OPC_BEQZC, OPC_BNEZC */
26574 gen_compute_compact_branch(ctx
, op
, rs
, 0,
26575 sextract32(ctx
->opcode
<< 2, 0, 23));
26577 /* OPC_JIC, OPC_JIALC */
26578 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
26581 /* OPC_LWC2, OPC_SWC2 */
26582 /* COP2: Not implemented. */
26583 generate_exception_err(ctx
, EXCP_CpU
, 2);
26587 check_insn(ctx
, INSN_LOONGSON2F
);
26588 /* Note that these instructions use different fields. */
26589 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
26593 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26594 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
26595 check_cp1_enabled(ctx
);
26596 op1
= MASK_CP3(ctx
->opcode
);
26600 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26606 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26607 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
26610 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26611 /* Treat as NOP. */
26614 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26628 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26629 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
26633 generate_exception_end(ctx
, EXCP_RI
);
26637 generate_exception_err(ctx
, EXCP_CpU
, 1);
26641 #if defined(TARGET_MIPS64)
26642 /* MIPS64 opcodes */
26646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26650 check_insn(ctx
, ISA_MIPS3
);
26651 check_mips_64(ctx
);
26652 gen_ld(ctx
, op
, rt
, rs
, imm
);
26656 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26659 check_insn(ctx
, ISA_MIPS3
);
26660 check_mips_64(ctx
);
26661 gen_st(ctx
, op
, rt
, rs
, imm
);
26664 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26665 check_insn(ctx
, ISA_MIPS3
);
26666 check_mips_64(ctx
);
26667 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26669 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
26670 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26671 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
26672 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26675 check_insn(ctx
, ISA_MIPS3
);
26676 check_mips_64(ctx
);
26677 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26681 check_insn(ctx
, ISA_MIPS3
);
26682 check_mips_64(ctx
);
26683 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26686 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
26687 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26688 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26690 MIPS_INVAL("major opcode");
26691 generate_exception_end(ctx
, EXCP_RI
);
26695 case OPC_DAUI
: /* OPC_JALX */
26696 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26697 #if defined(TARGET_MIPS64)
26699 check_mips_64(ctx
);
26701 generate_exception(ctx
, EXCP_RI
);
26702 } else if (rt
!= 0) {
26703 TCGv t0
= tcg_temp_new();
26704 gen_load_gpr(t0
, rs
);
26705 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
26709 generate_exception_end(ctx
, EXCP_RI
);
26710 MIPS_INVAL("major opcode");
26714 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
26715 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26716 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26719 case OPC_MSA
: /* OPC_MDMX */
26720 if (ctx
->insn_flags
& INSN_R5900
) {
26721 decode_tx79_lq(env
, ctx
); /* TX79_LQ */
26723 /* MDMX: Not implemented. */
26728 check_insn(ctx
, ISA_MIPS32R6
);
26729 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
26731 default: /* Invalid */
26732 MIPS_INVAL("major opcode");
26733 generate_exception_end(ctx
, EXCP_RI
);
26738 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
26740 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26741 CPUMIPSState
*env
= cs
->env_ptr
;
26743 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
26744 ctx
->saved_pc
= -1;
26745 ctx
->insn_flags
= env
->insn_flags
;
26746 ctx
->CP0_Config1
= env
->CP0_Config1
;
26747 ctx
->CP0_Config2
= env
->CP0_Config2
;
26748 ctx
->CP0_Config3
= env
->CP0_Config3
;
26749 ctx
->CP0_Config5
= env
->CP0_Config5
;
26751 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
26752 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
26753 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
26754 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
26755 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
26756 ctx
->PAMask
= env
->PAMask
;
26757 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
26758 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
26759 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
26760 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
26761 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
26762 /* Restore delay slot state from the tb context. */
26763 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
26764 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
26765 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
26766 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
26767 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
26768 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
26769 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
26770 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
26771 restore_cpu_state(env
, ctx
);
26772 #ifdef CONFIG_USER_ONLY
26773 ctx
->mem_idx
= MIPS_HFLAG_UM
;
26775 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
26777 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
26778 MO_UNALN
: MO_ALIGN
;
26780 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
26784 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26788 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26790 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26792 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
26796 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
26797 const CPUBreakpoint
*bp
)
26799 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26801 save_cpu_state(ctx
, 1);
26802 ctx
->base
.is_jmp
= DISAS_NORETURN
;
26803 gen_helper_raise_exception_debug(cpu_env
);
26804 /* The address covered by the breakpoint must be included in
26805 [tb->pc, tb->pc + tb->size) in order to for it to be
26806 properly cleared -- thus we increment the PC here so that
26807 the logic setting tb->size below does the right thing. */
26808 ctx
->base
.pc_next
+= 4;
26812 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
26814 CPUMIPSState
*env
= cs
->env_ptr
;
26815 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26819 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
26820 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
26821 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26822 insn_bytes
= decode_nanomips_opc(env
, ctx
);
26823 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
26824 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
26826 decode_opc(env
, ctx
);
26827 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
26828 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26829 insn_bytes
= decode_micromips_opc(env
, ctx
);
26830 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
26831 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26832 insn_bytes
= decode_mips16_opc(env
, ctx
);
26834 generate_exception_end(ctx
, EXCP_RI
);
26835 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
26839 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
26840 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
26841 MIPS_HFLAG_FBNSLOT
))) {
26842 /* force to generate branch as there is neither delay nor
26846 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
26847 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
26848 /* Force to generate branch as microMIPS R6 doesn't restrict
26849 branches in the forbidden slot. */
26854 gen_branch(ctx
, insn_bytes
);
26856 ctx
->base
.pc_next
+= insn_bytes
;
26858 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
26861 /* Execute a branch and its delay slot as a single instruction.
26862 This is what GDB expects and is consistent with what the
26863 hardware does (e.g. if a delay slot instruction faults, the
26864 reported PC is the PC of the branch). */
26865 if (ctx
->base
.singlestep_enabled
&&
26866 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
26867 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26869 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
26870 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26874 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
26876 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26878 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
26879 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
26880 gen_helper_raise_exception_debug(cpu_env
);
26882 switch (ctx
->base
.is_jmp
) {
26884 gen_save_pc(ctx
->base
.pc_next
);
26885 tcg_gen_lookup_and_goto_ptr();
26888 case DISAS_TOO_MANY
:
26889 save_cpu_state(ctx
, 0);
26890 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
26893 tcg_gen_exit_tb(NULL
, 0);
26895 case DISAS_NORETURN
:
26898 g_assert_not_reached();
26903 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
26905 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
26906 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
26909 static const TranslatorOps mips_tr_ops
= {
26910 .init_disas_context
= mips_tr_init_disas_context
,
26911 .tb_start
= mips_tr_tb_start
,
26912 .insn_start
= mips_tr_insn_start
,
26913 .breakpoint_check
= mips_tr_breakpoint_check
,
26914 .translate_insn
= mips_tr_translate_insn
,
26915 .tb_stop
= mips_tr_tb_stop
,
26916 .disas_log
= mips_tr_disas_log
,
26919 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
26923 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
26926 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
26930 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
26932 #define printfpr(fp) \
26935 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26936 " fd:%13g fs:%13g psu: %13g\n", \
26937 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
26938 (double)(fp)->fd, \
26939 (double)(fp)->fs[FP_ENDIAN_IDX], \
26940 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
26943 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
26944 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
26945 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26946 " fd:%13g fs:%13g psu:%13g\n", \
26947 tmp.w[FP_ENDIAN_IDX], tmp.d, \
26949 (double)tmp.fs[FP_ENDIAN_IDX], \
26950 (double)tmp.fs[!FP_ENDIAN_IDX]); \
26955 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
26956 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
26957 get_float_exception_flags(&env
->active_fpu
.fp_status
));
26958 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
26959 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
26960 printfpr(&env
->active_fpu
.fpr
[i
]);
26966 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
26969 MIPSCPU
*cpu
= MIPS_CPU(cs
);
26970 CPUMIPSState
*env
= &cpu
->env
;
26973 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
26974 " LO=0x" TARGET_FMT_lx
" ds %04x "
26975 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
26976 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
26977 env
->hflags
, env
->btarget
, env
->bcond
);
26978 for (i
= 0; i
< 32; i
++) {
26980 cpu_fprintf(f
, "GPR%02d:", i
);
26981 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
26983 cpu_fprintf(f
, "\n");
26986 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
26987 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
26988 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
26990 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
26991 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
26992 env
->CP0_Config2
, env
->CP0_Config3
);
26993 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
26994 env
->CP0_Config4
, env
->CP0_Config5
);
26995 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
26996 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
27000 void mips_tcg_init(void)
27005 for (i
= 1; i
< 32; i
++)
27006 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27007 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
27010 for (i
= 0; i
< 32; i
++) {
27011 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
27013 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
27014 /* The scalar floating-point unit (FPU) registers are mapped on
27015 * the MSA vector registers. */
27016 fpu_f64
[i
] = msa_wr_d
[i
* 2];
27017 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
27018 msa_wr_d
[i
* 2 + 1] =
27019 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
27022 cpu_PC
= tcg_global_mem_new(cpu_env
,
27023 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
27024 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
27025 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
27026 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
27028 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
27029 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
27032 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
27033 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
27035 bcond
= tcg_global_mem_new(cpu_env
,
27036 offsetof(CPUMIPSState
, bcond
), "bcond");
27037 btarget
= tcg_global_mem_new(cpu_env
,
27038 offsetof(CPUMIPSState
, btarget
), "btarget");
27039 hflags
= tcg_global_mem_new_i32(cpu_env
,
27040 offsetof(CPUMIPSState
, hflags
), "hflags");
27042 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
27043 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
27045 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
27046 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
27050 #include "translate_init.inc.c"
27052 void cpu_mips_realize_env(CPUMIPSState
*env
)
27054 env
->exception_base
= (int32_t)0xBFC00000;
27056 #ifndef CONFIG_USER_ONLY
27057 mmu_init(env
, env
->cpu_model
);
27059 fpu_init(env
, env
->cpu_model
);
27060 mvp_init(env
, env
->cpu_model
);
27063 bool cpu_supports_cps_smp(const char *cpu_type
)
27065 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27066 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
27069 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
27071 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27072 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
27075 void cpu_set_exception_base(int vp_index
, target_ulong address
)
27077 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
27078 vp
->env
.exception_base
= address
;
27081 void cpu_state_reset(CPUMIPSState
*env
)
27083 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
27084 CPUState
*cs
= CPU(cpu
);
27086 /* Reset registers to their default values */
27087 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
27088 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
27089 #ifdef TARGET_WORDS_BIGENDIAN
27090 env
->CP0_Config0
|= (1 << CP0C0_BE
);
27092 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
27093 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
27094 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
27095 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
27096 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
27097 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
27098 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
27099 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
27100 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
27101 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
27102 << env
->cpu_model
->CP0_LLAddr_shift
;
27103 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
27104 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
27105 env
->CCRes
= env
->cpu_model
->CCRes
;
27106 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
27107 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
27108 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
27109 env
->current_tc
= 0;
27110 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
27111 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
27112 #if defined(TARGET_MIPS64)
27113 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
27114 env
->SEGMask
|= 3ULL << 62;
27117 env
->PABITS
= env
->cpu_model
->PABITS
;
27118 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
27119 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
27120 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
27121 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
27122 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
27123 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
27124 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
27125 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
27126 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
27127 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
27128 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
27129 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
27130 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
27131 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
27132 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
27133 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
27134 env
->msair
= env
->cpu_model
->MSAIR
;
27135 env
->insn_flags
= env
->cpu_model
->insn_flags
;
27137 #if defined(CONFIG_USER_ONLY)
27138 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
27139 # ifdef TARGET_MIPS64
27140 /* Enable 64-bit register mode. */
27141 env
->CP0_Status
|= (1 << CP0St_PX
);
27143 # ifdef TARGET_ABI_MIPSN64
27144 /* Enable 64-bit address mode. */
27145 env
->CP0_Status
|= (1 << CP0St_UX
);
27147 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
27148 hardware registers. */
27149 env
->CP0_HWREna
|= 0x0000000F;
27150 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
27151 env
->CP0_Status
|= (1 << CP0St_CU1
);
27153 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
27154 env
->CP0_Status
|= (1 << CP0St_MX
);
27156 # if defined(TARGET_MIPS64)
27157 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
27158 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
27159 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
27160 env
->CP0_Status
|= (1 << CP0St_FR
);
27164 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
27165 /* If the exception was raised from a delay slot,
27166 come back to the jump. */
27167 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
27168 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
27170 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
27172 env
->active_tc
.PC
= env
->exception_base
;
27173 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
27174 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
27175 env
->CP0_Wired
= 0;
27176 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
27177 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
27178 if (mips_um_ksegs_enabled()) {
27179 env
->CP0_EBase
|= 0x40000000;
27181 env
->CP0_EBase
|= (int32_t)0x80000000;
27183 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
27184 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
27186 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
27188 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
27189 /* vectored interrupts not implemented, timer on int 7,
27190 no performance counters. */
27191 env
->CP0_IntCtl
= 0xe0000000;
27195 for (i
= 0; i
< 7; i
++) {
27196 env
->CP0_WatchLo
[i
] = 0;
27197 env
->CP0_WatchHi
[i
] = 0x80000000;
27199 env
->CP0_WatchLo
[7] = 0;
27200 env
->CP0_WatchHi
[7] = 0;
27202 /* Count register increments in debug mode, EJTAG version 1 */
27203 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
27205 cpu_mips_store_count(env
, 1);
27207 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
27210 /* Only TC0 on VPE 0 starts as active. */
27211 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
27212 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
27213 env
->tcs
[i
].CP0_TCHalt
= 1;
27215 env
->active_tc
.CP0_TCHalt
= 1;
27218 if (cs
->cpu_index
== 0) {
27219 /* VPE0 starts up enabled. */
27220 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
27221 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
27223 /* TC0 starts up unhalted. */
27225 env
->active_tc
.CP0_TCHalt
= 0;
27226 env
->tcs
[0].CP0_TCHalt
= 0;
27227 /* With thread 0 active. */
27228 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
27229 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
27234 * Configure default legacy segmentation control. We use this regardless of
27235 * whether segmentation control is presented to the guest.
27237 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
27238 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
27239 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
27240 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
27241 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
27242 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27244 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
27245 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27246 (3 << CP0SC_C
)) << 16;
27247 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
27248 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27249 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
27250 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
27251 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27252 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
27253 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
27254 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
27256 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
27257 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
27258 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
27259 env
->CP0_Status
|= (1 << CP0St_FR
);
27262 if (env
->insn_flags
& ISA_MIPS32R6
) {
27264 env
->CP0_PWSize
= 0x40;
27270 env
->CP0_PWField
= 0x0C30C302;
27277 env
->CP0_PWField
= 0x02;
27280 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
27281 /* microMIPS on reset when Config3.ISA is 3 */
27282 env
->hflags
|= MIPS_HFLAG_M16
;
27286 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
27290 compute_hflags(env
);
27291 restore_fp_status(env
);
27292 restore_pamask(env
);
27293 cs
->exception_index
= EXCP_NONE
;
27295 if (semihosting_get_argc()) {
27296 /* UHI interface can be used to obtain argc and argv */
27297 env
->active_tc
.gpr
[4] = -1;
27301 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
27302 target_ulong
*data
)
27304 env
->active_tc
.PC
= data
[0];
27305 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
27306 env
->hflags
|= data
[1];
27307 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
27308 case MIPS_HFLAG_BR
:
27310 case MIPS_HFLAG_BC
:
27311 case MIPS_HFLAG_BL
:
27313 env
->btarget
= data
[2];