/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
472 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
890 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
906 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
907 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
908 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
909 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
910 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
911 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
912 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
913 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
914 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
915 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
916 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
917 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
918 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
919 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
923 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
926 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
927 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
928 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
929 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
930 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
931 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
932 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
933 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
936 /* Coprocessor 0 (with rs == C0) */
937 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
940 OPC_TLBR
= 0x01 | OPC_C0
,
941 OPC_TLBWI
= 0x02 | OPC_C0
,
942 OPC_TLBINV
= 0x03 | OPC_C0
,
943 OPC_TLBINVF
= 0x04 | OPC_C0
,
944 OPC_TLBWR
= 0x06 | OPC_C0
,
945 OPC_TLBP
= 0x08 | OPC_C0
,
946 OPC_RFE
= 0x10 | OPC_C0
,
947 OPC_ERET
= 0x18 | OPC_C0
,
948 OPC_DERET
= 0x1F | OPC_C0
,
949 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
969 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
970 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
971 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
972 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
973 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
974 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
975 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
976 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
977 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
978 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
979 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
980 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
981 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
982 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
983 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
984 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
985 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
986 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
987 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
988 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
989 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
991 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
992 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
993 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
994 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
995 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
996 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
997 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
998 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1001 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1002 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1005 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1006 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1007 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1008 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1012 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1013 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1017 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1018 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1021 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1024 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1025 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1026 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1027 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1028 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1029 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1030 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1031 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1032 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1033 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1034 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1037 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1040 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1041 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1042 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1043 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1044 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1045 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1046 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1047 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1050 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1051 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1052 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1053 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1054 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1055 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1056 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1059 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1060 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1061 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1062 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1063 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1064 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1065 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1068 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1069 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1070 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1071 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1072 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1073 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1074 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1077 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1078 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1079 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1080 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1081 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1083 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1084 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1085 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1086 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1087 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1088 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1090 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1091 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1092 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1093 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1094 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1095 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1097 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1098 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1099 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1100 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1101 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1102 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1104 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1105 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1106 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1107 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1108 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1109 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1111 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1112 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1113 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1114 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1115 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1116 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1118 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1119 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1120 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1121 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1122 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1123 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1125 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1126 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1127 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1128 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1129 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1130 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1134 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1137 OPC_LWXC1
= 0x00 | OPC_CP3
,
1138 OPC_LDXC1
= 0x01 | OPC_CP3
,
1139 OPC_LUXC1
= 0x05 | OPC_CP3
,
1140 OPC_SWXC1
= 0x08 | OPC_CP3
,
1141 OPC_SDXC1
= 0x09 | OPC_CP3
,
1142 OPC_SUXC1
= 0x0D | OPC_CP3
,
1143 OPC_PREFX
= 0x0F | OPC_CP3
,
1144 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1145 OPC_MADD_S
= 0x20 | OPC_CP3
,
1146 OPC_MADD_D
= 0x21 | OPC_CP3
,
1147 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1148 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1149 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1150 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1151 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1152 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1153 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1154 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1155 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1156 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1160 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1162 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1163 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1164 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1165 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1166 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1167 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1168 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1169 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1170 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1171 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1172 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1173 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1174 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1175 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1176 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1177 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1178 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1179 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1180 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1181 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1182 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1184 /* MI10 instruction */
1185 OPC_LD_B
= (0x20) | OPC_MSA
,
1186 OPC_LD_H
= (0x21) | OPC_MSA
,
1187 OPC_LD_W
= (0x22) | OPC_MSA
,
1188 OPC_LD_D
= (0x23) | OPC_MSA
,
1189 OPC_ST_B
= (0x24) | OPC_MSA
,
1190 OPC_ST_H
= (0x25) | OPC_MSA
,
1191 OPC_ST_W
= (0x26) | OPC_MSA
,
1192 OPC_ST_D
= (0x27) | OPC_MSA
,
1196 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1197 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1198 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1199 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1200 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1201 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1202 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1203 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1204 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1205 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1206 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1207 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1208 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1210 /* I8 instruction */
1211 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1212 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1213 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1214 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1215 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1216 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1217 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1218 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1219 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1220 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1222 /* VEC/2R/2RF instruction */
1223 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1224 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1225 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1226 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1227 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1228 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1229 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1231 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1232 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1234 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1235 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1236 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1237 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1238 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1240 /* 2RF instruction df(bit 16) = _w, _d */
1241 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1242 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1243 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1244 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1245 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1246 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1247 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1248 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1249 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1250 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1251 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1252 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1253 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1254 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1255 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1256 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1258 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1259 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1260 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1261 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1262 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1263 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1264 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1265 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1266 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1267 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1268 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1269 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1270 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1272 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1274 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1275 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1278 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1279 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1281 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1282 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1283 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1285 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1290 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1292 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1293 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1294 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1295 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1296 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1297 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1298 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1299 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1300 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1301 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1310 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1312 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1313 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1314 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1315 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1316 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1317 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1318 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1319 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1320 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1321 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1323 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1324 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1325 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1326 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1327 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1328 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1329 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1330 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1331 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1332 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 /* 3RF instruction _df(bit 21) = _w, _d */
1335 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1336 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1337 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1338 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1339 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1340 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1341 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1342 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1343 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1346 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1357 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1358 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1362 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1367 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1370 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1373 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1377 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1378 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1379 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1380 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1381 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1382 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1383 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1384 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1385 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1386 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1387 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1388 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1389 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1394 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1395 * ============================================
1397 * MXU (full name: MIPS eXtension/enhanced Unit) is an SIMD extension of MIPS32
1398 * instructions set. It is designed to fit the needs of signal, graphical and
1399 * video processing applications. MXU instruction set is used in Xburst family
1400 * of microprocessors by Ingenic.
1402 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1403 * the control register.
1405 * The notation used in MXU assembler mnemonics:
1407 * XRa, XRb, XRc, XRd - MXU registers
1408 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1409 * s12 - a subfield of an instruction code
1410 * strd2 - a subfield of an instruction code
1411 * eptn2 - a subfield of an instruction code
1412 * eptn3 - a subfield of an instruction code
1413 * optn2 - a subfield of an instruction code
1414 * optn3 - a subfield of an instruction code
1415 * sft4 - a subfield of an instruction code
1417 * Load/Store instructions Multiplication instructions
1418 * ----------------------- ---------------------------
1420 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1421 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1422 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1423 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1424 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1425 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1426 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1427 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1428 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1429 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1430 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1431 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1432 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1433 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1434 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1435 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1436 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1437 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1438 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1439 * S16SDI XRa, Rb, s10, eptn2
1440 * S8LDD XRa, Rb, s8, eptn3
1441 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1442 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1443 * S8SDI XRa, Rb, s8, eptn3
1444 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1445 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1446 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1447 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1448 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1449 * S32CPS XRa, XRb, XRc
1450 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1451 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1452 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1453 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1454 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1455 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1456 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1457 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1458 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1459 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1460 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1461 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1462 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1463 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1464 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1465 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1466 * Q8SLT XRa, XRb, XRc
1467 * Q8SLTU XRa, XRb, XRc
1468 * Q8MOVZ XRa, XRb, XRc Shift instructions
1469 * Q8MOVN XRa, XRb, XRc ------------------
1471 * D32SLL XRa, XRb, XRc, XRd, sft4
1472 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1473 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1474 * D32SARL XRa, XRb, XRc, sft4
1475 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1476 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1477 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1478 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1479 * Q16SLL XRa, XRb, XRc, XRd, sft4
1480 * Q16SLR XRa, XRb, XRc, XRd, sft4
1481 * Miscelaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1482 * ------------------------- Q16SLLV XRa, XRb, Rb
1483 * Q16SLRV XRa, XRb, Rb
1484 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1485 * S32ALN XRa, XRb, XRc, Rb
1486 * S32ALNI XRa, XRb, XRc, s3
1487 * S32LUI XRa, s8, optn3 Move instructions
1488 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1489 * S32EXTRV XRa, XRb, Rs, Rt
1490 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1491 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1497 * ┌─ 000000 ─ OPC_MXU_S32MADD
1498 * ├─ 000001 ─ OPC_MXU_S32MADDU
1499 * ├─ 000010 ─ <not assigned>
1501 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1502 * │ ├─ 001 ─ OPC_MXU_S32MIN
1503 * │ ├─ 010 ─ OPC_MXU_D16MAX
1504 * │ ├─ 011 ─ OPC_MXU_D16MIN
1505 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1506 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1507 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1508 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1509 * ├─ 000100 ─ OPC_MXU_S32MSUB
1510 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1511 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1512 * │ ├─ 001 ─ OPC_MXU_D16SLT
1513 * │ ├─ 010 ─ OPC_MXU_D16AVG
1514 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1515 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1516 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1517 * │ └─ 111 ─ OPC_MXU_Q8ADD
1520 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1521 * │ ├─ 010 ─ OPC_MXU_D16CPS
1522 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1523 * │ └─ 110 ─ OPC_MXU_Q16SAT
1524 * ├─ 001000 ─ OPC_MXU_D16MUL
1526 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1527 * │ └─ 01 ─ OPC_MXU_D16MULE
1528 * ├─ 001010 ─ OPC_MXU_D16MAC
1529 * ├─ 001011 ─ OPC_MXU_D16MACF
1530 * ├─ 001100 ─ OPC_MXU_D16MADL
1532 * ├─ 001101 ─ OPC_MXU__POOL04 ─┬─ 00 ─ OPC_MXU_S16MAD
1533 * │ └─ 01 ─ OPC_MXU_S16MAD_1
1534 * ├─ 001110 ─ OPC_MXU_Q16ADD
1535 * ├─ 001111 ─ OPC_MXU_D16MACE
1537 * ├─ 010000 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32LDD
1538 * │ └─ 1 ─ OPC_MXU_S32LDDR
1541 * ├─ 010001 ─ OPC_MXU__POOL06 ─┬─ 0 ─ OPC_MXU_S32STD
1542 * │ └─ 1 ─ OPC_MXU_S32STDR
1545 * ├─ 010010 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1546 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1549 * ├─ 010011 ─ OPC_MXU__POOL08 ─┬─ 0000 ─ OPC_MXU_S32STDV
1550 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1553 * ├─ 010100 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32LDI
1554 * │ └─ 1 ─ OPC_MXU_S32LDIR
1557 * ├─ 010101 ─ OPC_MXU__POOL10 ─┬─ 0 ─ OPC_MXU_S32SDI
1558 * │ └─ 1 ─ OPC_MXU_S32SDIR
1561 * ├─ 010110 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1562 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1565 * ├─ 010111 ─ OPC_MXU__POOL12 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1566 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1567 * ├─ 011000 ─ OPC_MXU_D32ADD
1569 * MXU ├─ 011001 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_D32ACC
1570 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1571 * │ └─ 10 ─ OPC_MXU_D32ASUM
1572 * ├─ 011010 ─ <not assigned>
1574 * ├─ 011011 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q16ACC
1575 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1576 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1579 * ├─ 011100 ─ OPC_MXU__POOL15 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1580 * │ ├─ 01 ─ OPC_MXU_D8SUM
1581 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1582 * ├─ 011110 ─ <not assigned>
1583 * ├─ 011111 ─ <not assigned>
1584 * ├─ 100000 ─ <not assigned>
1585 * ├─ 100001 ─ <not assigned>
1586 * ├─ 100010 ─ OPC_MXU_S8LDD
1587 * ├─ 100011 ─ OPC_MXU_S8STD
1588 * ├─ 100100 ─ OPC_MXU_S8LDI
1589 * ├─ 100101 ─ OPC_MXU_S8SDI
 * ├─ 100110 ─ OPC_MXU__POOL16 ─┬─ 00 ─ OPC_MXU_S32MUL
 * │ ├─ 01 ─ OPC_MXU_S32MULU
 * │ ├─ 10 ─ OPC_MXU_S32EXTR
 * │ └─ 11 ─ OPC_MXU_S32EXTRV
1597 * ├─ 100111 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_D32SARW
1598 * │ ├─ 001 ─ OPC_MXU_S32ALN
1599 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1600 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1601 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1602 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1603 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1604 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1605 * ├─ 101000 ─ <not assigned>
1606 * ├─ 101001 ─ <not assigned>
1607 * ├─ 101010 ─ <not assigned>
1608 * ├─ 101011 ─ <not assigned>
1609 * ├─ 101100 ─ <not assigned>
1610 * ├─ 101101 ─ <not assigned>
1611 * ├─ 101110 ─ OPC_MXU_S32M2I
1612 * ├─ 101111 ─ OPC_MXU_S32I2M
1613 * ├─ 110000 ─ OPC_MXU_D32SLL
1614 * ├─ 110001 ─ OPC_MXU_D32SLR
1615 * ├─ 110010 ─ OPC_MXU_D32SARL
1616 * ├─ 110011 ─ OPC_MXU_D32SAR
1617 * ├─ 110100 ─ OPC_MXU_Q16SLL
1618 * ├─ 110101 ─ OPC_MXU_Q16SLR 20..18
1619 * ├─ 110110 ─ OPC_MXU__POOL18 ─┬─ 000 ─ OPC_MXU_D32SLLV
1620 * │ ├─ 001 ─ OPC_MXU_D32SLRV
1621 * │ ├─ 010 ─ OPC_MXU_D32SARV
1622 * │ ├─ 011 ─ OPC_MXU_Q16SLLV
1623 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1624 * │ └─ 101 ─ OPC_MXU_Q16SARV
1625 * ├─ 110111 ─ OPC_MXU_Q16SAR
1627 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1628 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1631 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1632 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1633 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1634 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1635 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
 * │ └─ 101 ─ OPC_MXU_S32MOVN
1639 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1640 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1641 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1642 * ├─ 111100 ─ OPC_MXU_Q8MADL
1643 * ├─ 111101 ─ OPC_MXU_S32SFL
1644 * ├─ 111110 ─ OPC_MXU_Q8SAD
1645 * └─ 111111 ─ <not assigned>
1650 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1651 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
/*
 * MXU major opcode field values (6 bits).  OPC_MXU__POOLnn entries select
 * secondary decode tables; gaps are unassigned codes.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    /* not assigned 0x02 */
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU__POOL04  = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL05  = 0x10,
    OPC_MXU__POOL06  = 0x11,
    OPC_MXU__POOL07  = 0x12,
    OPC_MXU__POOL08  = 0x13,
    OPC_MXU__POOL09  = 0x14,
    OPC_MXU__POOL10  = 0x15,
    OPC_MXU__POOL11  = 0x16,
    OPC_MXU__POOL12  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL13  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL14  = 0x1B,
    OPC_MXU__POOL15  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL16  = 0x26,
    OPC_MXU__POOL17  = 0x27,
    /* not assigned 0x28 */  /* NOTE(review): opcode tree above lists
                                OPC_MXU_LXB here — confirm against manual */
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
};
/* MXU pool 00 minor opcodes. */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};

/* MXU pool 01 minor opcodes (0x06 is unassigned). */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};

/* MXU pool 02 minor opcodes (odd codes are unassigned). */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};

/* MXU pool 03 minor opcodes. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};

/* MXU pool 04 minor opcodes. */
enum {
    OPC_MXU_S16MAD   = 0x00,
    OPC_MXU_S16MAD_1 = 0x01,
};

/* MXU pool 05 minor opcodes. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};

/* MXU pool 06 minor opcodes. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};

/* MXU pool 07 minor opcodes. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};

/* MXU pool 08 minor opcodes. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};

/* MXU pool 09 minor opcodes. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};

/* MXU pool 10 minor opcodes. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};

/* MXU pool 11 minor opcodes. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};

/* MXU pool 12 minor opcodes. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};

/* MXU pool 13 minor opcodes. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};

/* MXU pool 14 minor opcodes. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};

/* MXU pool 15 minor opcodes. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};

/* MXU pool 16 minor opcodes. */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};

/* MXU pool 17 minor opcodes. */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32NOR   = 0x03,
    OPC_MXU_S32AND   = 0x04,
    OPC_MXU_S32OR    = 0x05,
    OPC_MXU_S32XOR   = 0x06,
    OPC_MXU_S32LUI   = 0x07,
};

/* MXU pool 18 minor opcodes (0x02 and 0x06 are unassigned). */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};

/* MXU pool 19 minor opcodes. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};

/* MXU pool 20 minor opcodes. */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};

/* MXU pool 21 minor opcodes. */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
1931 * Overview of the TX79-specific instruction set
1932 * =============================================
1934 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1935 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1936 * instructions and certain multimedia instructions (MMIs). These MMIs
1937 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1938 * or sixteen 8-bit paths.
1942 * The Toshiba TX System RISC TX79 Core Architecture manual,
1943 * https://wiki.qemu.org/File:C790.pdf
1945 * Three-Operand Multiply and Multiply-Add (4 instructions)
1946 * --------------------------------------------------------
1947 * MADD [rd,] rs, rt Multiply/Add
1948 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1949 * MULT [rd,] rs, rt Multiply (3-operand)
1950 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1952 * Multiply Instructions for Pipeline 1 (10 instructions)
1953 * ------------------------------------------------------
1954 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1955 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1956 * DIV1 rs, rt Divide Pipeline 1
1957 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1958 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1959 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1960 * MFHI1 rd Move From HI1 Register
1961 * MFLO1 rd Move From LO1 Register
1962 * MTHI1 rs Move To HI1 Register
1963 * MTLO1 rs Move To LO1 Register
1965 * Arithmetic (19 instructions)
1966 * ----------------------------
1967 * PADDB rd, rs, rt Parallel Add Byte
1968 * PSUBB rd, rs, rt Parallel Subtract Byte
1969 * PADDH rd, rs, rt Parallel Add Halfword
1970 * PSUBH rd, rs, rt Parallel Subtract Halfword
1971 * PADDW rd, rs, rt Parallel Add Word
1972 * PSUBW rd, rs, rt Parallel Subtract Word
1973 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1974 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1975 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1976 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1977 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1978 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1979 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1980 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1981 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1982 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1983 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1984 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1985 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1987 * Min/Max (4 instructions)
1988 * ------------------------
1989 * PMAXH rd, rs, rt Parallel Maximum Halfword
1990 * PMINH rd, rs, rt Parallel Minimum Halfword
1991 * PMAXW rd, rs, rt Parallel Maximum Word
1992 * PMINW rd, rs, rt Parallel Minimum Word
1994 * Absolute (2 instructions)
1995 * -------------------------
1996 * PABSH rd, rt Parallel Absolute Halfword
1997 * PABSW rd, rt Parallel Absolute Word
1999 * Logical (4 instructions)
2000 * ------------------------
2001 * PAND rd, rs, rt Parallel AND
2002 * POR rd, rs, rt Parallel OR
2003 * PXOR rd, rs, rt Parallel XOR
2004 * PNOR rd, rs, rt Parallel NOR
2006 * Shift (9 instructions)
2007 * ----------------------
2008 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2009 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2010 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2011 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2012 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2013 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2014 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2015 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2016 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2018 * Compare (6 instructions)
2019 * ------------------------
2020 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2021 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2022 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2023 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2024 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2025 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2027 * LZC (1 instruction)
2028 * -------------------
2029 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2031 * Quadword Load and Store (2 instructions)
2032 * ----------------------------------------
2033 * LQ rt, offset(base) Load Quadword
2034 * SQ rt, offset(base) Store Quadword
2036 * Multiply and Divide (19 instructions)
2037 * -------------------------------------
2038 * PMULTW rd, rs, rt Parallel Multiply Word
2039 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2040 * PDIVW rs, rt Parallel Divide Word
2041 * PDIVUW rs, rt Parallel Divide Unsigned Word
2042 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2043 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2044 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2045 * PMULTH rd, rs, rt Parallel Multiply Halfword
2046 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2047 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2048 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2049 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2050 * PDIVBW rs, rt Parallel Divide Broadcast Word
2051 * PMFHI rd Parallel Move From HI Register
2052 * PMFLO rd Parallel Move From LO Register
2053 * PMTHI rs Parallel Move To HI Register
2054 * PMTLO rs Parallel Move To LO Register
2055 * PMFHL rd Parallel Move From HI/LO Register
2056 * PMTHL rs Parallel Move To HI/LO Register
2058 * Pack/Extend (11 instructions)
2059 * -----------------------------
2060 * PPAC5 rd, rt Parallel Pack to 5 bits
2061 * PPACB rd, rs, rt Parallel Pack to Byte
2062 * PPACH rd, rs, rt Parallel Pack to Halfword
2063 * PPACW rd, rs, rt Parallel Pack to Word
2064 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2065 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2066 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2067 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2068 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2069 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2070 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2072 * Others (16 instructions)
2073 * ------------------------
2074 * PCPYH rd, rt Parallel Copy Halfword
2075 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2076 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2077 * PREVH rd, rt Parallel Reverse Halfword
2078 * PINTH rd, rs, rt Parallel Interleave Halfword
2079 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2080 * PEXEH rd, rt Parallel Exchange Even Halfword
2081 * PEXCH rd, rt Parallel Exchange Center Halfword
2082 * PEXEW rd, rt Parallel Exchange Even Word
2083 * PEXCW rd, rt Parallel Exchange Center Word
2084 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2085 * MFSA rd Move from Shift Amount Register
2086 * MTSA rs Move to Shift Amount Register
2087 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2088 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2089 * PROT3W rd, rt Parallel Rotate 3 Words
2091 * The TX79-specific Multimedia Instruction encodings
2092 * ==================================================
2094 * TX79 Multimedia Instruction encoding table keys:
2096 * * This code is reserved for future use. An attempt to execute it
2097 * causes a Reserved Instruction exception.
2098 * % This code indicates an instruction class. The instruction word
2099 * must be further decoded by examining additional tables that show
2100 * the values for other instruction fields.
2101 * # This code is reserved for the unsupported instructions DMULT,
2102 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2103 * to execute it causes a Reserved Instruction exception.
2105 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2108 * +--------+----------------------------------------+
2110 * +--------+----------------------------------------+
2112 * opcode bits 28..26
2113 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2114 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2115 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2116 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2117 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2118 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2119 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2120 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2121 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2122 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2123 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/* TX79 major opcodes that carry multimedia instructions (see table above). */
enum {
    TX79_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    TX79_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    TX79_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2133 * TX79 Multimedia Instructions with opcode field = MMI:
2136 * +--------+-------------------------------+--------+
2137 * | MMI | |function|
2138 * +--------+-------------------------------+--------+
2140 * function bits 2..0
2141 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2142 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2143 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2144 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2145 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2146 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2147 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2148 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2149 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2150 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2151 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2154 #define MASK_TX79_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2156 TX79_MMI_MADD
= 0x00 | TX79_CLASS_MMI
, /* Same as OPC_MADD */
2157 TX79_MMI_MADDU
= 0x01 | TX79_CLASS_MMI
, /* Same as OPC_MADDU */
2158 TX79_MMI_PLZCW
= 0x04 | TX79_CLASS_MMI
,
2159 TX79_MMI_CLASS_MMI0
= 0x08 | TX79_CLASS_MMI
,
2160 TX79_MMI_CLASS_MMI2
= 0x09 | TX79_CLASS_MMI
,
2161 TX79_MMI_MFHI1
= 0x10 | TX79_CLASS_MMI
, /* Same minor as OPC_MFHI */
2162 TX79_MMI_MTHI1
= 0x11 | TX79_CLASS_MMI
, /* Same minor as OPC_MTHI */
2163 TX79_MMI_MFLO1
= 0x12 | TX79_CLASS_MMI
, /* Same minor as OPC_MFLO */
2164 TX79_MMI_MTLO1
= 0x13 | TX79_CLASS_MMI
, /* Same minor as OPC_MTLO */
2165 TX79_MMI_MULT1
= 0x18 | TX79_CLASS_MMI
, /* Same minor as OPC_MULT */
2166 TX79_MMI_MULTU1
= 0x19 | TX79_CLASS_MMI
, /* Same minor as OPC_MULTU */
2167 TX79_MMI_DIV1
= 0x1A | TX79_CLASS_MMI
, /* Same minor as OPC_DIV */
2168 TX79_MMI_DIVU1
= 0x1B | TX79_CLASS_MMI
, /* Same minor as OPC_DIVU */
2169 TX79_MMI_MADD1
= 0x20 | TX79_CLASS_MMI
,
2170 TX79_MMI_MADDU1
= 0x21 | TX79_CLASS_MMI
,
2171 TX79_MMI_CLASS_MMI1
= 0x28 | TX79_CLASS_MMI
,
2172 TX79_MMI_CLASS_MMI3
= 0x29 | TX79_CLASS_MMI
,
2173 TX79_MMI_PMFHL
= 0x30 | TX79_CLASS_MMI
,
2174 TX79_MMI_PMTHL
= 0x31 | TX79_CLASS_MMI
,
2175 TX79_MMI_PSLLH
= 0x34 | TX79_CLASS_MMI
,
2176 TX79_MMI_PSRLH
= 0x36 | TX79_CLASS_MMI
,
2177 TX79_MMI_PSRAH
= 0x37 | TX79_CLASS_MMI
,
2178 TX79_MMI_PSLLW
= 0x3C | TX79_CLASS_MMI
,
2179 TX79_MMI_PSRLW
= 0x3E | TX79_CLASS_MMI
,
2180 TX79_MMI_PSRAW
= 0x3F | TX79_CLASS_MMI
,
2184 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI0:
2187 * +--------+----------------------+--------+--------+
2188 * | MMI | |function| MMI0 |
2189 * +--------+----------------------+--------+--------+
2191 * function bits 7..6
2192 * bits | 0 | 1 | 2 | 3
2193 * 10..8 | 00 | 01 | 10 | 11
2194 * -------+-------+-------+-------+-------
2195 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2196 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2197 * 2 010 | PADDB | PSUBB | PCGTB | *
2198 * 3 011 | * | * | * | *
2199 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2200 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2201 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2202 * 7 111 | * | * | PEXT5 | PPAC5
2205 #define MASK_TX79_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2207 TX79_MMI0_PADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI0
,
2208 TX79_MMI0_PSUBW
= (0x01 << 6) | TX79_MMI_CLASS_MMI0
,
2209 TX79_MMI0_PCGTW
= (0x02 << 6) | TX79_MMI_CLASS_MMI0
,
2210 TX79_MMI0_PMAXW
= (0x03 << 6) | TX79_MMI_CLASS_MMI0
,
2211 TX79_MMI0_PADDH
= (0x04 << 6) | TX79_MMI_CLASS_MMI0
,
2212 TX79_MMI0_PSUBH
= (0x05 << 6) | TX79_MMI_CLASS_MMI0
,
2213 TX79_MMI0_PCGTH
= (0x06 << 6) | TX79_MMI_CLASS_MMI0
,
2214 TX79_MMI0_PMAXH
= (0x07 << 6) | TX79_MMI_CLASS_MMI0
,
2215 TX79_MMI0_PADDB
= (0x08 << 6) | TX79_MMI_CLASS_MMI0
,
2216 TX79_MMI0_PSUBB
= (0x09 << 6) | TX79_MMI_CLASS_MMI0
,
2217 TX79_MMI0_PCGTB
= (0x0A << 6) | TX79_MMI_CLASS_MMI0
,
2218 TX79_MMI0_PADDSW
= (0x10 << 6) | TX79_MMI_CLASS_MMI0
,
2219 TX79_MMI0_PSUBSW
= (0x11 << 6) | TX79_MMI_CLASS_MMI0
,
2220 TX79_MMI0_PEXTLW
= (0x12 << 6) | TX79_MMI_CLASS_MMI0
,
2221 TX79_MMI0_PPACW
= (0x13 << 6) | TX79_MMI_CLASS_MMI0
,
2222 TX79_MMI0_PADDSH
= (0x14 << 6) | TX79_MMI_CLASS_MMI0
,
2223 TX79_MMI0_PSUBSH
= (0x15 << 6) | TX79_MMI_CLASS_MMI0
,
2224 TX79_MMI0_PEXTLH
= (0x16 << 6) | TX79_MMI_CLASS_MMI0
,
2225 TX79_MMI0_PPACH
= (0x17 << 6) | TX79_MMI_CLASS_MMI0
,
2226 TX79_MMI0_PADDSB
= (0x18 << 6) | TX79_MMI_CLASS_MMI0
,
2227 TX79_MMI0_PSUBSB
= (0x19 << 6) | TX79_MMI_CLASS_MMI0
,
2228 TX79_MMI0_PEXTLB
= (0x1A << 6) | TX79_MMI_CLASS_MMI0
,
2229 TX79_MMI0_PPACB
= (0x1B << 6) | TX79_MMI_CLASS_MMI0
,
2230 TX79_MMI0_PEXT5
= (0x1E << 6) | TX79_MMI_CLASS_MMI0
,
2231 TX79_MMI0_PPAC5
= (0x1F << 6) | TX79_MMI_CLASS_MMI0
,
2235 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI1:
2238 * +--------+----------------------+--------+--------+
2239 * | MMI | |function| MMI1 |
2240 * +--------+----------------------+--------+--------+
2242 * function bits 7..6
2243 * bits | 0 | 1 | 2 | 3
2244 * 10..8 | 00 | 01 | 10 | 11
2245 * -------+-------+-------+-------+-------
2246 * 0 000 | * | PABSW | PCEQW | PMINW
2247 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2248 * 2 010 | * | * | PCEQB | *
2249 * 3 011 | * | * | * | *
2250 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2251 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2252 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2253 * 7 111 | * | * | * | *
2256 #define MASK_TX79_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2258 TX79_MMI1_PABSW
= (0x01 << 6) | TX79_MMI_CLASS_MMI1
,
2259 TX79_MMI1_PCEQW
= (0x02 << 6) | TX79_MMI_CLASS_MMI1
,
2260 TX79_MMI1_PMINW
= (0x03 << 6) | TX79_MMI_CLASS_MMI1
,
2261 TX79_MMI1_PADSBH
= (0x04 << 6) | TX79_MMI_CLASS_MMI1
,
2262 TX79_MMI1_PABSH
= (0x05 << 6) | TX79_MMI_CLASS_MMI1
,
2263 TX79_MMI1_PCEQH
= (0x06 << 6) | TX79_MMI_CLASS_MMI1
,
2264 TX79_MMI1_PMINH
= (0x07 << 6) | TX79_MMI_CLASS_MMI1
,
2265 TX79_MMI1_PCEQB
= (0x0A << 6) | TX79_MMI_CLASS_MMI1
,
2266 TX79_MMI1_PADDUW
= (0x10 << 6) | TX79_MMI_CLASS_MMI1
,
2267 TX79_MMI1_PSUBUW
= (0x11 << 6) | TX79_MMI_CLASS_MMI1
,
2268 TX79_MMI1_PEXTUW
= (0x12 << 6) | TX79_MMI_CLASS_MMI1
,
2269 TX79_MMI1_PADDUH
= (0x14 << 6) | TX79_MMI_CLASS_MMI1
,
2270 TX79_MMI1_PSUBUH
= (0x15 << 6) | TX79_MMI_CLASS_MMI1
,
2271 TX79_MMI1_PEXTUH
= (0x16 << 6) | TX79_MMI_CLASS_MMI1
,
2272 TX79_MMI1_PADDUB
= (0x18 << 6) | TX79_MMI_CLASS_MMI1
,
2273 TX79_MMI1_PSUBUB
= (0x19 << 6) | TX79_MMI_CLASS_MMI1
,
2274 TX79_MMI1_PEXTUB
= (0x1A << 6) | TX79_MMI_CLASS_MMI1
,
2275 TX79_MMI1_QFSRV
= (0x1B << 6) | TX79_MMI_CLASS_MMI1
,
2279 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI2:
2282 * +--------+----------------------+--------+--------+
2283 * | MMI | |function| MMI2 |
2284 * +--------+----------------------+--------+--------+
2286 * function bits 7..6
2287 * bits | 0 | 1 | 2 | 3
2288 * 10..8 | 00 | 01 | 10 | 11
2289 * -------+-------+-------+-------+-------
2290 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2291 * 1 001 | PMSUBW| * | * | *
2292 * 2 010 | PMFHI | PMFLO | PINTH | *
2293 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2294 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2295 * 5 101 | PMSUBH| PHMSBH| * | *
2296 * 6 110 | * | * | PEXEH | PREVH
2297 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2300 #define MASK_TX79_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2302 TX79_MMI2_PMADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI2
,
2303 TX79_MMI2_PSLLVW
= (0x02 << 6) | TX79_MMI_CLASS_MMI2
,
2304 TX79_MMI2_PSRLVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI2
,
2305 TX79_MMI2_PMSUBW
= (0x04 << 6) | TX79_MMI_CLASS_MMI2
,
2306 TX79_MMI2_PMFHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI2
,
2307 TX79_MMI2_PMFLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI2
,
2308 TX79_MMI2_PINTH
= (0x0A << 6) | TX79_MMI_CLASS_MMI2
,
2309 TX79_MMI2_PMULTW
= (0x0C << 6) | TX79_MMI_CLASS_MMI2
,
2310 TX79_MMI2_PDIVW
= (0x0D << 6) | TX79_MMI_CLASS_MMI2
,
2311 TX79_MMI2_PCPYLD
= (0x0E << 6) | TX79_MMI_CLASS_MMI2
,
2312 TX79_MMI2_PMADDH
= (0x10 << 6) | TX79_MMI_CLASS_MMI2
,
2313 TX79_MMI2_PHMADH
= (0x11 << 6) | TX79_MMI_CLASS_MMI2
,
2314 TX79_MMI2_PAND
= (0x12 << 6) | TX79_MMI_CLASS_MMI2
,
2315 TX79_MMI2_PXOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI2
,
2316 TX79_MMI2_PMSUBH
= (0x14 << 6) | TX79_MMI_CLASS_MMI2
,
2317 TX79_MMI2_PHMSBH
= (0x15 << 6) | TX79_MMI_CLASS_MMI2
,
2318 TX79_MMI2_PEXEH
= (0x1A << 6) | TX79_MMI_CLASS_MMI2
,
2319 TX79_MMI2_PREVH
= (0x1B << 6) | TX79_MMI_CLASS_MMI2
,
2320 TX79_MMI2_PMULTH
= (0x1C << 6) | TX79_MMI_CLASS_MMI2
,
2321 TX79_MMI2_PDIVBW
= (0x1D << 6) | TX79_MMI_CLASS_MMI2
,
2322 TX79_MMI2_PEXEW
= (0x1E << 6) | TX79_MMI_CLASS_MMI2
,
2323 TX79_MMI2_PROT3W
= (0x1F << 6) | TX79_MMI_CLASS_MMI2
,
2327 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI3:
2330 * +--------+----------------------+--------+--------+
2331 * | MMI | |function| MMI3 |
2332 * +--------+----------------------+--------+--------+
2334 * function bits 7..6
2335 * bits | 0 | 1 | 2 | 3
2336 * 10..8 | 00 | 01 | 10 | 11
2337 * -------+-------+-------+-------+-------
2338 * 0 000 |PMADDUW| * | * | PSRAVW
2339 * 1 001 | * | * | * | *
2340 * 2 010 | PMTHI | PMTLO | PINTEH| *
2341 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2342 * 4 100 | * | * | POR | PNOR
2343 * 5 101 | * | * | * | *
2344 * 6 110 | * | * | PEXCH | PCPYH
2345 * 7 111 | * | * | PEXCW | *
2348 #define MASK_TX79_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2350 TX79_MMI3_PMADDUW
= (0x00 << 6) | TX79_MMI_CLASS_MMI3
,
2351 TX79_MMI3_PSRAVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI3
,
2352 TX79_MMI3_PMTHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI3
,
2353 TX79_MMI3_PMTLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI3
,
2354 TX79_MMI3_PINTEH
= (0x0A << 6) | TX79_MMI_CLASS_MMI3
,
2355 TX79_MMI3_PMULTUW
= (0x0C << 6) | TX79_MMI_CLASS_MMI3
,
2356 TX79_MMI3_PDIVUW
= (0x0D << 6) | TX79_MMI_CLASS_MMI3
,
2357 TX79_MMI3_PCPYUD
= (0x0E << 6) | TX79_MMI_CLASS_MMI3
,
2358 TX79_MMI3_POR
= (0x12 << 6) | TX79_MMI_CLASS_MMI3
,
2359 TX79_MMI3_PNOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI3
,
2360 TX79_MMI3_PEXCH
= (0x1A << 6) | TX79_MMI_CLASS_MMI3
,
2361 TX79_MMI3_PCPYH
= (0x1B << 6) | TX79_MMI_CLASS_MMI3
,
2362 TX79_MMI3_PEXCW
= (0x1E << 6) | TX79_MMI_CLASS_MMI3
,
2365 /* global register indices */
2366 static TCGv cpu_gpr
[32], cpu_PC
;
2367 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2368 static TCGv cpu_dspctrl
, btarget
, bcond
;
2369 static TCGv_i32 hflags
;
2370 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2371 static TCGv_i64 fpu_f64
[32];
2372 static TCGv_i64 msa_wr_d
[64];
2374 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: box each trailing immediate argument into a
 * TCGv_i32 temporary, call the helper with cpu_env first, then free the
 * temporary.  Naming: <rets>e<tcg args>i — e.g. 1e2i takes a return, two
 * TCG arguments and one immediate.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2418 typedef struct DisasContext
{
2419 DisasContextBase base
;
2420 target_ulong saved_pc
;
2421 target_ulong page_start
;
2423 uint64_t insn_flags
;
2424 int32_t CP0_Config1
;
2425 int32_t CP0_Config2
;
2426 int32_t CP0_Config3
;
2427 int32_t CP0_Config5
;
2428 /* Routine used to access memory */
2430 TCGMemOp default_tcg_memop_mask
;
2431 uint32_t hflags
, saved_hflags
;
2432 target_ulong btarget
;
2443 int CP0_LLAddr_shift
;
2452 #define DISAS_STOP DISAS_TARGET_0
2453 #define DISAS_EXIT DISAS_TARGET_1
/* ABI names of the 32 general purpose registers, indexed by register number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

/* Names of the DSP HI accumulators. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

/* Names of the DSP LO accumulators. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

/* Names of the 32 floating point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Names of the MSA register halves (two 64-bit doublewords per register). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Emit a disassembly-log message when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic: PC, raw opcode and its major/minor
   fields.  Requires a DisasContext named `ctx' in scope. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2514 /* General purpose registers moves. */
2515 static inline void gen_load_gpr (TCGv t
, int reg
)
2518 tcg_gen_movi_tl(t
, 0);
2520 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2523 static inline void gen_store_gpr (TCGv t
, int reg
)
2526 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2529 /* Moves to/from shadow registers. */
2530 static inline void gen_load_srsgpr (int from
, int to
)
2532 TCGv t0
= tcg_temp_new();
2535 tcg_gen_movi_tl(t0
, 0);
2537 TCGv_i32 t2
= tcg_temp_new_i32();
2538 TCGv_ptr addr
= tcg_temp_new_ptr();
2540 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2541 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2542 tcg_gen_andi_i32(t2
, t2
, 0xf);
2543 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2544 tcg_gen_ext_i32_ptr(addr
, t2
);
2545 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2547 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2548 tcg_temp_free_ptr(addr
);
2549 tcg_temp_free_i32(t2
);
2551 gen_store_gpr(t0
, to
);
2555 static inline void gen_store_srsgpr (int from
, int to
)
2558 TCGv t0
= tcg_temp_new();
2559 TCGv_i32 t2
= tcg_temp_new_i32();
2560 TCGv_ptr addr
= tcg_temp_new_ptr();
2562 gen_load_gpr(t0
, from
);
2563 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2564 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2565 tcg_gen_andi_i32(t2
, t2
, 0xf);
2566 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2567 tcg_gen_ext_i32_ptr(addr
, t2
);
2568 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2570 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2571 tcg_temp_free_ptr(addr
);
2572 tcg_temp_free_i32(t2
);
2578 static inline void gen_save_pc(target_ulong pc
)
2580 tcg_gen_movi_tl(cpu_PC
, pc
);
2583 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2585 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2586 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2587 gen_save_pc(ctx
->base
.pc_next
);
2588 ctx
->saved_pc
= ctx
->base
.pc_next
;
2590 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2591 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2592 ctx
->saved_hflags
= ctx
->hflags
;
2593 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2599 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2605 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2607 ctx
->saved_hflags
= ctx
->hflags
;
2608 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2614 ctx
->btarget
= env
->btarget
;
2619 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2621 TCGv_i32 texcp
= tcg_const_i32(excp
);
2622 TCGv_i32 terr
= tcg_const_i32(err
);
2623 save_cpu_state(ctx
, 1);
2624 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2625 tcg_temp_free_i32(terr
);
2626 tcg_temp_free_i32(texcp
);
2627 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2630 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2632 gen_helper_0e0i(raise_exception
, excp
);
2635 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2637 generate_exception_err(ctx
, excp
, 0);
2640 /* Floating point register moves. */
2641 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2643 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2644 generate_exception(ctx
, EXCP_RI
);
2646 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2649 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2652 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2653 generate_exception(ctx
, EXCP_RI
);
2655 t64
= tcg_temp_new_i64();
2656 tcg_gen_extu_i32_i64(t64
, t
);
2657 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2658 tcg_temp_free_i64(t64
);
2661 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2663 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2664 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2666 gen_load_fpr32(ctx
, t
, reg
| 1);
2670 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2672 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2673 TCGv_i64 t64
= tcg_temp_new_i64();
2674 tcg_gen_extu_i32_i64(t64
, t
);
2675 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2676 tcg_temp_free_i64(t64
);
2678 gen_store_fpr32(ctx
, t
, reg
| 1);
2682 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2684 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2685 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2687 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2691 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2693 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2694 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2697 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2698 t0
= tcg_temp_new_i64();
2699 tcg_gen_shri_i64(t0
, t
, 32);
2700 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2701 tcg_temp_free_i64(t0
);
2705 static inline int get_fp_bit (int cc
)
2713 /* Addresses computation */
2714 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2716 tcg_gen_add_tl(ret
, arg0
, arg1
);
2718 #if defined(TARGET_MIPS64)
2719 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2720 tcg_gen_ext32s_i64(ret
, ret
);
2725 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2728 tcg_gen_addi_tl(ret
, base
, ofs
);
2730 #if defined(TARGET_MIPS64)
2731 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2732 tcg_gen_ext32s_i64(ret
, ret
);
2737 /* Addresses computation (translation time) */
2738 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2741 target_long sum
= base
+ offset
;
2743 #if defined(TARGET_MIPS64)
2744 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2751 /* Sign-extract the low 32-bits to a target_long. */
2752 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2754 #if defined(TARGET_MIPS64)
2755 tcg_gen_ext32s_i64(ret
, arg
);
2757 tcg_gen_extrl_i64_i32(ret
, arg
);
2761 /* Sign-extract the high 32-bits to a target_long. */
2762 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2764 #if defined(TARGET_MIPS64)
2765 tcg_gen_sari_i64(ret
, arg
, 32);
2767 tcg_gen_extrh_i64_i32(ret
, arg
);
2771 static inline void check_cp0_enabled(DisasContext
*ctx
)
2773 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2774 generate_exception_err(ctx
, EXCP_CpU
, 0);
2777 static inline void check_cp1_enabled(DisasContext
*ctx
)
2779 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2780 generate_exception_err(ctx
, EXCP_CpU
, 1);
2783 /* Verify that the processor is running with COP1X instructions enabled.
2784 This is associated with the nabla symbol in the MIPS32 and MIPS64
2787 static inline void check_cop1x(DisasContext
*ctx
)
2789 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2790 generate_exception_end(ctx
, EXCP_RI
);
2793 /* Verify that the processor is running with 64-bit floating-point
2794 operations enabled. */
2796 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2798 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2799 generate_exception_end(ctx
, EXCP_RI
);
2803 * Verify if floating point register is valid; an operation is not defined
2804 * if bit 0 of any register specification is set and the FR bit in the
2805 * Status register equals zero, since the register numbers specify an
2806 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2807 * in the Status register equals one, both even and odd register numbers
2808 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2810 * Multiple 64 bit wide registers can be checked by calling
2811 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2813 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2815 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2816 generate_exception_end(ctx
, EXCP_RI
);
2819 /* Verify that the processor is running with DSP instructions enabled.
2820 This is enabled by CP0 Status register MX(24) bit.
2823 static inline void check_dsp(DisasContext
*ctx
)
2825 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2826 if (ctx
->insn_flags
& ASE_DSP
) {
2827 generate_exception_end(ctx
, EXCP_DSPDIS
);
2829 generate_exception_end(ctx
, EXCP_RI
);
2834 static inline void check_dsp_r2(DisasContext
*ctx
)
2836 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2837 if (ctx
->insn_flags
& ASE_DSP
) {
2838 generate_exception_end(ctx
, EXCP_DSPDIS
);
2840 generate_exception_end(ctx
, EXCP_RI
);
2845 static inline void check_dsp_r3(DisasContext
*ctx
)
2847 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2848 if (ctx
->insn_flags
& ASE_DSP
) {
2849 generate_exception_end(ctx
, EXCP_DSPDIS
);
2851 generate_exception_end(ctx
, EXCP_RI
);
2856 /* This code generates a "reserved instruction" exception if the
2857 CPU does not support the instruction set corresponding to flags. */
2858 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2860 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2861 generate_exception_end(ctx
, EXCP_RI
);
2865 /* This code generates a "reserved instruction" exception if the
2866 CPU has corresponding flag set which indicates that the instruction
2867 has been removed. */
2868 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2870 if (unlikely(ctx
->insn_flags
& flags
)) {
2871 generate_exception_end(ctx
, EXCP_RI
);
2875 /* This code generates a "reserved instruction" exception if the
2876 CPU does not support 64-bit paired-single (PS) floating point data type */
2877 static inline void check_ps(DisasContext
*ctx
)
2879 if (unlikely(!ctx
->ps
)) {
2880 generate_exception(ctx
, EXCP_RI
);
2882 check_cp1_64bitmode(ctx
);
2885 #ifdef TARGET_MIPS64
2886 /* This code generates a "reserved instruction" exception if 64-bit
2887 instructions are not enabled. */
2888 static inline void check_mips_64(DisasContext
*ctx
)
2890 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
2891 generate_exception_end(ctx
, EXCP_RI
);
2895 #ifndef CONFIG_USER_ONLY
2896 static inline void check_mvh(DisasContext
*ctx
)
2898 if (unlikely(!ctx
->mvh
)) {
2899 generate_exception(ctx
, EXCP_RI
);
2905 * This code generates a "reserved instruction" exception if the
2906 * Config5 XNP bit is set.
2908 static inline void check_xnp(DisasContext
*ctx
)
2910 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2911 generate_exception_end(ctx
, EXCP_RI
);
2915 #ifndef CONFIG_USER_ONLY
2917 * This code generates a "reserved instruction" exception if the
2918 * Config3 PW bit is NOT set.
2920 static inline void check_pw(DisasContext
*ctx
)
2922 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2923 generate_exception_end(ctx
, EXCP_RI
);
2929 * This code generates a "reserved instruction" exception if the
2930 * Config3 MT bit is NOT set.
2932 static inline void check_mt(DisasContext
*ctx
)
2934 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2935 generate_exception_end(ctx
, EXCP_RI
);
2939 #ifndef CONFIG_USER_ONLY
2941 * This code generates a "coprocessor unusable" exception if CP0 is not
2942 * available, and, if that is not the case, generates a "reserved instruction"
2943 * exception if the Config5 MT bit is NOT set. This is needed for availability
2944 * control of some of MT ASE instructions.
2946 static inline void check_cp0_mt(DisasContext
*ctx
)
2948 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2949 generate_exception_err(ctx
, EXCP_CpU
, 0);
2951 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2952 generate_exception_err(ctx
, EXCP_RI
, 0);
2959 * This code generates a "reserved instruction" exception if the
2960 * Config5 NMS bit is set.
2962 static inline void check_nms(DisasContext
*ctx
)
2964 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2965 generate_exception_end(ctx
, EXCP_RI
);
2970 /* Define small wrappers for gen_load_fpr* so that we have a uniform
2971 calling interface for 32 and 64-bit FPRs. No sense in changing
2972 all callers for gen_load_fpr32 when we need the CTX parameter for
2974 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
2975 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
2976 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
2977 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
2978 int ft, int fs, int cc) \
2980 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
2981 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
2990 check_cp1_registers(ctx, fs | ft); \
2998 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
2999 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3001 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3002 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3003 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3004 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3005 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3006 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3007 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3008 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3009 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3010 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3011 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3012 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3013 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3014 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3015 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3016 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3019 tcg_temp_free_i##bits (fp0); \
3020 tcg_temp_free_i##bits (fp1); \
3023 FOP_CONDS(, 0, d
, FMT_D
, 64)
3024 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3025 FOP_CONDS(, 0, s
, FMT_S
, 32)
3026 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3027 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3028 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3031 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3032 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3033 int ft, int fs, int fd) \
3035 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3036 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3037 if (ifmt == FMT_D) { \
3038 check_cp1_registers(ctx, fs | ft | fd); \
3040 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3041 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3044 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3047 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3050 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3053 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3056 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3059 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3062 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3065 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3068 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3071 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3074 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3077 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3080 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3083 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3086 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3089 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3092 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3095 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3098 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3101 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3104 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3107 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3113 tcg_temp_free_i ## bits (fp0); \
3114 tcg_temp_free_i ## bits (fp1); \
3117 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3118 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3120 #undef gen_ldcmp_fpr32
3121 #undef gen_ldcmp_fpr64
3123 /* load/store instructions. */
3124 #ifdef CONFIG_USER_ONLY
3125 #define OP_LD_ATOMIC(insn,fname) \
3126 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3127 DisasContext *ctx) \
3129 TCGv t0 = tcg_temp_new(); \
3130 tcg_gen_mov_tl(t0, arg1); \
3131 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3132 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3133 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3134 tcg_temp_free(t0); \
3137 #define OP_LD_ATOMIC(insn,fname) \
3138 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3139 DisasContext *ctx) \
3141 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3144 OP_LD_ATOMIC(ll
,ld32s
);
3145 #if defined(TARGET_MIPS64)
3146 OP_LD_ATOMIC(lld
,ld64
);
3150 #ifdef CONFIG_USER_ONLY
3151 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3152 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3153 DisasContext *ctx) \
3155 TCGv t0 = tcg_temp_new(); \
3156 TCGLabel *l1 = gen_new_label(); \
3157 TCGLabel *l2 = gen_new_label(); \
3159 tcg_gen_andi_tl(t0, arg2, almask); \
3160 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3161 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3162 generate_exception(ctx, EXCP_AdES); \
3163 gen_set_label(l1); \
3164 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3165 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3166 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3167 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3168 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3169 generate_exception_end(ctx, EXCP_SC); \
3170 gen_set_label(l2); \
3171 tcg_gen_movi_tl(t0, 0); \
3172 gen_store_gpr(t0, rt); \
3173 tcg_temp_free(t0); \
3176 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3177 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3178 DisasContext *ctx) \
3180 TCGv t0 = tcg_temp_new(); \
3181 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3182 gen_store_gpr(t0, rt); \
3183 tcg_temp_free(t0); \
3186 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3187 #if defined(TARGET_MIPS64)
3188 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3192 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3193 int base
, int offset
)
3196 tcg_gen_movi_tl(addr
, offset
);
3197 } else if (offset
== 0) {
3198 gen_load_gpr(addr
, base
);
3200 tcg_gen_movi_tl(addr
, offset
);
3201 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3205 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3207 target_ulong pc
= ctx
->base
.pc_next
;
3209 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3210 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3215 pc
&= ~(target_ulong
)3;
3220 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3221 int rt
, int base
, int offset
)
3224 int mem_idx
= ctx
->mem_idx
;
3226 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3227 /* Loongson CPU uses a load to zero register for prefetch.
3228 We emulate it as a NOP. On other CPU we must perform the
3229 actual memory access. */
3233 t0
= tcg_temp_new();
3234 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3237 #if defined(TARGET_MIPS64)
3239 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3240 ctx
->default_tcg_memop_mask
);
3241 gen_store_gpr(t0
, rt
);
3244 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3245 ctx
->default_tcg_memop_mask
);
3246 gen_store_gpr(t0
, rt
);
3250 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3251 gen_store_gpr(t0
, rt
);
3254 t1
= tcg_temp_new();
3255 /* Do a byte access to possibly trigger a page
3256 fault with the unaligned address. */
3257 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3258 tcg_gen_andi_tl(t1
, t0
, 7);
3259 #ifndef TARGET_WORDS_BIGENDIAN
3260 tcg_gen_xori_tl(t1
, t1
, 7);
3262 tcg_gen_shli_tl(t1
, t1
, 3);
3263 tcg_gen_andi_tl(t0
, t0
, ~7);
3264 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3265 tcg_gen_shl_tl(t0
, t0
, t1
);
3266 t2
= tcg_const_tl(-1);
3267 tcg_gen_shl_tl(t2
, t2
, t1
);
3268 gen_load_gpr(t1
, rt
);
3269 tcg_gen_andc_tl(t1
, t1
, t2
);
3271 tcg_gen_or_tl(t0
, t0
, t1
);
3273 gen_store_gpr(t0
, rt
);
3276 t1
= tcg_temp_new();
3277 /* Do a byte access to possibly trigger a page
3278 fault with the unaligned address. */
3279 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3280 tcg_gen_andi_tl(t1
, t0
, 7);
3281 #ifdef TARGET_WORDS_BIGENDIAN
3282 tcg_gen_xori_tl(t1
, t1
, 7);
3284 tcg_gen_shli_tl(t1
, t1
, 3);
3285 tcg_gen_andi_tl(t0
, t0
, ~7);
3286 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3287 tcg_gen_shr_tl(t0
, t0
, t1
);
3288 tcg_gen_xori_tl(t1
, t1
, 63);
3289 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3290 tcg_gen_shl_tl(t2
, t2
, t1
);
3291 gen_load_gpr(t1
, rt
);
3292 tcg_gen_and_tl(t1
, t1
, t2
);
3294 tcg_gen_or_tl(t0
, t0
, t1
);
3296 gen_store_gpr(t0
, rt
);
3299 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3300 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3302 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3303 gen_store_gpr(t0
, rt
);
3307 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3308 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3310 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3311 gen_store_gpr(t0
, rt
);
3314 mem_idx
= MIPS_HFLAG_UM
;
3317 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3318 ctx
->default_tcg_memop_mask
);
3319 gen_store_gpr(t0
, rt
);
3322 mem_idx
= MIPS_HFLAG_UM
;
3325 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3326 ctx
->default_tcg_memop_mask
);
3327 gen_store_gpr(t0
, rt
);
3330 mem_idx
= MIPS_HFLAG_UM
;
3333 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3334 ctx
->default_tcg_memop_mask
);
3335 gen_store_gpr(t0
, rt
);
3338 mem_idx
= MIPS_HFLAG_UM
;
3341 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3342 gen_store_gpr(t0
, rt
);
3345 mem_idx
= MIPS_HFLAG_UM
;
3348 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3349 gen_store_gpr(t0
, rt
);
3352 mem_idx
= MIPS_HFLAG_UM
;
3355 t1
= tcg_temp_new();
3356 /* Do a byte access to possibly trigger a page
3357 fault with the unaligned address. */
3358 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3359 tcg_gen_andi_tl(t1
, t0
, 3);
3360 #ifndef TARGET_WORDS_BIGENDIAN
3361 tcg_gen_xori_tl(t1
, t1
, 3);
3363 tcg_gen_shli_tl(t1
, t1
, 3);
3364 tcg_gen_andi_tl(t0
, t0
, ~3);
3365 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3366 tcg_gen_shl_tl(t0
, t0
, t1
);
3367 t2
= tcg_const_tl(-1);
3368 tcg_gen_shl_tl(t2
, t2
, t1
);
3369 gen_load_gpr(t1
, rt
);
3370 tcg_gen_andc_tl(t1
, t1
, t2
);
3372 tcg_gen_or_tl(t0
, t0
, t1
);
3374 tcg_gen_ext32s_tl(t0
, t0
);
3375 gen_store_gpr(t0
, rt
);
3378 mem_idx
= MIPS_HFLAG_UM
;
3381 t1
= tcg_temp_new();
3382 /* Do a byte access to possibly trigger a page
3383 fault with the unaligned address. */
3384 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3385 tcg_gen_andi_tl(t1
, t0
, 3);
3386 #ifdef TARGET_WORDS_BIGENDIAN
3387 tcg_gen_xori_tl(t1
, t1
, 3);
3389 tcg_gen_shli_tl(t1
, t1
, 3);
3390 tcg_gen_andi_tl(t0
, t0
, ~3);
3391 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3392 tcg_gen_shr_tl(t0
, t0
, t1
);
3393 tcg_gen_xori_tl(t1
, t1
, 31);
3394 t2
= tcg_const_tl(0xfffffffeull
);
3395 tcg_gen_shl_tl(t2
, t2
, t1
);
3396 gen_load_gpr(t1
, rt
);
3397 tcg_gen_and_tl(t1
, t1
, t2
);
3399 tcg_gen_or_tl(t0
, t0
, t1
);
3401 tcg_gen_ext32s_tl(t0
, t0
);
3402 gen_store_gpr(t0
, rt
);
3405 mem_idx
= MIPS_HFLAG_UM
;
3409 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3410 gen_store_gpr(t0
, rt
);
3416 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3417 uint32_t reg1
, uint32_t reg2
)
3419 TCGv taddr
= tcg_temp_new();
3420 TCGv_i64 tval
= tcg_temp_new_i64();
3421 TCGv tmp1
= tcg_temp_new();
3422 TCGv tmp2
= tcg_temp_new();
3424 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3425 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3426 #ifdef TARGET_WORDS_BIGENDIAN
3427 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3429 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3431 gen_store_gpr(tmp1
, reg1
);
3432 tcg_temp_free(tmp1
);
3433 gen_store_gpr(tmp2
, reg2
);
3434 tcg_temp_free(tmp2
);
3435 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3436 tcg_temp_free_i64(tval
);
3437 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3438 tcg_temp_free(taddr
);
3442 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3443 int base
, int offset
)
3445 TCGv t0
= tcg_temp_new();
3446 TCGv t1
= tcg_temp_new();
3447 int mem_idx
= ctx
->mem_idx
;
3449 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3450 gen_load_gpr(t1
, rt
);
3452 #if defined(TARGET_MIPS64)
3454 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3455 ctx
->default_tcg_memop_mask
);
3458 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3461 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3465 mem_idx
= MIPS_HFLAG_UM
;
3468 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3469 ctx
->default_tcg_memop_mask
);
3472 mem_idx
= MIPS_HFLAG_UM
;
3475 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3476 ctx
->default_tcg_memop_mask
);
3479 mem_idx
= MIPS_HFLAG_UM
;
3482 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3485 mem_idx
= MIPS_HFLAG_UM
;
3488 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3491 mem_idx
= MIPS_HFLAG_UM
;
3494 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3502 /* Store conditional */
3503 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3504 int base
, int16_t offset
)
3507 int mem_idx
= ctx
->mem_idx
;
3509 #ifdef CONFIG_USER_ONLY
3510 t0
= tcg_temp_local_new();
3511 t1
= tcg_temp_local_new();
3513 t0
= tcg_temp_new();
3514 t1
= tcg_temp_new();
3516 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3517 gen_load_gpr(t1
, rt
);
3519 #if defined(TARGET_MIPS64)
3522 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3526 mem_idx
= MIPS_HFLAG_UM
;
3530 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3537 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3538 uint32_t reg1
, uint32_t reg2
)
3540 TCGv taddr
= tcg_temp_local_new();
3541 TCGv lladdr
= tcg_temp_local_new();
3542 TCGv_i64 tval
= tcg_temp_new_i64();
3543 TCGv_i64 llval
= tcg_temp_new_i64();
3544 TCGv_i64 val
= tcg_temp_new_i64();
3545 TCGv tmp1
= tcg_temp_new();
3546 TCGv tmp2
= tcg_temp_new();
3547 TCGLabel
*lab_fail
= gen_new_label();
3548 TCGLabel
*lab_done
= gen_new_label();
3550 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3552 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3553 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3555 gen_load_gpr(tmp1
, reg1
);
3556 gen_load_gpr(tmp2
, reg2
);
3558 #ifdef TARGET_WORDS_BIGENDIAN
3559 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3561 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3564 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3565 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3566 ctx
->mem_idx
, MO_64
);
3568 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3570 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3572 gen_set_label(lab_fail
);
3575 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3577 gen_set_label(lab_done
);
3578 tcg_gen_movi_tl(lladdr
, -1);
3579 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3582 /* Load and store */
3583 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3586 /* Don't do NOP if destination is zero: we must perform the actual
3591 TCGv_i32 fp0
= tcg_temp_new_i32();
3592 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3593 ctx
->default_tcg_memop_mask
);
3594 gen_store_fpr32(ctx
, fp0
, ft
);
3595 tcg_temp_free_i32(fp0
);
3600 TCGv_i32 fp0
= tcg_temp_new_i32();
3601 gen_load_fpr32(ctx
, fp0
, ft
);
3602 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3603 ctx
->default_tcg_memop_mask
);
3604 tcg_temp_free_i32(fp0
);
3609 TCGv_i64 fp0
= tcg_temp_new_i64();
3610 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3611 ctx
->default_tcg_memop_mask
);
3612 gen_store_fpr64(ctx
, fp0
, ft
);
3613 tcg_temp_free_i64(fp0
);
3618 TCGv_i64 fp0
= tcg_temp_new_i64();
3619 gen_load_fpr64(ctx
, fp0
, ft
);
3620 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3621 ctx
->default_tcg_memop_mask
);
3622 tcg_temp_free_i64(fp0
);
3626 MIPS_INVAL("flt_ldst");
3627 generate_exception_end(ctx
, EXCP_RI
);
3632 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3633 int rs
, int16_t imm
)
3635 TCGv t0
= tcg_temp_new();
3637 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3638 check_cp1_enabled(ctx
);
3642 check_insn(ctx
, ISA_MIPS2
);
3645 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3646 gen_flt_ldst(ctx
, op
, rt
, t0
);
3649 generate_exception_err(ctx
, EXCP_CpU
, 1);
3654 /* Arithmetic with immediate operand */
3655 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3656 int rt
, int rs
, int imm
)
3658 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3660 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3661 /* If no destination, treat it as a NOP.
3662 For addi, we must generate the overflow exception when needed. */
3668 TCGv t0
= tcg_temp_local_new();
3669 TCGv t1
= tcg_temp_new();
3670 TCGv t2
= tcg_temp_new();
3671 TCGLabel
*l1
= gen_new_label();
3673 gen_load_gpr(t1
, rs
);
3674 tcg_gen_addi_tl(t0
, t1
, uimm
);
3675 tcg_gen_ext32s_tl(t0
, t0
);
3677 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3678 tcg_gen_xori_tl(t2
, t0
, uimm
);
3679 tcg_gen_and_tl(t1
, t1
, t2
);
3681 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3683 /* operands of same sign, result different sign */
3684 generate_exception(ctx
, EXCP_OVERFLOW
);
3686 tcg_gen_ext32s_tl(t0
, t0
);
3687 gen_store_gpr(t0
, rt
);
3693 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3694 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3696 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3699 #if defined(TARGET_MIPS64)
3702 TCGv t0
= tcg_temp_local_new();
3703 TCGv t1
= tcg_temp_new();
3704 TCGv t2
= tcg_temp_new();
3705 TCGLabel
*l1
= gen_new_label();
3707 gen_load_gpr(t1
, rs
);
3708 tcg_gen_addi_tl(t0
, t1
, uimm
);
3710 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3711 tcg_gen_xori_tl(t2
, t0
, uimm
);
3712 tcg_gen_and_tl(t1
, t1
, t2
);
3714 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3716 /* operands of same sign, result different sign */
3717 generate_exception(ctx
, EXCP_OVERFLOW
);
3719 gen_store_gpr(t0
, rt
);
3725 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3727 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3734 /* Logic with immediate operand */
3735 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3736 int rt
, int rs
, int16_t imm
)
3741 /* If no destination, treat it as a NOP. */
3744 uimm
= (uint16_t)imm
;
3747 if (likely(rs
!= 0))
3748 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3750 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3754 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3756 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3759 if (likely(rs
!= 0))
3760 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3762 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3765 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3767 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3768 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3770 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3779 /* Set on less than with immediate operand */
3780 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3781 int rt
, int rs
, int16_t imm
)
3783 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3787 /* If no destination, treat it as a NOP. */
3790 t0
= tcg_temp_new();
3791 gen_load_gpr(t0
, rs
);
3794 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3797 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3803 /* Shifts with immediate operand */
3804 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3805 int rt
, int rs
, int16_t imm
)
3807 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3811 /* If no destination, treat it as a NOP. */
3815 t0
= tcg_temp_new();
3816 gen_load_gpr(t0
, rs
);
3819 tcg_gen_shli_tl(t0
, t0
, uimm
);
3820 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3823 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3827 tcg_gen_ext32u_tl(t0
, t0
);
3828 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3830 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3835 TCGv_i32 t1
= tcg_temp_new_i32();
3837 tcg_gen_trunc_tl_i32(t1
, t0
);
3838 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3839 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3840 tcg_temp_free_i32(t1
);
3842 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3845 #if defined(TARGET_MIPS64)
3847 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3850 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3853 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3857 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3859 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3863 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3866 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3869 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3872 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3880 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3881 int rd
, int rs
, int rt
)
3883 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3884 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3885 /* If no destination, treat it as a NOP.
3886 For add & sub, we must generate the overflow exception when needed. */
3893 TCGv t0
= tcg_temp_local_new();
3894 TCGv t1
= tcg_temp_new();
3895 TCGv t2
= tcg_temp_new();
3896 TCGLabel
*l1
= gen_new_label();
3898 gen_load_gpr(t1
, rs
);
3899 gen_load_gpr(t2
, rt
);
3900 tcg_gen_add_tl(t0
, t1
, t2
);
3901 tcg_gen_ext32s_tl(t0
, t0
);
3902 tcg_gen_xor_tl(t1
, t1
, t2
);
3903 tcg_gen_xor_tl(t2
, t0
, t2
);
3904 tcg_gen_andc_tl(t1
, t2
, t1
);
3906 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3908 /* operands of same sign, result different sign */
3909 generate_exception(ctx
, EXCP_OVERFLOW
);
3911 gen_store_gpr(t0
, rd
);
3916 if (rs
!= 0 && rt
!= 0) {
3917 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3918 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3919 } else if (rs
== 0 && rt
!= 0) {
3920 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3921 } else if (rs
!= 0 && rt
== 0) {
3922 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3924 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3929 TCGv t0
= tcg_temp_local_new();
3930 TCGv t1
= tcg_temp_new();
3931 TCGv t2
= tcg_temp_new();
3932 TCGLabel
*l1
= gen_new_label();
3934 gen_load_gpr(t1
, rs
);
3935 gen_load_gpr(t2
, rt
);
3936 tcg_gen_sub_tl(t0
, t1
, t2
);
3937 tcg_gen_ext32s_tl(t0
, t0
);
3938 tcg_gen_xor_tl(t2
, t1
, t2
);
3939 tcg_gen_xor_tl(t1
, t0
, t1
);
3940 tcg_gen_and_tl(t1
, t1
, t2
);
3942 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3944 /* operands of different sign, first operand and result different sign */
3945 generate_exception(ctx
, EXCP_OVERFLOW
);
3947 gen_store_gpr(t0
, rd
);
3952 if (rs
!= 0 && rt
!= 0) {
3953 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3954 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3955 } else if (rs
== 0 && rt
!= 0) {
3956 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3957 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3958 } else if (rs
!= 0 && rt
== 0) {
3959 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3961 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3964 #if defined(TARGET_MIPS64)
3967 TCGv t0
= tcg_temp_local_new();
3968 TCGv t1
= tcg_temp_new();
3969 TCGv t2
= tcg_temp_new();
3970 TCGLabel
*l1
= gen_new_label();
3972 gen_load_gpr(t1
, rs
);
3973 gen_load_gpr(t2
, rt
);
3974 tcg_gen_add_tl(t0
, t1
, t2
);
3975 tcg_gen_xor_tl(t1
, t1
, t2
);
3976 tcg_gen_xor_tl(t2
, t0
, t2
);
3977 tcg_gen_andc_tl(t1
, t2
, t1
);
3979 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3981 /* operands of same sign, result different sign */
3982 generate_exception(ctx
, EXCP_OVERFLOW
);
3984 gen_store_gpr(t0
, rd
);
3989 if (rs
!= 0 && rt
!= 0) {
3990 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3991 } else if (rs
== 0 && rt
!= 0) {
3992 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3993 } else if (rs
!= 0 && rt
== 0) {
3994 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3996 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4001 TCGv t0
= tcg_temp_local_new();
4002 TCGv t1
= tcg_temp_new();
4003 TCGv t2
= tcg_temp_new();
4004 TCGLabel
*l1
= gen_new_label();
4006 gen_load_gpr(t1
, rs
);
4007 gen_load_gpr(t2
, rt
);
4008 tcg_gen_sub_tl(t0
, t1
, t2
);
4009 tcg_gen_xor_tl(t2
, t1
, t2
);
4010 tcg_gen_xor_tl(t1
, t0
, t1
);
4011 tcg_gen_and_tl(t1
, t1
, t2
);
4013 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4015 /* operands of different sign, first operand and result different sign */
4016 generate_exception(ctx
, EXCP_OVERFLOW
);
4018 gen_store_gpr(t0
, rd
);
4023 if (rs
!= 0 && rt
!= 0) {
4024 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4025 } else if (rs
== 0 && rt
!= 0) {
4026 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4027 } else if (rs
!= 0 && rt
== 0) {
4028 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4030 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4035 if (likely(rs
!= 0 && rt
!= 0)) {
4036 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4037 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4039 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4045 /* Conditional move */
4046 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4047 int rd
, int rs
, int rt
)
4052 /* If no destination, treat it as a NOP. */
4056 t0
= tcg_temp_new();
4057 gen_load_gpr(t0
, rt
);
4058 t1
= tcg_const_tl(0);
4059 t2
= tcg_temp_new();
4060 gen_load_gpr(t2
, rs
);
4063 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4066 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4069 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4072 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4081 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4082 int rd
, int rs
, int rt
)
4085 /* If no destination, treat it as a NOP. */
4091 if (likely(rs
!= 0 && rt
!= 0)) {
4092 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4094 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4098 if (rs
!= 0 && rt
!= 0) {
4099 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4100 } else if (rs
== 0 && rt
!= 0) {
4101 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4102 } else if (rs
!= 0 && rt
== 0) {
4103 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4105 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4109 if (likely(rs
!= 0 && rt
!= 0)) {
4110 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4111 } else if (rs
== 0 && rt
!= 0) {
4112 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4113 } else if (rs
!= 0 && rt
== 0) {
4114 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4116 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4120 if (likely(rs
!= 0 && rt
!= 0)) {
4121 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4122 } else if (rs
== 0 && rt
!= 0) {
4123 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4124 } else if (rs
!= 0 && rt
== 0) {
4125 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4127 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4133 /* Set on lower than */
4134 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4135 int rd
, int rs
, int rt
)
4140 /* If no destination, treat it as a NOP. */
4144 t0
= tcg_temp_new();
4145 t1
= tcg_temp_new();
4146 gen_load_gpr(t0
, rs
);
4147 gen_load_gpr(t1
, rt
);
4150 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4153 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4161 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4162 int rd
, int rs
, int rt
)
4167 /* If no destination, treat it as a NOP.
4168 For add & sub, we must generate the overflow exception when needed. */
4172 t0
= tcg_temp_new();
4173 t1
= tcg_temp_new();
4174 gen_load_gpr(t0
, rs
);
4175 gen_load_gpr(t1
, rt
);
4178 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4179 tcg_gen_shl_tl(t0
, t1
, t0
);
4180 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4183 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4184 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4187 tcg_gen_ext32u_tl(t1
, t1
);
4188 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4189 tcg_gen_shr_tl(t0
, t1
, t0
);
4190 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4194 TCGv_i32 t2
= tcg_temp_new_i32();
4195 TCGv_i32 t3
= tcg_temp_new_i32();
4197 tcg_gen_trunc_tl_i32(t2
, t0
);
4198 tcg_gen_trunc_tl_i32(t3
, t1
);
4199 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4200 tcg_gen_rotr_i32(t2
, t3
, t2
);
4201 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4202 tcg_temp_free_i32(t2
);
4203 tcg_temp_free_i32(t3
);
4206 #if defined(TARGET_MIPS64)
4208 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4209 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4212 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4213 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4216 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4217 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4220 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4221 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4229 /* Arithmetic on HI/LO registers */
4230 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4232 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== TX79_MMI_MFHI1
||
4233 opc
== OPC_MFLO
|| opc
== TX79_MMI_MFLO1
)) {
4239 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4246 case TX79_MMI_MFHI1
:
4247 #if defined(TARGET_MIPS64)
4249 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4253 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4257 case TX79_MMI_MFLO1
:
4258 #if defined(TARGET_MIPS64)
4260 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4264 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4268 case TX79_MMI_MTHI1
:
4270 #if defined(TARGET_MIPS64)
4272 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4276 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4279 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4283 case TX79_MMI_MTLO1
:
4285 #if defined(TARGET_MIPS64)
4287 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4291 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4294 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4300 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4303 TCGv t0
= tcg_const_tl(addr
);
4304 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4305 gen_store_gpr(t0
, reg
);
4309 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4315 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4318 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4319 addr
= addr_add(ctx
, pc
, offset
);
4320 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4324 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4325 addr
= addr_add(ctx
, pc
, offset
);
4326 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4328 #if defined(TARGET_MIPS64)
4331 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4332 addr
= addr_add(ctx
, pc
, offset
);
4333 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4337 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4340 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4341 addr
= addr_add(ctx
, pc
, offset
);
4342 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4347 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4348 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4349 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4352 #if defined(TARGET_MIPS64)
4353 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4354 case R6_OPC_LDPC
+ (1 << 16):
4355 case R6_OPC_LDPC
+ (2 << 16):
4356 case R6_OPC_LDPC
+ (3 << 16):
4358 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4359 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4360 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4364 MIPS_INVAL("OPC_PCREL");
4365 generate_exception_end(ctx
, EXCP_RI
);
4372 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4381 t0
= tcg_temp_new();
4382 t1
= tcg_temp_new();
4384 gen_load_gpr(t0
, rs
);
4385 gen_load_gpr(t1
, rt
);
4390 TCGv t2
= tcg_temp_new();
4391 TCGv t3
= tcg_temp_new();
4392 tcg_gen_ext32s_tl(t0
, t0
);
4393 tcg_gen_ext32s_tl(t1
, t1
);
4394 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4395 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4396 tcg_gen_and_tl(t2
, t2
, t3
);
4397 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4398 tcg_gen_or_tl(t2
, t2
, t3
);
4399 tcg_gen_movi_tl(t3
, 0);
4400 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4401 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4402 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4409 TCGv t2
= tcg_temp_new();
4410 TCGv t3
= tcg_temp_new();
4411 tcg_gen_ext32s_tl(t0
, t0
);
4412 tcg_gen_ext32s_tl(t1
, t1
);
4413 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4414 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4415 tcg_gen_and_tl(t2
, t2
, t3
);
4416 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4417 tcg_gen_or_tl(t2
, t2
, t3
);
4418 tcg_gen_movi_tl(t3
, 0);
4419 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4420 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4421 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4428 TCGv t2
= tcg_const_tl(0);
4429 TCGv t3
= tcg_const_tl(1);
4430 tcg_gen_ext32u_tl(t0
, t0
);
4431 tcg_gen_ext32u_tl(t1
, t1
);
4432 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4433 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4434 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4441 TCGv t2
= tcg_const_tl(0);
4442 TCGv t3
= tcg_const_tl(1);
4443 tcg_gen_ext32u_tl(t0
, t0
);
4444 tcg_gen_ext32u_tl(t1
, t1
);
4445 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4446 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4447 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4454 TCGv_i32 t2
= tcg_temp_new_i32();
4455 TCGv_i32 t3
= tcg_temp_new_i32();
4456 tcg_gen_trunc_tl_i32(t2
, t0
);
4457 tcg_gen_trunc_tl_i32(t3
, t1
);
4458 tcg_gen_mul_i32(t2
, t2
, t3
);
4459 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4460 tcg_temp_free_i32(t2
);
4461 tcg_temp_free_i32(t3
);
4466 TCGv_i32 t2
= tcg_temp_new_i32();
4467 TCGv_i32 t3
= tcg_temp_new_i32();
4468 tcg_gen_trunc_tl_i32(t2
, t0
);
4469 tcg_gen_trunc_tl_i32(t3
, t1
);
4470 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4471 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4472 tcg_temp_free_i32(t2
);
4473 tcg_temp_free_i32(t3
);
4478 TCGv_i32 t2
= tcg_temp_new_i32();
4479 TCGv_i32 t3
= tcg_temp_new_i32();
4480 tcg_gen_trunc_tl_i32(t2
, t0
);
4481 tcg_gen_trunc_tl_i32(t3
, t1
);
4482 tcg_gen_mul_i32(t2
, t2
, t3
);
4483 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4484 tcg_temp_free_i32(t2
);
4485 tcg_temp_free_i32(t3
);
4490 TCGv_i32 t2
= tcg_temp_new_i32();
4491 TCGv_i32 t3
= tcg_temp_new_i32();
4492 tcg_gen_trunc_tl_i32(t2
, t0
);
4493 tcg_gen_trunc_tl_i32(t3
, t1
);
4494 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4495 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4496 tcg_temp_free_i32(t2
);
4497 tcg_temp_free_i32(t3
);
4500 #if defined(TARGET_MIPS64)
4503 TCGv t2
= tcg_temp_new();
4504 TCGv t3
= tcg_temp_new();
4505 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4506 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4507 tcg_gen_and_tl(t2
, t2
, t3
);
4508 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4509 tcg_gen_or_tl(t2
, t2
, t3
);
4510 tcg_gen_movi_tl(t3
, 0);
4511 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4512 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4519 TCGv t2
= tcg_temp_new();
4520 TCGv t3
= tcg_temp_new();
4521 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4522 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4523 tcg_gen_and_tl(t2
, t2
, t3
);
4524 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4525 tcg_gen_or_tl(t2
, t2
, t3
);
4526 tcg_gen_movi_tl(t3
, 0);
4527 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4528 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4535 TCGv t2
= tcg_const_tl(0);
4536 TCGv t3
= tcg_const_tl(1);
4537 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4538 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4545 TCGv t2
= tcg_const_tl(0);
4546 TCGv t3
= tcg_const_tl(1);
4547 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4548 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4554 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4558 TCGv t2
= tcg_temp_new();
4559 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4564 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4568 TCGv t2
= tcg_temp_new();
4569 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4575 MIPS_INVAL("r6 mul/div");
4576 generate_exception_end(ctx
, EXCP_RI
);
4584 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4585 int acc
, int rs
, int rt
)
4589 t0
= tcg_temp_new();
4590 t1
= tcg_temp_new();
4592 gen_load_gpr(t0
, rs
);
4593 gen_load_gpr(t1
, rt
);
4602 TCGv t2
= tcg_temp_new();
4603 TCGv t3
= tcg_temp_new();
4604 tcg_gen_ext32s_tl(t0
, t0
);
4605 tcg_gen_ext32s_tl(t1
, t1
);
4606 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4607 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4608 tcg_gen_and_tl(t2
, t2
, t3
);
4609 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4610 tcg_gen_or_tl(t2
, t2
, t3
);
4611 tcg_gen_movi_tl(t3
, 0);
4612 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4613 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4614 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4615 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4616 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4623 TCGv t2
= tcg_const_tl(0);
4624 TCGv t3
= tcg_const_tl(1);
4625 tcg_gen_ext32u_tl(t0
, t0
);
4626 tcg_gen_ext32u_tl(t1
, t1
);
4627 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4628 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4629 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4630 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4631 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4638 TCGv_i32 t2
= tcg_temp_new_i32();
4639 TCGv_i32 t3
= tcg_temp_new_i32();
4640 tcg_gen_trunc_tl_i32(t2
, t0
);
4641 tcg_gen_trunc_tl_i32(t3
, t1
);
4642 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4643 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4644 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4645 tcg_temp_free_i32(t2
);
4646 tcg_temp_free_i32(t3
);
4651 TCGv_i32 t2
= tcg_temp_new_i32();
4652 TCGv_i32 t3
= tcg_temp_new_i32();
4653 tcg_gen_trunc_tl_i32(t2
, t0
);
4654 tcg_gen_trunc_tl_i32(t3
, t1
);
4655 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4656 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4657 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4658 tcg_temp_free_i32(t2
);
4659 tcg_temp_free_i32(t3
);
4662 #if defined(TARGET_MIPS64)
4665 TCGv t2
= tcg_temp_new();
4666 TCGv t3
= tcg_temp_new();
4667 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4668 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4669 tcg_gen_and_tl(t2
, t2
, t3
);
4670 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4671 tcg_gen_or_tl(t2
, t2
, t3
);
4672 tcg_gen_movi_tl(t3
, 0);
4673 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4674 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4675 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4682 TCGv t2
= tcg_const_tl(0);
4683 TCGv t3
= tcg_const_tl(1);
4684 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4685 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4686 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4692 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4695 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4700 TCGv_i64 t2
= tcg_temp_new_i64();
4701 TCGv_i64 t3
= tcg_temp_new_i64();
4703 tcg_gen_ext_tl_i64(t2
, t0
);
4704 tcg_gen_ext_tl_i64(t3
, t1
);
4705 tcg_gen_mul_i64(t2
, t2
, t3
);
4706 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4707 tcg_gen_add_i64(t2
, t2
, t3
);
4708 tcg_temp_free_i64(t3
);
4709 gen_move_low32(cpu_LO
[acc
], t2
);
4710 gen_move_high32(cpu_HI
[acc
], t2
);
4711 tcg_temp_free_i64(t2
);
4716 TCGv_i64 t2
= tcg_temp_new_i64();
4717 TCGv_i64 t3
= tcg_temp_new_i64();
4719 tcg_gen_ext32u_tl(t0
, t0
);
4720 tcg_gen_ext32u_tl(t1
, t1
);
4721 tcg_gen_extu_tl_i64(t2
, t0
);
4722 tcg_gen_extu_tl_i64(t3
, t1
);
4723 tcg_gen_mul_i64(t2
, t2
, t3
);
4724 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4725 tcg_gen_add_i64(t2
, t2
, t3
);
4726 tcg_temp_free_i64(t3
);
4727 gen_move_low32(cpu_LO
[acc
], t2
);
4728 gen_move_high32(cpu_HI
[acc
], t2
);
4729 tcg_temp_free_i64(t2
);
4734 TCGv_i64 t2
= tcg_temp_new_i64();
4735 TCGv_i64 t3
= tcg_temp_new_i64();
4737 tcg_gen_ext_tl_i64(t2
, t0
);
4738 tcg_gen_ext_tl_i64(t3
, t1
);
4739 tcg_gen_mul_i64(t2
, t2
, t3
);
4740 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4741 tcg_gen_sub_i64(t2
, t3
, t2
);
4742 tcg_temp_free_i64(t3
);
4743 gen_move_low32(cpu_LO
[acc
], t2
);
4744 gen_move_high32(cpu_HI
[acc
], t2
);
4745 tcg_temp_free_i64(t2
);
4750 TCGv_i64 t2
= tcg_temp_new_i64();
4751 TCGv_i64 t3
= tcg_temp_new_i64();
4753 tcg_gen_ext32u_tl(t0
, t0
);
4754 tcg_gen_ext32u_tl(t1
, t1
);
4755 tcg_gen_extu_tl_i64(t2
, t0
);
4756 tcg_gen_extu_tl_i64(t3
, t1
);
4757 tcg_gen_mul_i64(t2
, t2
, t3
);
4758 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4759 tcg_gen_sub_i64(t2
, t3
, t2
);
4760 tcg_temp_free_i64(t3
);
4761 gen_move_low32(cpu_LO
[acc
], t2
);
4762 gen_move_high32(cpu_HI
[acc
], t2
);
4763 tcg_temp_free_i64(t2
);
4767 MIPS_INVAL("mul/div");
4768 generate_exception_end(ctx
, EXCP_RI
);
4777 * These MULT and MULTU instructions implemented in for example the
4778 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4779 * architectures are special three-operand variants with the syntax
4781 * MULT[U][1] rd, rs, rt
4785 * (rd, LO, HI) <- rs * rt
4787 * where the low-order 32-bits of the result is placed into both the
4788 * GPR rd and the special register LO. The high-order 32-bits of the
4789 * result is placed into the special register HI.
4791 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4792 * which is the zero register that always reads as 0.
4794 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4795 int rd
, int rs
, int rt
)
4797 TCGv t0
= tcg_temp_new();
4798 TCGv t1
= tcg_temp_new();
4801 gen_load_gpr(t0
, rs
);
4802 gen_load_gpr(t1
, rt
);
4805 case TX79_MMI_MULT1
:
4810 TCGv_i32 t2
= tcg_temp_new_i32();
4811 TCGv_i32 t3
= tcg_temp_new_i32();
4812 tcg_gen_trunc_tl_i32(t2
, t0
);
4813 tcg_gen_trunc_tl_i32(t3
, t1
);
4814 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4816 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4818 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4819 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4820 tcg_temp_free_i32(t2
);
4821 tcg_temp_free_i32(t3
);
4824 case TX79_MMI_MULTU1
:
4829 TCGv_i32 t2
= tcg_temp_new_i32();
4830 TCGv_i32 t3
= tcg_temp_new_i32();
4831 tcg_gen_trunc_tl_i32(t2
, t0
);
4832 tcg_gen_trunc_tl_i32(t3
, t1
);
4833 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4835 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4837 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4838 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4839 tcg_temp_free_i32(t2
);
4840 tcg_temp_free_i32(t3
);
4844 MIPS_INVAL("mul TXx9");
4845 generate_exception_end(ctx
, EXCP_RI
);
4854 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4855 int rd
, int rs
, int rt
)
4857 TCGv t0
= tcg_temp_new();
4858 TCGv t1
= tcg_temp_new();
4860 gen_load_gpr(t0
, rs
);
4861 gen_load_gpr(t1
, rt
);
4864 case OPC_VR54XX_MULS
:
4865 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4867 case OPC_VR54XX_MULSU
:
4868 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4870 case OPC_VR54XX_MACC
:
4871 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4873 case OPC_VR54XX_MACCU
:
4874 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4876 case OPC_VR54XX_MSAC
:
4877 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4879 case OPC_VR54XX_MSACU
:
4880 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4882 case OPC_VR54XX_MULHI
:
4883 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4885 case OPC_VR54XX_MULHIU
:
4886 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4888 case OPC_VR54XX_MULSHI
:
4889 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4891 case OPC_VR54XX_MULSHIU
:
4892 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4894 case OPC_VR54XX_MACCHI
:
4895 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4897 case OPC_VR54XX_MACCHIU
:
4898 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4900 case OPC_VR54XX_MSACHI
:
4901 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4903 case OPC_VR54XX_MSACHIU
:
4904 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4907 MIPS_INVAL("mul vr54xx");
4908 generate_exception_end(ctx
, EXCP_RI
);
4911 gen_store_gpr(t0
, rd
);
4918 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4928 gen_load_gpr(t0
, rs
);
4933 #if defined(TARGET_MIPS64)
4937 tcg_gen_not_tl(t0
, t0
);
4946 tcg_gen_ext32u_tl(t0
, t0
);
4947 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4948 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4950 #if defined(TARGET_MIPS64)
4955 tcg_gen_clzi_i64(t0
, t0
, 64);
4961 /* Godson integer instructions */
4962 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
4963 int rd
, int rs
, int rt
)
4975 case OPC_MULTU_G_2E
:
4976 case OPC_MULTU_G_2F
:
4977 #if defined(TARGET_MIPS64)
4978 case OPC_DMULT_G_2E
:
4979 case OPC_DMULT_G_2F
:
4980 case OPC_DMULTU_G_2E
:
4981 case OPC_DMULTU_G_2F
:
4983 t0
= tcg_temp_new();
4984 t1
= tcg_temp_new();
4987 t0
= tcg_temp_local_new();
4988 t1
= tcg_temp_local_new();
4992 gen_load_gpr(t0
, rs
);
4993 gen_load_gpr(t1
, rt
);
4998 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4999 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5001 case OPC_MULTU_G_2E
:
5002 case OPC_MULTU_G_2F
:
5003 tcg_gen_ext32u_tl(t0
, t0
);
5004 tcg_gen_ext32u_tl(t1
, t1
);
5005 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5006 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5011 TCGLabel
*l1
= gen_new_label();
5012 TCGLabel
*l2
= gen_new_label();
5013 TCGLabel
*l3
= gen_new_label();
5014 tcg_gen_ext32s_tl(t0
, t0
);
5015 tcg_gen_ext32s_tl(t1
, t1
);
5016 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5017 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5020 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5021 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5022 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5025 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5026 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5033 TCGLabel
*l1
= gen_new_label();
5034 TCGLabel
*l2
= gen_new_label();
5035 tcg_gen_ext32u_tl(t0
, t0
);
5036 tcg_gen_ext32u_tl(t1
, t1
);
5037 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5038 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5041 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5042 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5049 TCGLabel
*l1
= gen_new_label();
5050 TCGLabel
*l2
= gen_new_label();
5051 TCGLabel
*l3
= gen_new_label();
5052 tcg_gen_ext32u_tl(t0
, t0
);
5053 tcg_gen_ext32u_tl(t1
, t1
);
5054 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5055 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5056 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5058 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5061 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5062 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5069 TCGLabel
*l1
= gen_new_label();
5070 TCGLabel
*l2
= gen_new_label();
5071 tcg_gen_ext32u_tl(t0
, t0
);
5072 tcg_gen_ext32u_tl(t1
, t1
);
5073 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5074 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5077 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5078 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5082 #if defined(TARGET_MIPS64)
5083 case OPC_DMULT_G_2E
:
5084 case OPC_DMULT_G_2F
:
5085 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5087 case OPC_DMULTU_G_2E
:
5088 case OPC_DMULTU_G_2F
:
5089 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5094 TCGLabel
*l1
= gen_new_label();
5095 TCGLabel
*l2
= gen_new_label();
5096 TCGLabel
*l3
= gen_new_label();
5097 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5098 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5101 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5102 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5103 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5106 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5110 case OPC_DDIVU_G_2E
:
5111 case OPC_DDIVU_G_2F
:
5113 TCGLabel
*l1
= gen_new_label();
5114 TCGLabel
*l2
= gen_new_label();
5115 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5116 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5119 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5126 TCGLabel
*l1
= gen_new_label();
5127 TCGLabel
*l2
= gen_new_label();
5128 TCGLabel
*l3
= gen_new_label();
5129 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5130 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5131 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5133 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5136 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5140 case OPC_DMODU_G_2E
:
5141 case OPC_DMODU_G_2F
:
5143 TCGLabel
*l1
= gen_new_label();
5144 TCGLabel
*l2
= gen_new_label();
5145 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5146 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5149 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5160 /* Loongson multimedia instructions */
5161 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5163 uint32_t opc
, shift_max
;
5166 opc
= MASK_LMI(ctx
->opcode
);
5172 t0
= tcg_temp_local_new_i64();
5173 t1
= tcg_temp_local_new_i64();
5176 t0
= tcg_temp_new_i64();
5177 t1
= tcg_temp_new_i64();
5181 check_cp1_enabled(ctx
);
5182 gen_load_fpr64(ctx
, t0
, rs
);
5183 gen_load_fpr64(ctx
, t1
, rt
);
5185 #define LMI_HELPER(UP, LO) \
5186 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5187 #define LMI_HELPER_1(UP, LO) \
5188 case OPC_##UP: gen_helper_##LO(t0, t0); break
5189 #define LMI_DIRECT(UP, LO, OP) \
5190 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5193 LMI_HELPER(PADDSH
, paddsh
);
5194 LMI_HELPER(PADDUSH
, paddush
);
5195 LMI_HELPER(PADDH
, paddh
);
5196 LMI_HELPER(PADDW
, paddw
);
5197 LMI_HELPER(PADDSB
, paddsb
);
5198 LMI_HELPER(PADDUSB
, paddusb
);
5199 LMI_HELPER(PADDB
, paddb
);
5201 LMI_HELPER(PSUBSH
, psubsh
);
5202 LMI_HELPER(PSUBUSH
, psubush
);
5203 LMI_HELPER(PSUBH
, psubh
);
5204 LMI_HELPER(PSUBW
, psubw
);
5205 LMI_HELPER(PSUBSB
, psubsb
);
5206 LMI_HELPER(PSUBUSB
, psubusb
);
5207 LMI_HELPER(PSUBB
, psubb
);
5209 LMI_HELPER(PSHUFH
, pshufh
);
5210 LMI_HELPER(PACKSSWH
, packsswh
);
5211 LMI_HELPER(PACKSSHB
, packsshb
);
5212 LMI_HELPER(PACKUSHB
, packushb
);
5214 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5215 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5216 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5217 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5218 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5219 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5221 LMI_HELPER(PAVGH
, pavgh
);
5222 LMI_HELPER(PAVGB
, pavgb
);
5223 LMI_HELPER(PMAXSH
, pmaxsh
);
5224 LMI_HELPER(PMINSH
, pminsh
);
5225 LMI_HELPER(PMAXUB
, pmaxub
);
5226 LMI_HELPER(PMINUB
, pminub
);
5228 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5229 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5230 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5231 LMI_HELPER(PCMPGTH
, pcmpgth
);
5232 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5233 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5235 LMI_HELPER(PSLLW
, psllw
);
5236 LMI_HELPER(PSLLH
, psllh
);
5237 LMI_HELPER(PSRLW
, psrlw
);
5238 LMI_HELPER(PSRLH
, psrlh
);
5239 LMI_HELPER(PSRAW
, psraw
);
5240 LMI_HELPER(PSRAH
, psrah
);
5242 LMI_HELPER(PMULLH
, pmullh
);
5243 LMI_HELPER(PMULHH
, pmulhh
);
5244 LMI_HELPER(PMULHUH
, pmulhuh
);
5245 LMI_HELPER(PMADDHW
, pmaddhw
);
5247 LMI_HELPER(PASUBUB
, pasubub
);
5248 LMI_HELPER_1(BIADD
, biadd
);
5249 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5251 LMI_DIRECT(PADDD
, paddd
, add
);
5252 LMI_DIRECT(PSUBD
, psubd
, sub
);
5253 LMI_DIRECT(XOR_CP2
, xor, xor);
5254 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5255 LMI_DIRECT(AND_CP2
, and, and);
5256 LMI_DIRECT(OR_CP2
, or, or);
5259 tcg_gen_andc_i64(t0
, t1
, t0
);
5263 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5266 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5269 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5272 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5276 tcg_gen_andi_i64(t1
, t1
, 3);
5277 tcg_gen_shli_i64(t1
, t1
, 4);
5278 tcg_gen_shr_i64(t0
, t0
, t1
);
5279 tcg_gen_ext16u_i64(t0
, t0
);
5283 tcg_gen_add_i64(t0
, t0
, t1
);
5284 tcg_gen_ext32s_i64(t0
, t0
);
5287 tcg_gen_sub_i64(t0
, t0
, t1
);
5288 tcg_gen_ext32s_i64(t0
, t0
);
5310 /* Make sure shift count isn't TCG undefined behaviour. */
5311 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5316 tcg_gen_shl_i64(t0
, t0
, t1
);
5320 /* Since SRA is UndefinedResult without sign-extended inputs,
5321 we can treat SRA and DSRA the same. */
5322 tcg_gen_sar_i64(t0
, t0
, t1
);
5325 /* We want to shift in zeros for SRL; zero-extend first. */
5326 tcg_gen_ext32u_i64(t0
, t0
);
5329 tcg_gen_shr_i64(t0
, t0
, t1
);
5333 if (shift_max
== 32) {
5334 tcg_gen_ext32s_i64(t0
, t0
);
5337 /* Shifts larger than MAX produce zero. */
5338 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5339 tcg_gen_neg_i64(t1
, t1
);
5340 tcg_gen_and_i64(t0
, t0
, t1
);
5346 TCGv_i64 t2
= tcg_temp_new_i64();
5347 TCGLabel
*lab
= gen_new_label();
5349 tcg_gen_mov_i64(t2
, t0
);
5350 tcg_gen_add_i64(t0
, t1
, t2
);
5351 if (opc
== OPC_ADD_CP2
) {
5352 tcg_gen_ext32s_i64(t0
, t0
);
5354 tcg_gen_xor_i64(t1
, t1
, t2
);
5355 tcg_gen_xor_i64(t2
, t2
, t0
);
5356 tcg_gen_andc_i64(t1
, t2
, t1
);
5357 tcg_temp_free_i64(t2
);
5358 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5359 generate_exception(ctx
, EXCP_OVERFLOW
);
5367 TCGv_i64 t2
= tcg_temp_new_i64();
5368 TCGLabel
*lab
= gen_new_label();
5370 tcg_gen_mov_i64(t2
, t0
);
5371 tcg_gen_sub_i64(t0
, t1
, t2
);
5372 if (opc
== OPC_SUB_CP2
) {
5373 tcg_gen_ext32s_i64(t0
, t0
);
5375 tcg_gen_xor_i64(t1
, t1
, t2
);
5376 tcg_gen_xor_i64(t2
, t2
, t0
);
5377 tcg_gen_and_i64(t1
, t1
, t2
);
5378 tcg_temp_free_i64(t2
);
5379 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5380 generate_exception(ctx
, EXCP_OVERFLOW
);
5386 tcg_gen_ext32u_i64(t0
, t0
);
5387 tcg_gen_ext32u_i64(t1
, t1
);
5388 tcg_gen_mul_i64(t0
, t0
, t1
);
5397 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5398 FD field is the CC field? */
5400 MIPS_INVAL("loongson_cp2");
5401 generate_exception_end(ctx
, EXCP_RI
);
5408 gen_store_fpr64(ctx
, t0
, rd
);
5410 tcg_temp_free_i64(t0
);
5411 tcg_temp_free_i64(t1
);
5415 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5416 int rs
, int rt
, int16_t imm
)
5419 TCGv t0
= tcg_temp_new();
5420 TCGv t1
= tcg_temp_new();
5423 /* Load needed operands */
5431 /* Compare two registers */
5433 gen_load_gpr(t0
, rs
);
5434 gen_load_gpr(t1
, rt
);
5444 /* Compare register to immediate */
5445 if (rs
!= 0 || imm
!= 0) {
5446 gen_load_gpr(t0
, rs
);
5447 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5454 case OPC_TEQ
: /* rs == rs */
5455 case OPC_TEQI
: /* r0 == 0 */
5456 case OPC_TGE
: /* rs >= rs */
5457 case OPC_TGEI
: /* r0 >= 0 */
5458 case OPC_TGEU
: /* rs >= rs unsigned */
5459 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5461 generate_exception_end(ctx
, EXCP_TRAP
);
5463 case OPC_TLT
: /* rs < rs */
5464 case OPC_TLTI
: /* r0 < 0 */
5465 case OPC_TLTU
: /* rs < rs unsigned */
5466 case OPC_TLTIU
: /* r0 < 0 unsigned */
5467 case OPC_TNE
: /* rs != rs */
5468 case OPC_TNEI
: /* r0 != 0 */
5469 /* Never trap: treat as NOP. */
5473 TCGLabel
*l1
= gen_new_label();
5478 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5482 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5486 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5490 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5494 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5498 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5501 generate_exception(ctx
, EXCP_TRAP
);
5508 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5510 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5514 #ifndef CONFIG_USER_ONLY
5515 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5521 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5523 if (use_goto_tb(ctx
, dest
)) {
5526 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5529 if (ctx
->base
.singlestep_enabled
) {
5530 save_cpu_state(ctx
, 0);
5531 gen_helper_raise_exception_debug(cpu_env
);
5533 tcg_gen_lookup_and_goto_ptr();
5537 /* Branches (before delay slot) */
5538 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5540 int rs
, int rt
, int32_t offset
,
5543 target_ulong btgt
= -1;
5545 int bcond_compute
= 0;
5546 TCGv t0
= tcg_temp_new();
5547 TCGv t1
= tcg_temp_new();
5549 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5550 #ifdef MIPS_DEBUG_DISAS
5551 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5552 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5554 generate_exception_end(ctx
, EXCP_RI
);
5558 /* Load needed operands */
5564 /* Compare two registers */
5566 gen_load_gpr(t0
, rs
);
5567 gen_load_gpr(t1
, rt
);
5570 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5584 /* Compare to zero */
5586 gen_load_gpr(t0
, rs
);
5589 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5592 #if defined(TARGET_MIPS64)
5594 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5596 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5599 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5604 /* Jump to immediate */
5605 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5610 /* Jump to register */
5611 if (offset
!= 0 && offset
!= 16) {
5612 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5613 others are reserved. */
5614 MIPS_INVAL("jump hint");
5615 generate_exception_end(ctx
, EXCP_RI
);
5618 gen_load_gpr(btarget
, rs
);
5621 MIPS_INVAL("branch/jump");
5622 generate_exception_end(ctx
, EXCP_RI
);
5625 if (bcond_compute
== 0) {
5626 /* No condition to be computed */
5628 case OPC_BEQ
: /* rx == rx */
5629 case OPC_BEQL
: /* rx == rx likely */
5630 case OPC_BGEZ
: /* 0 >= 0 */
5631 case OPC_BGEZL
: /* 0 >= 0 likely */
5632 case OPC_BLEZ
: /* 0 <= 0 */
5633 case OPC_BLEZL
: /* 0 <= 0 likely */
5635 ctx
->hflags
|= MIPS_HFLAG_B
;
5637 case OPC_BGEZAL
: /* 0 >= 0 */
5638 case OPC_BGEZALL
: /* 0 >= 0 likely */
5639 /* Always take and link */
5641 ctx
->hflags
|= MIPS_HFLAG_B
;
5643 case OPC_BNE
: /* rx != rx */
5644 case OPC_BGTZ
: /* 0 > 0 */
5645 case OPC_BLTZ
: /* 0 < 0 */
5648 case OPC_BLTZAL
: /* 0 < 0 */
5649 /* Handle as an unconditional branch to get correct delay
5652 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5653 ctx
->hflags
|= MIPS_HFLAG_B
;
5655 case OPC_BLTZALL
: /* 0 < 0 likely */
5656 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5657 /* Skip the instruction in the delay slot */
5658 ctx
->base
.pc_next
+= 4;
5660 case OPC_BNEL
: /* rx != rx likely */
5661 case OPC_BGTZL
: /* 0 > 0 likely */
5662 case OPC_BLTZL
: /* 0 < 0 likely */
5663 /* Skip the instruction in the delay slot */
5664 ctx
->base
.pc_next
+= 4;
5667 ctx
->hflags
|= MIPS_HFLAG_B
;
5670 ctx
->hflags
|= MIPS_HFLAG_BX
;
5674 ctx
->hflags
|= MIPS_HFLAG_B
;
5677 ctx
->hflags
|= MIPS_HFLAG_BR
;
5681 ctx
->hflags
|= MIPS_HFLAG_BR
;
5684 MIPS_INVAL("branch/jump");
5685 generate_exception_end(ctx
, EXCP_RI
);
5691 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5694 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5697 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5700 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5703 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5706 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5709 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5713 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5717 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5720 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5723 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5726 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5729 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5732 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5735 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5737 #if defined(TARGET_MIPS64)
5739 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5743 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5746 ctx
->hflags
|= MIPS_HFLAG_BC
;
5749 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5752 ctx
->hflags
|= MIPS_HFLAG_BL
;
5755 MIPS_INVAL("conditional branch/jump");
5756 generate_exception_end(ctx
, EXCP_RI
);
5761 ctx
->btarget
= btgt
;
5763 switch (delayslot_size
) {
5765 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5768 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5773 int post_delay
= insn_bytes
+ delayslot_size
;
5774 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5776 tcg_gen_movi_tl(cpu_gpr
[blink
],
5777 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5781 if (insn_bytes
== 2)
5782 ctx
->hflags
|= MIPS_HFLAG_B16
;
5788 /* nanoMIPS Branches */
5789 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5791 int rs
, int rt
, int32_t offset
)
5793 target_ulong btgt
= -1;
5794 int bcond_compute
= 0;
5795 TCGv t0
= tcg_temp_new();
5796 TCGv t1
= tcg_temp_new();
5798 /* Load needed operands */
5802 /* Compare two registers */
5804 gen_load_gpr(t0
, rs
);
5805 gen_load_gpr(t1
, rt
);
5808 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5811 /* Compare to zero */
5813 gen_load_gpr(t0
, rs
);
5816 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5819 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5821 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5825 /* Jump to register */
5826 if (offset
!= 0 && offset
!= 16) {
5827 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5828 others are reserved. */
5829 MIPS_INVAL("jump hint");
5830 generate_exception_end(ctx
, EXCP_RI
);
5833 gen_load_gpr(btarget
, rs
);
5836 MIPS_INVAL("branch/jump");
5837 generate_exception_end(ctx
, EXCP_RI
);
5840 if (bcond_compute
== 0) {
5841 /* No condition to be computed */
5843 case OPC_BEQ
: /* rx == rx */
5845 ctx
->hflags
|= MIPS_HFLAG_B
;
5847 case OPC_BGEZAL
: /* 0 >= 0 */
5848 /* Always take and link */
5849 tcg_gen_movi_tl(cpu_gpr
[31],
5850 ctx
->base
.pc_next
+ insn_bytes
);
5851 ctx
->hflags
|= MIPS_HFLAG_B
;
5853 case OPC_BNE
: /* rx != rx */
5854 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5855 /* Skip the instruction in the delay slot */
5856 ctx
->base
.pc_next
+= 4;
5859 ctx
->hflags
|= MIPS_HFLAG_BR
;
5863 tcg_gen_movi_tl(cpu_gpr
[rt
],
5864 ctx
->base
.pc_next
+ insn_bytes
);
5866 ctx
->hflags
|= MIPS_HFLAG_BR
;
5869 MIPS_INVAL("branch/jump");
5870 generate_exception_end(ctx
, EXCP_RI
);
5876 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5879 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5882 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5883 tcg_gen_movi_tl(cpu_gpr
[31],
5884 ctx
->base
.pc_next
+ insn_bytes
);
5887 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5889 ctx
->hflags
|= MIPS_HFLAG_BC
;
5892 MIPS_INVAL("conditional branch/jump");
5893 generate_exception_end(ctx
, EXCP_RI
);
5898 ctx
->btarget
= btgt
;
5901 if (insn_bytes
== 2) {
5902 ctx
->hflags
|= MIPS_HFLAG_B16
;
5909 /* special3 bitfield operations */
5910 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
5911 int rs
, int lsb
, int msb
)
5913 TCGv t0
= tcg_temp_new();
5914 TCGv t1
= tcg_temp_new();
5916 gen_load_gpr(t1
, rs
);
5919 if (lsb
+ msb
> 31) {
5923 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5925 /* The two checks together imply that lsb == 0,
5926 so this is a simple sign-extension. */
5927 tcg_gen_ext32s_tl(t0
, t1
);
5930 #if defined(TARGET_MIPS64)
5939 if (lsb
+ msb
> 63) {
5942 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5949 gen_load_gpr(t0
, rt
);
5950 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5951 tcg_gen_ext32s_tl(t0
, t0
);
5953 #if defined(TARGET_MIPS64)
5964 gen_load_gpr(t0
, rt
);
5965 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5970 MIPS_INVAL("bitops");
5971 generate_exception_end(ctx
, EXCP_RI
);
5976 gen_store_gpr(t0
, rt
);
5981 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
5986 /* If no destination, treat it as a NOP. */
5990 t0
= tcg_temp_new();
5991 gen_load_gpr(t0
, rt
);
5995 TCGv t1
= tcg_temp_new();
5996 TCGv t2
= tcg_const_tl(0x00FF00FF);
5998 tcg_gen_shri_tl(t1
, t0
, 8);
5999 tcg_gen_and_tl(t1
, t1
, t2
);
6000 tcg_gen_and_tl(t0
, t0
, t2
);
6001 tcg_gen_shli_tl(t0
, t0
, 8);
6002 tcg_gen_or_tl(t0
, t0
, t1
);
6005 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6009 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6012 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6014 #if defined(TARGET_MIPS64)
6017 TCGv t1
= tcg_temp_new();
6018 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6020 tcg_gen_shri_tl(t1
, t0
, 8);
6021 tcg_gen_and_tl(t1
, t1
, t2
);
6022 tcg_gen_and_tl(t0
, t0
, t2
);
6023 tcg_gen_shli_tl(t0
, t0
, 8);
6024 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6031 TCGv t1
= tcg_temp_new();
6032 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6034 tcg_gen_shri_tl(t1
, t0
, 16);
6035 tcg_gen_and_tl(t1
, t1
, t2
);
6036 tcg_gen_and_tl(t0
, t0
, t2
);
6037 tcg_gen_shli_tl(t0
, t0
, 16);
6038 tcg_gen_or_tl(t0
, t0
, t1
);
6039 tcg_gen_shri_tl(t1
, t0
, 32);
6040 tcg_gen_shli_tl(t0
, t0
, 32);
6041 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6048 MIPS_INVAL("bsfhl");
6049 generate_exception_end(ctx
, EXCP_RI
);
6056 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6065 t0
= tcg_temp_new();
6066 t1
= tcg_temp_new();
6067 gen_load_gpr(t0
, rs
);
6068 gen_load_gpr(t1
, rt
);
6069 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6070 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6071 if (opc
== OPC_LSA
) {
6072 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6081 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6089 t0
= tcg_temp_new();
6090 if (bits
== 0 || bits
== wordsz
) {
6092 gen_load_gpr(t0
, rt
);
6094 gen_load_gpr(t0
, rs
);
6098 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6100 #if defined(TARGET_MIPS64)
6102 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6107 TCGv t1
= tcg_temp_new();
6108 gen_load_gpr(t0
, rt
);
6109 gen_load_gpr(t1
, rs
);
6113 TCGv_i64 t2
= tcg_temp_new_i64();
6114 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6115 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6116 gen_move_low32(cpu_gpr
[rd
], t2
);
6117 tcg_temp_free_i64(t2
);
6120 #if defined(TARGET_MIPS64)
6122 tcg_gen_shli_tl(t0
, t0
, bits
);
6123 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6124 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6134 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6137 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6140 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6143 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6146 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6153 t0
= tcg_temp_new();
6154 gen_load_gpr(t0
, rt
);
6157 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6159 #if defined(TARGET_MIPS64)
6161 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6168 #ifndef CONFIG_USER_ONLY
6169 /* CP0 (MMU and control) */
6170 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6172 TCGv_i64 t0
= tcg_temp_new_i64();
6173 TCGv_i64 t1
= tcg_temp_new_i64();
6175 tcg_gen_ext_tl_i64(t0
, arg
);
6176 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6177 #if defined(TARGET_MIPS64)
6178 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6180 tcg_gen_concat32_i64(t1
, t1
, t0
);
6182 tcg_gen_st_i64(t1
, cpu_env
, off
);
6183 tcg_temp_free_i64(t1
);
6184 tcg_temp_free_i64(t0
);
6187 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6189 TCGv_i64 t0
= tcg_temp_new_i64();
6190 TCGv_i64 t1
= tcg_temp_new_i64();
6192 tcg_gen_ext_tl_i64(t0
, arg
);
6193 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6194 tcg_gen_concat32_i64(t1
, t1
, t0
);
6195 tcg_gen_st_i64(t1
, cpu_env
, off
);
6196 tcg_temp_free_i64(t1
);
6197 tcg_temp_free_i64(t0
);
6200 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6202 TCGv_i64 t0
= tcg_temp_new_i64();
6204 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6205 #if defined(TARGET_MIPS64)
6206 tcg_gen_shri_i64(t0
, t0
, 30);
6208 tcg_gen_shri_i64(t0
, t0
, 32);
6210 gen_move_low32(arg
, t0
);
6211 tcg_temp_free_i64(t0
);
6214 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6216 TCGv_i64 t0
= tcg_temp_new_i64();
6218 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6219 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6220 gen_move_low32(arg
, t0
);
6221 tcg_temp_free_i64(t0
);
6224 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6226 TCGv_i32 t0
= tcg_temp_new_i32();
6228 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6229 tcg_gen_ext_i32_tl(arg
, t0
);
6230 tcg_temp_free_i32(t0
);
6233 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6235 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6236 tcg_gen_ext32s_tl(arg
, arg
);
6239 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6241 TCGv_i32 t0
= tcg_temp_new_i32();
6243 tcg_gen_trunc_tl_i32(t0
, arg
);
6244 tcg_gen_st_i32(t0
, cpu_env
, off
);
6245 tcg_temp_free_i32(t0
);
/*
 * Jump to the enclosing function's cp0_unimplemented label when the
 * feature/ASE availability condition @c does not hold.
 * NOTE(review): do/while wrapper restored from a garbled extraction --
 * verify against upstream QEMU target/mips/translate.c.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
6255 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6257 const char *rn
= "invalid";
6263 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6264 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6268 goto cp0_unimplemented
;
6274 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6275 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6279 goto cp0_unimplemented
;
6285 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6286 ctx
->CP0_LLAddr_shift
);
6290 CP0_CHECK(ctx
->mrp
);
6291 gen_helper_mfhc0_maar(arg
, cpu_env
);
6295 goto cp0_unimplemented
;
6304 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6308 goto cp0_unimplemented
;
6312 goto cp0_unimplemented
;
6314 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6318 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6319 tcg_gen_movi_tl(arg
, 0);
6322 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6324 const char *rn
= "invalid";
6325 uint64_t mask
= ctx
->PAMask
>> 36;
6331 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6332 tcg_gen_andi_tl(arg
, arg
, mask
);
6333 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6337 goto cp0_unimplemented
;
6343 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6344 tcg_gen_andi_tl(arg
, arg
, mask
);
6345 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6349 goto cp0_unimplemented
;
6355 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6356 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6357 relevant for modern MIPS cores supporting MTHC0, therefore
6358 treating MTHC0 to LLAddr as NOP. */
6362 CP0_CHECK(ctx
->mrp
);
6363 gen_helper_mthc0_maar(cpu_env
, arg
);
6367 goto cp0_unimplemented
;
6376 tcg_gen_andi_tl(arg
, arg
, mask
);
6377 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6381 goto cp0_unimplemented
;
6385 goto cp0_unimplemented
;
6387 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6390 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6393 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6395 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6396 tcg_gen_movi_tl(arg
, 0);
6398 tcg_gen_movi_tl(arg
, ~0);
6402 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6404 const char *rn
= "invalid";
6407 check_insn(ctx
, ISA_MIPS32
);
6413 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6417 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6418 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6422 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6423 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6427 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6428 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6433 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6437 goto cp0_unimplemented
;
6443 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6444 gen_helper_mfc0_random(arg
, cpu_env
);
6448 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6453 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6454 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6458 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6459 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6463 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6464 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6468 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6469 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6473 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6474 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6475 rn
= "VPEScheFBack";
6478 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6479 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6483 goto cp0_unimplemented
;
6490 TCGv_i64 tmp
= tcg_temp_new_i64();
6491 tcg_gen_ld_i64(tmp
, cpu_env
,
6492 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6493 #if defined(TARGET_MIPS64)
6495 /* Move RI/XI fields to bits 31:30 */
6496 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6497 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6500 gen_move_low32(arg
, tmp
);
6501 tcg_temp_free_i64(tmp
);
6506 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6507 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6512 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6516 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6517 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6521 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6522 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6526 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6527 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6531 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6532 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6536 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6537 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6541 goto cp0_unimplemented
;
6548 TCGv_i64 tmp
= tcg_temp_new_i64();
6549 tcg_gen_ld_i64(tmp
, cpu_env
,
6550 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6551 #if defined(TARGET_MIPS64)
6553 /* Move RI/XI fields to bits 31:30 */
6554 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6555 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6558 gen_move_low32(arg
, tmp
);
6559 tcg_temp_free_i64(tmp
);
6565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6566 rn
= "GlobalNumber";
6569 goto cp0_unimplemented
;
6575 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6576 tcg_gen_ext32s_tl(arg
, arg
);
6580 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6581 rn
= "ContextConfig";
6582 goto cp0_unimplemented
;
6584 CP0_CHECK(ctx
->ulri
);
6585 tcg_gen_ld_tl(arg
, cpu_env
,
6586 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6587 tcg_gen_ext32s_tl(arg
, arg
);
6591 goto cp0_unimplemented
;
6597 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6601 check_insn(ctx
, ISA_MIPS32R2
);
6602 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6607 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6608 tcg_gen_ext32s_tl(arg
, arg
);
6613 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6614 tcg_gen_ext32s_tl(arg
, arg
);
6619 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6620 tcg_gen_ext32s_tl(arg
, arg
);
6625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6630 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6635 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6639 goto cp0_unimplemented
;
6645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6649 check_insn(ctx
, ISA_MIPS32R2
);
6650 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6654 check_insn(ctx
, ISA_MIPS32R2
);
6655 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6659 check_insn(ctx
, ISA_MIPS32R2
);
6660 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6664 check_insn(ctx
, ISA_MIPS32R2
);
6665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6669 check_insn(ctx
, ISA_MIPS32R2
);
6670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6675 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6679 goto cp0_unimplemented
;
6685 check_insn(ctx
, ISA_MIPS32R2
);
6686 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6690 goto cp0_unimplemented
;
6696 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6697 tcg_gen_ext32s_tl(arg
, arg
);
6702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6707 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6712 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6713 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6717 goto cp0_unimplemented
;
6723 /* Mark as an IO operation because we read the time. */
6724 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6727 gen_helper_mfc0_count(arg
, cpu_env
);
6728 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6731 /* Break the TB to be able to take timer interrupts immediately
6732 after reading count. DISAS_STOP isn't sufficient, we need to
6733 ensure we break completely out of translated code. */
6734 gen_save_pc(ctx
->base
.pc_next
+ 4);
6735 ctx
->base
.is_jmp
= DISAS_EXIT
;
6738 /* 6,7 are implementation dependent */
6740 goto cp0_unimplemented
;
6746 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6747 tcg_gen_ext32s_tl(arg
, arg
);
6751 goto cp0_unimplemented
;
6757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6760 /* 6,7 are implementation dependent */
6762 goto cp0_unimplemented
;
6768 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6772 check_insn(ctx
, ISA_MIPS32R2
);
6773 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6777 check_insn(ctx
, ISA_MIPS32R2
);
6778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6782 check_insn(ctx
, ISA_MIPS32R2
);
6783 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6787 goto cp0_unimplemented
;
6793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6797 goto cp0_unimplemented
;
6803 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6804 tcg_gen_ext32s_tl(arg
, arg
);
6808 goto cp0_unimplemented
;
6814 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6818 check_insn(ctx
, ISA_MIPS32R2
);
6819 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6820 tcg_gen_ext32s_tl(arg
, arg
);
6824 check_insn(ctx
, ISA_MIPS32R2
);
6825 CP0_CHECK(ctx
->cmgcr
);
6826 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6827 tcg_gen_ext32s_tl(arg
, arg
);
6831 goto cp0_unimplemented
;
6837 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6845 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6849 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6853 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6857 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6860 /* 6,7 are implementation dependent */
6862 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6866 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6870 goto cp0_unimplemented
;
6876 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6880 CP0_CHECK(ctx
->mrp
);
6881 gen_helper_mfc0_maar(arg
, cpu_env
);
6885 CP0_CHECK(ctx
->mrp
);
6886 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6890 goto cp0_unimplemented
;
6903 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6904 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6908 goto cp0_unimplemented
;
6921 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6922 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6926 goto cp0_unimplemented
;
6932 #if defined(TARGET_MIPS64)
6933 check_insn(ctx
, ISA_MIPS3
);
6934 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6935 tcg_gen_ext32s_tl(arg
, arg
);
6940 goto cp0_unimplemented
;
6944 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6945 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6948 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6952 goto cp0_unimplemented
;
6956 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6957 rn
= "'Diagnostic"; /* implementation dependent */
6962 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6966 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
6967 rn
= "TraceControl";
6968 goto cp0_unimplemented
;
6970 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
6971 rn
= "TraceControl2";
6972 goto cp0_unimplemented
;
6974 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
6975 rn
= "UserTraceData";
6976 goto cp0_unimplemented
;
6978 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
6980 goto cp0_unimplemented
;
6982 goto cp0_unimplemented
;
6989 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6990 tcg_gen_ext32s_tl(arg
, arg
);
6994 goto cp0_unimplemented
;
7000 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7001 rn
= "Performance0";
7004 // gen_helper_mfc0_performance1(arg);
7005 rn
= "Performance1";
7006 goto cp0_unimplemented
;
7008 // gen_helper_mfc0_performance2(arg);
7009 rn
= "Performance2";
7010 goto cp0_unimplemented
;
7012 // gen_helper_mfc0_performance3(arg);
7013 rn
= "Performance3";
7014 goto cp0_unimplemented
;
7016 // gen_helper_mfc0_performance4(arg);
7017 rn
= "Performance4";
7018 goto cp0_unimplemented
;
7020 // gen_helper_mfc0_performance5(arg);
7021 rn
= "Performance5";
7022 goto cp0_unimplemented
;
7024 // gen_helper_mfc0_performance6(arg);
7025 rn
= "Performance6";
7026 goto cp0_unimplemented
;
7028 // gen_helper_mfc0_performance7(arg);
7029 rn
= "Performance7";
7030 goto cp0_unimplemented
;
7032 goto cp0_unimplemented
;
7038 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7042 goto cp0_unimplemented
;
7051 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7055 goto cp0_unimplemented
;
7065 TCGv_i64 tmp
= tcg_temp_new_i64();
7066 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7067 gen_move_low32(arg
, tmp
);
7068 tcg_temp_free_i64(tmp
);
7076 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7080 goto cp0_unimplemented
;
7089 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7100 goto cp0_unimplemented
;
7106 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7107 tcg_gen_ext32s_tl(arg
, arg
);
7111 goto cp0_unimplemented
;
7118 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7127 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7128 tcg_gen_ld_tl(arg
, cpu_env
,
7129 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7130 tcg_gen_ext32s_tl(arg
, arg
);
7134 goto cp0_unimplemented
;
7138 goto cp0_unimplemented
;
7140 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7144 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7145 gen_mfc0_unimplemented(ctx
, arg
);
7148 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7150 const char *rn
= "invalid";
7153 check_insn(ctx
, ISA_MIPS32
);
7155 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7163 gen_helper_mtc0_index(cpu_env
, arg
);
7167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7168 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7172 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7177 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7187 goto cp0_unimplemented
;
7197 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7198 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7202 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7203 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7207 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7208 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7212 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7213 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7217 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7218 tcg_gen_st_tl(arg
, cpu_env
,
7219 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7223 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7224 tcg_gen_st_tl(arg
, cpu_env
,
7225 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7226 rn
= "VPEScheFBack";
7229 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7230 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7234 goto cp0_unimplemented
;
7240 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7244 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7245 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7249 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7250 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7255 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7260 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7265 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7269 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7270 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7274 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7275 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7279 goto cp0_unimplemented
;
7285 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7291 rn
= "GlobalNumber";
7294 goto cp0_unimplemented
;
7300 gen_helper_mtc0_context(cpu_env
, arg
);
7304 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7305 rn
= "ContextConfig";
7306 goto cp0_unimplemented
;
7308 CP0_CHECK(ctx
->ulri
);
7309 tcg_gen_st_tl(arg
, cpu_env
,
7310 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7314 goto cp0_unimplemented
;
7320 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7324 check_insn(ctx
, ISA_MIPS32R2
);
7325 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7327 ctx
->base
.is_jmp
= DISAS_STOP
;
7331 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7336 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7341 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7346 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7351 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7356 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7360 goto cp0_unimplemented
;
7366 gen_helper_mtc0_wired(cpu_env
, arg
);
7370 check_insn(ctx
, ISA_MIPS32R2
);
7371 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7375 check_insn(ctx
, ISA_MIPS32R2
);
7376 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7380 check_insn(ctx
, ISA_MIPS32R2
);
7381 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7385 check_insn(ctx
, ISA_MIPS32R2
);
7386 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7390 check_insn(ctx
, ISA_MIPS32R2
);
7391 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7396 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7400 goto cp0_unimplemented
;
7406 check_insn(ctx
, ISA_MIPS32R2
);
7407 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7408 ctx
->base
.is_jmp
= DISAS_STOP
;
7412 goto cp0_unimplemented
;
7434 goto cp0_unimplemented
;
7440 gen_helper_mtc0_count(cpu_env
, arg
);
7443 /* 6,7 are implementation dependent */
7445 goto cp0_unimplemented
;
7451 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7455 goto cp0_unimplemented
;
7461 gen_helper_mtc0_compare(cpu_env
, arg
);
7464 /* 6,7 are implementation dependent */
7466 goto cp0_unimplemented
;
7472 save_cpu_state(ctx
, 1);
7473 gen_helper_mtc0_status(cpu_env
, arg
);
7474 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7475 gen_save_pc(ctx
->base
.pc_next
+ 4);
7476 ctx
->base
.is_jmp
= DISAS_EXIT
;
7480 check_insn(ctx
, ISA_MIPS32R2
);
7481 gen_helper_mtc0_intctl(cpu_env
, arg
);
7482 /* Stop translation as we may have switched the execution mode */
7483 ctx
->base
.is_jmp
= DISAS_STOP
;
7487 check_insn(ctx
, ISA_MIPS32R2
);
7488 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7489 /* Stop translation as we may have switched the execution mode */
7490 ctx
->base
.is_jmp
= DISAS_STOP
;
7494 check_insn(ctx
, ISA_MIPS32R2
);
7495 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7496 /* Stop translation as we may have switched the execution mode */
7497 ctx
->base
.is_jmp
= DISAS_STOP
;
7501 goto cp0_unimplemented
;
7507 save_cpu_state(ctx
, 1);
7508 gen_helper_mtc0_cause(cpu_env
, arg
);
7509 /* Stop translation as we may have triggered an interrupt.
7510 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7511 * translated code to check for pending interrupts. */
7512 gen_save_pc(ctx
->base
.pc_next
+ 4);
7513 ctx
->base
.is_jmp
= DISAS_EXIT
;
7517 goto cp0_unimplemented
;
7523 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7527 goto cp0_unimplemented
;
7537 check_insn(ctx
, ISA_MIPS32R2
);
7538 gen_helper_mtc0_ebase(cpu_env
, arg
);
7542 goto cp0_unimplemented
;
7548 gen_helper_mtc0_config0(cpu_env
, arg
);
7550 /* Stop translation as we may have switched the execution mode */
7551 ctx
->base
.is_jmp
= DISAS_STOP
;
7554 /* ignored, read only */
7558 gen_helper_mtc0_config2(cpu_env
, arg
);
7560 /* Stop translation as we may have switched the execution mode */
7561 ctx
->base
.is_jmp
= DISAS_STOP
;
7564 gen_helper_mtc0_config3(cpu_env
, arg
);
7566 /* Stop translation as we may have switched the execution mode */
7567 ctx
->base
.is_jmp
= DISAS_STOP
;
7570 gen_helper_mtc0_config4(cpu_env
, arg
);
7572 ctx
->base
.is_jmp
= DISAS_STOP
;
7575 gen_helper_mtc0_config5(cpu_env
, arg
);
7577 /* Stop translation as we may have switched the execution mode */
7578 ctx
->base
.is_jmp
= DISAS_STOP
;
7580 /* 6,7 are implementation dependent */
7590 rn
= "Invalid config selector";
7591 goto cp0_unimplemented
;
7597 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7601 CP0_CHECK(ctx
->mrp
);
7602 gen_helper_mtc0_maar(cpu_env
, arg
);
7606 CP0_CHECK(ctx
->mrp
);
7607 gen_helper_mtc0_maari(cpu_env
, arg
);
7611 goto cp0_unimplemented
;
7624 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7625 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7629 goto cp0_unimplemented
;
7642 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7643 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7647 goto cp0_unimplemented
;
7653 #if defined(TARGET_MIPS64)
7654 check_insn(ctx
, ISA_MIPS3
);
7655 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7660 goto cp0_unimplemented
;
7664 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7665 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7668 gen_helper_mtc0_framemask(cpu_env
, arg
);
7672 goto cp0_unimplemented
;
7677 rn
= "Diagnostic"; /* implementation dependent */
7682 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7683 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7684 gen_save_pc(ctx
->base
.pc_next
+ 4);
7685 ctx
->base
.is_jmp
= DISAS_EXIT
;
7689 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7690 rn
= "TraceControl";
7691 /* Stop translation as we may have switched the execution mode */
7692 ctx
->base
.is_jmp
= DISAS_STOP
;
7693 goto cp0_unimplemented
;
7695 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7696 rn
= "TraceControl2";
7697 /* Stop translation as we may have switched the execution mode */
7698 ctx
->base
.is_jmp
= DISAS_STOP
;
7699 goto cp0_unimplemented
;
7701 /* Stop translation as we may have switched the execution mode */
7702 ctx
->base
.is_jmp
= DISAS_STOP
;
7703 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7704 rn
= "UserTraceData";
7705 /* Stop translation as we may have switched the execution mode */
7706 ctx
->base
.is_jmp
= DISAS_STOP
;
7707 goto cp0_unimplemented
;
7709 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7710 /* Stop translation as we may have switched the execution mode */
7711 ctx
->base
.is_jmp
= DISAS_STOP
;
7713 goto cp0_unimplemented
;
7715 goto cp0_unimplemented
;
7722 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7726 goto cp0_unimplemented
;
7732 gen_helper_mtc0_performance0(cpu_env
, arg
);
7733 rn
= "Performance0";
7736 // gen_helper_mtc0_performance1(arg);
7737 rn
= "Performance1";
7738 goto cp0_unimplemented
;
7740 // gen_helper_mtc0_performance2(arg);
7741 rn
= "Performance2";
7742 goto cp0_unimplemented
;
7744 // gen_helper_mtc0_performance3(arg);
7745 rn
= "Performance3";
7746 goto cp0_unimplemented
;
7748 // gen_helper_mtc0_performance4(arg);
7749 rn
= "Performance4";
7750 goto cp0_unimplemented
;
7752 // gen_helper_mtc0_performance5(arg);
7753 rn
= "Performance5";
7754 goto cp0_unimplemented
;
7756 // gen_helper_mtc0_performance6(arg);
7757 rn
= "Performance6";
7758 goto cp0_unimplemented
;
7760 // gen_helper_mtc0_performance7(arg);
7761 rn
= "Performance7";
7762 goto cp0_unimplemented
;
7764 goto cp0_unimplemented
;
7770 gen_helper_mtc0_errctl(cpu_env
, arg
);
7771 ctx
->base
.is_jmp
= DISAS_STOP
;
7775 goto cp0_unimplemented
;
7788 goto cp0_unimplemented
;
7797 gen_helper_mtc0_taglo(cpu_env
, arg
);
7804 gen_helper_mtc0_datalo(cpu_env
, arg
);
7808 goto cp0_unimplemented
;
7817 gen_helper_mtc0_taghi(cpu_env
, arg
);
7824 gen_helper_mtc0_datahi(cpu_env
, arg
);
7829 goto cp0_unimplemented
;
7835 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7839 goto cp0_unimplemented
;
7846 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7855 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7856 tcg_gen_st_tl(arg
, cpu_env
,
7857 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7861 goto cp0_unimplemented
;
7865 goto cp0_unimplemented
;
7867 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7869 /* For simplicity assume that all writes can cause interrupts. */
7870 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7872 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7873 * translated code to check for pending interrupts. */
7874 gen_save_pc(ctx
->base
.pc_next
+ 4);
7875 ctx
->base
.is_jmp
= DISAS_EXIT
;
7880 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7883 #if defined(TARGET_MIPS64)
7884 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7886 const char *rn
= "invalid";
7889 check_insn(ctx
, ISA_MIPS64
);
7895 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7899 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7900 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7904 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7905 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7909 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7910 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7915 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7919 goto cp0_unimplemented
;
7925 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7926 gen_helper_mfc0_random(arg
, cpu_env
);
7930 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7931 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7935 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7936 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7940 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7941 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7945 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7946 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
7950 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7951 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7955 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7956 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7957 rn
= "VPEScheFBack";
7960 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7961 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7965 goto cp0_unimplemented
;
7971 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
7975 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7976 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7980 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7981 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7985 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7986 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7990 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7991 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7995 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7996 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8000 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8001 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8005 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8006 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8010 goto cp0_unimplemented
;
8016 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8021 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8022 rn
= "GlobalNumber";
8025 goto cp0_unimplemented
;
8031 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8035 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8036 rn
= "ContextConfig";
8037 goto cp0_unimplemented
;
8039 CP0_CHECK(ctx
->ulri
);
8040 tcg_gen_ld_tl(arg
, cpu_env
,
8041 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8045 goto cp0_unimplemented
;
8051 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8055 check_insn(ctx
, ISA_MIPS32R2
);
8056 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8061 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8066 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8071 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8076 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8081 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8086 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8090 goto cp0_unimplemented
;
8096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8100 check_insn(ctx
, ISA_MIPS32R2
);
8101 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8105 check_insn(ctx
, ISA_MIPS32R2
);
8106 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8110 check_insn(ctx
, ISA_MIPS32R2
);
8111 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8115 check_insn(ctx
, ISA_MIPS32R2
);
8116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8120 check_insn(ctx
, ISA_MIPS32R2
);
8121 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8126 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8130 goto cp0_unimplemented
;
8136 check_insn(ctx
, ISA_MIPS32R2
);
8137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8141 goto cp0_unimplemented
;
8147 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8152 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8157 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8162 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8163 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8167 goto cp0_unimplemented
;
8173 /* Mark as an IO operation because we read the time. */
8174 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8177 gen_helper_mfc0_count(arg
, cpu_env
);
8178 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8181 /* Break the TB to be able to take timer interrupts immediately
8182 after reading count. DISAS_STOP isn't sufficient, we need to
8183 ensure we break completely out of translated code. */
8184 gen_save_pc(ctx
->base
.pc_next
+ 4);
8185 ctx
->base
.is_jmp
= DISAS_EXIT
;
8188 /* 6,7 are implementation dependent */
8190 goto cp0_unimplemented
;
8196 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8200 goto cp0_unimplemented
;
8206 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8209 /* 6,7 are implementation dependent */
8211 goto cp0_unimplemented
;
8217 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8221 check_insn(ctx
, ISA_MIPS32R2
);
8222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8226 check_insn(ctx
, ISA_MIPS32R2
);
8227 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8231 check_insn(ctx
, ISA_MIPS32R2
);
8232 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8236 goto cp0_unimplemented
;
8242 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8246 goto cp0_unimplemented
;
8252 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8256 goto cp0_unimplemented
;
8262 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8266 check_insn(ctx
, ISA_MIPS32R2
);
8267 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8271 check_insn(ctx
, ISA_MIPS32R2
);
8272 CP0_CHECK(ctx
->cmgcr
);
8273 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8277 goto cp0_unimplemented
;
8283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8306 /* 6,7 are implementation dependent */
8308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8316 goto cp0_unimplemented
;
8322 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8326 CP0_CHECK(ctx
->mrp
);
8327 gen_helper_dmfc0_maar(arg
, cpu_env
);
8331 CP0_CHECK(ctx
->mrp
);
8332 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8336 goto cp0_unimplemented
;
8349 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8350 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8354 goto cp0_unimplemented
;
8367 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8368 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8372 goto cp0_unimplemented
;
8378 check_insn(ctx
, ISA_MIPS3
);
8379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8383 goto cp0_unimplemented
;
8387 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8388 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8395 goto cp0_unimplemented
;
8399 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8400 rn
= "'Diagnostic"; /* implementation dependent */
8405 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8409 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8410 rn
= "TraceControl";
8411 goto cp0_unimplemented
;
8413 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8414 rn
= "TraceControl2";
8415 goto cp0_unimplemented
;
8417 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8418 rn
= "UserTraceData";
8419 goto cp0_unimplemented
;
8421 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8423 goto cp0_unimplemented
;
8425 goto cp0_unimplemented
;
8432 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8436 goto cp0_unimplemented
;
8442 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8443 rn
= "Performance0";
8446 // gen_helper_dmfc0_performance1(arg);
8447 rn
= "Performance1";
8448 goto cp0_unimplemented
;
8450 // gen_helper_dmfc0_performance2(arg);
8451 rn
= "Performance2";
8452 goto cp0_unimplemented
;
8454 // gen_helper_dmfc0_performance3(arg);
8455 rn
= "Performance3";
8456 goto cp0_unimplemented
;
8458 // gen_helper_dmfc0_performance4(arg);
8459 rn
= "Performance4";
8460 goto cp0_unimplemented
;
8462 // gen_helper_dmfc0_performance5(arg);
8463 rn
= "Performance5";
8464 goto cp0_unimplemented
;
8466 // gen_helper_dmfc0_performance6(arg);
8467 rn
= "Performance6";
8468 goto cp0_unimplemented
;
8470 // gen_helper_dmfc0_performance7(arg);
8471 rn
= "Performance7";
8472 goto cp0_unimplemented
;
8474 goto cp0_unimplemented
;
8480 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8484 goto cp0_unimplemented
;
8494 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8498 goto cp0_unimplemented
;
8507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8518 goto cp0_unimplemented
;
8527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8538 goto cp0_unimplemented
;
8544 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8548 goto cp0_unimplemented
;
8555 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8564 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8565 tcg_gen_ld_tl(arg
, cpu_env
,
8566 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8570 goto cp0_unimplemented
;
8574 goto cp0_unimplemented
;
8576 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8580 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8581 gen_mfc0_unimplemented(ctx
, arg
);
8584 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8586 const char *rn
= "invalid";
8589 check_insn(ctx
, ISA_MIPS64
);
8591 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8599 gen_helper_mtc0_index(cpu_env
, arg
);
8603 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8604 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8608 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8613 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8623 goto cp0_unimplemented
;
8633 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8634 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8638 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8639 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8644 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8648 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8649 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8653 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8654 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8658 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8659 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8660 rn
= "VPEScheFBack";
8663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8664 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8668 goto cp0_unimplemented
;
8674 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8679 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8683 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8684 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8689 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8693 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8694 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8698 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8699 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8703 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8704 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8708 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8709 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8713 goto cp0_unimplemented
;
8719 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8725 rn
= "GlobalNumber";
8728 goto cp0_unimplemented
;
8734 gen_helper_mtc0_context(cpu_env
, arg
);
8738 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8739 rn
= "ContextConfig";
8740 goto cp0_unimplemented
;
8742 CP0_CHECK(ctx
->ulri
);
8743 tcg_gen_st_tl(arg
, cpu_env
,
8744 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8748 goto cp0_unimplemented
;
8754 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8758 check_insn(ctx
, ISA_MIPS32R2
);
8759 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8764 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8769 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8774 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8779 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8784 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8789 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8793 goto cp0_unimplemented
;
8799 gen_helper_mtc0_wired(cpu_env
, arg
);
8803 check_insn(ctx
, ISA_MIPS32R2
);
8804 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8808 check_insn(ctx
, ISA_MIPS32R2
);
8809 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8813 check_insn(ctx
, ISA_MIPS32R2
);
8814 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8818 check_insn(ctx
, ISA_MIPS32R2
);
8819 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8823 check_insn(ctx
, ISA_MIPS32R2
);
8824 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8829 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8833 goto cp0_unimplemented
;
8839 check_insn(ctx
, ISA_MIPS32R2
);
8840 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8841 ctx
->base
.is_jmp
= DISAS_STOP
;
8845 goto cp0_unimplemented
;
8867 goto cp0_unimplemented
;
8873 gen_helper_mtc0_count(cpu_env
, arg
);
8876 /* 6,7 are implementation dependent */
8878 goto cp0_unimplemented
;
8880 /* Stop translation as we may have switched the execution mode */
8881 ctx
->base
.is_jmp
= DISAS_STOP
;
8886 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8890 goto cp0_unimplemented
;
8896 gen_helper_mtc0_compare(cpu_env
, arg
);
8899 /* 6,7 are implementation dependent */
8901 goto cp0_unimplemented
;
8903 /* Stop translation as we may have switched the execution mode */
8904 ctx
->base
.is_jmp
= DISAS_STOP
;
8909 save_cpu_state(ctx
, 1);
8910 gen_helper_mtc0_status(cpu_env
, arg
);
8911 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8912 gen_save_pc(ctx
->base
.pc_next
+ 4);
8913 ctx
->base
.is_jmp
= DISAS_EXIT
;
8917 check_insn(ctx
, ISA_MIPS32R2
);
8918 gen_helper_mtc0_intctl(cpu_env
, arg
);
8919 /* Stop translation as we may have switched the execution mode */
8920 ctx
->base
.is_jmp
= DISAS_STOP
;
8924 check_insn(ctx
, ISA_MIPS32R2
);
8925 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8926 /* Stop translation as we may have switched the execution mode */
8927 ctx
->base
.is_jmp
= DISAS_STOP
;
8931 check_insn(ctx
, ISA_MIPS32R2
);
8932 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8933 /* Stop translation as we may have switched the execution mode */
8934 ctx
->base
.is_jmp
= DISAS_STOP
;
8938 goto cp0_unimplemented
;
8944 save_cpu_state(ctx
, 1);
8945 gen_helper_mtc0_cause(cpu_env
, arg
);
8946 /* Stop translation as we may have triggered an interrupt.
8947 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8948 * translated code to check for pending interrupts. */
8949 gen_save_pc(ctx
->base
.pc_next
+ 4);
8950 ctx
->base
.is_jmp
= DISAS_EXIT
;
8954 goto cp0_unimplemented
;
8960 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8964 goto cp0_unimplemented
;
8974 check_insn(ctx
, ISA_MIPS32R2
);
8975 gen_helper_mtc0_ebase(cpu_env
, arg
);
8979 goto cp0_unimplemented
;
8985 gen_helper_mtc0_config0(cpu_env
, arg
);
8987 /* Stop translation as we may have switched the execution mode */
8988 ctx
->base
.is_jmp
= DISAS_STOP
;
8991 /* ignored, read only */
8995 gen_helper_mtc0_config2(cpu_env
, arg
);
8997 /* Stop translation as we may have switched the execution mode */
8998 ctx
->base
.is_jmp
= DISAS_STOP
;
9001 gen_helper_mtc0_config3(cpu_env
, arg
);
9003 /* Stop translation as we may have switched the execution mode */
9004 ctx
->base
.is_jmp
= DISAS_STOP
;
9007 /* currently ignored */
9011 gen_helper_mtc0_config5(cpu_env
, arg
);
9013 /* Stop translation as we may have switched the execution mode */
9014 ctx
->base
.is_jmp
= DISAS_STOP
;
9016 /* 6,7 are implementation dependent */
9018 rn
= "Invalid config selector";
9019 goto cp0_unimplemented
;
9025 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9029 CP0_CHECK(ctx
->mrp
);
9030 gen_helper_mtc0_maar(cpu_env
, arg
);
9034 CP0_CHECK(ctx
->mrp
);
9035 gen_helper_mtc0_maari(cpu_env
, arg
);
9039 goto cp0_unimplemented
;
9052 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9053 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9057 goto cp0_unimplemented
;
9070 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9071 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9075 goto cp0_unimplemented
;
9081 check_insn(ctx
, ISA_MIPS3
);
9082 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9086 goto cp0_unimplemented
;
9090 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9091 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9094 gen_helper_mtc0_framemask(cpu_env
, arg
);
9098 goto cp0_unimplemented
;
9103 rn
= "Diagnostic"; /* implementation dependent */
9108 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9109 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9110 gen_save_pc(ctx
->base
.pc_next
+ 4);
9111 ctx
->base
.is_jmp
= DISAS_EXIT
;
9115 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9116 /* Stop translation as we may have switched the execution mode */
9117 ctx
->base
.is_jmp
= DISAS_STOP
;
9118 rn
= "TraceControl";
9119 goto cp0_unimplemented
;
9121 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9122 /* Stop translation as we may have switched the execution mode */
9123 ctx
->base
.is_jmp
= DISAS_STOP
;
9124 rn
= "TraceControl2";
9125 goto cp0_unimplemented
;
9127 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9128 /* Stop translation as we may have switched the execution mode */
9129 ctx
->base
.is_jmp
= DISAS_STOP
;
9130 rn
= "UserTraceData";
9131 goto cp0_unimplemented
;
9133 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9134 /* Stop translation as we may have switched the execution mode */
9135 ctx
->base
.is_jmp
= DISAS_STOP
;
9137 goto cp0_unimplemented
;
9139 goto cp0_unimplemented
;
9146 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9150 goto cp0_unimplemented
;
9156 gen_helper_mtc0_performance0(cpu_env
, arg
);
9157 rn
= "Performance0";
9160 // gen_helper_mtc0_performance1(cpu_env, arg);
9161 rn
= "Performance1";
9162 goto cp0_unimplemented
;
9164 // gen_helper_mtc0_performance2(cpu_env, arg);
9165 rn
= "Performance2";
9166 goto cp0_unimplemented
;
9168 // gen_helper_mtc0_performance3(cpu_env, arg);
9169 rn
= "Performance3";
9170 goto cp0_unimplemented
;
9172 // gen_helper_mtc0_performance4(cpu_env, arg);
9173 rn
= "Performance4";
9174 goto cp0_unimplemented
;
9176 // gen_helper_mtc0_performance5(cpu_env, arg);
9177 rn
= "Performance5";
9178 goto cp0_unimplemented
;
9180 // gen_helper_mtc0_performance6(cpu_env, arg);
9181 rn
= "Performance6";
9182 goto cp0_unimplemented
;
9184 // gen_helper_mtc0_performance7(cpu_env, arg);
9185 rn
= "Performance7";
9186 goto cp0_unimplemented
;
9188 goto cp0_unimplemented
;
9194 gen_helper_mtc0_errctl(cpu_env
, arg
);
9195 ctx
->base
.is_jmp
= DISAS_STOP
;
9199 goto cp0_unimplemented
;
9212 goto cp0_unimplemented
;
9221 gen_helper_mtc0_taglo(cpu_env
, arg
);
9228 gen_helper_mtc0_datalo(cpu_env
, arg
);
9232 goto cp0_unimplemented
;
9241 gen_helper_mtc0_taghi(cpu_env
, arg
);
9248 gen_helper_mtc0_datahi(cpu_env
, arg
);
9253 goto cp0_unimplemented
;
9259 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9263 goto cp0_unimplemented
;
9270 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9279 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9280 tcg_gen_st_tl(arg
, cpu_env
,
9281 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9285 goto cp0_unimplemented
;
9289 goto cp0_unimplemented
;
9291 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9293 /* For simplicity assume that all writes can cause interrupts. */
9294 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9296 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9297 * translated code to check for pending interrupts. */
9298 gen_save_pc(ctx
->base
.pc_next
+ 4);
9299 ctx
->base
.is_jmp
= DISAS_EXIT
;
9304 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9306 #endif /* TARGET_MIPS64 */
9308 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9309 int u
, int sel
, int h
)
9311 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9312 TCGv t0
= tcg_temp_local_new();
9314 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9315 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9316 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9317 tcg_gen_movi_tl(t0
, -1);
9318 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9319 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9320 tcg_gen_movi_tl(t0
, -1);
9326 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9329 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9339 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9342 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9345 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9348 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9351 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9354 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9357 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9360 gen_mfc0(ctx
, t0
, rt
, sel
);
9367 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9370 gen_mfc0(ctx
, t0
, rt
, sel
);
9376 gen_helper_mftc0_status(t0
, cpu_env
);
9379 gen_mfc0(ctx
, t0
, rt
, sel
);
9385 gen_helper_mftc0_cause(t0
, cpu_env
);
9395 gen_helper_mftc0_epc(t0
, cpu_env
);
9405 gen_helper_mftc0_ebase(t0
, cpu_env
);
9422 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9432 gen_helper_mftc0_debug(t0
, cpu_env
);
9435 gen_mfc0(ctx
, t0
, rt
, sel
);
9440 gen_mfc0(ctx
, t0
, rt
, sel
);
9442 } else switch (sel
) {
9443 /* GPR registers. */
9445 gen_helper_1e0i(mftgpr
, t0
, rt
);
9447 /* Auxiliary CPU registers */
9451 gen_helper_1e0i(mftlo
, t0
, 0);
9454 gen_helper_1e0i(mfthi
, t0
, 0);
9457 gen_helper_1e0i(mftacx
, t0
, 0);
9460 gen_helper_1e0i(mftlo
, t0
, 1);
9463 gen_helper_1e0i(mfthi
, t0
, 1);
9466 gen_helper_1e0i(mftacx
, t0
, 1);
9469 gen_helper_1e0i(mftlo
, t0
, 2);
9472 gen_helper_1e0i(mfthi
, t0
, 2);
9475 gen_helper_1e0i(mftacx
, t0
, 2);
9478 gen_helper_1e0i(mftlo
, t0
, 3);
9481 gen_helper_1e0i(mfthi
, t0
, 3);
9484 gen_helper_1e0i(mftacx
, t0
, 3);
9487 gen_helper_mftdsp(t0
, cpu_env
);
9493 /* Floating point (COP1). */
9495 /* XXX: For now we support only a single FPU context. */
9497 TCGv_i32 fp0
= tcg_temp_new_i32();
9499 gen_load_fpr32(ctx
, fp0
, rt
);
9500 tcg_gen_ext_i32_tl(t0
, fp0
);
9501 tcg_temp_free_i32(fp0
);
9503 TCGv_i32 fp0
= tcg_temp_new_i32();
9505 gen_load_fpr32h(ctx
, fp0
, rt
);
9506 tcg_gen_ext_i32_tl(t0
, fp0
);
9507 tcg_temp_free_i32(fp0
);
9511 /* XXX: For now we support only a single FPU context. */
9512 gen_helper_1e0i(cfc1
, t0
, rt
);
9514 /* COP2: Not implemented. */
9521 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9522 gen_store_gpr(t0
, rd
);
9528 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9529 generate_exception_end(ctx
, EXCP_RI
);
9532 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9533 int u
, int sel
, int h
)
9535 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9536 TCGv t0
= tcg_temp_local_new();
9538 gen_load_gpr(t0
, rt
);
9539 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9540 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9541 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9543 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9544 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9551 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9554 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9564 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9567 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9570 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9573 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9576 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9579 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9582 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9585 gen_mtc0(ctx
, t0
, rd
, sel
);
9592 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9595 gen_mtc0(ctx
, t0
, rd
, sel
);
9601 gen_helper_mttc0_status(cpu_env
, t0
);
9604 gen_mtc0(ctx
, t0
, rd
, sel
);
9610 gen_helper_mttc0_cause(cpu_env
, t0
);
9620 gen_helper_mttc0_ebase(cpu_env
, t0
);
9630 gen_helper_mttc0_debug(cpu_env
, t0
);
9633 gen_mtc0(ctx
, t0
, rd
, sel
);
9638 gen_mtc0(ctx
, t0
, rd
, sel
);
9640 } else switch (sel
) {
9641 /* GPR registers. */
9643 gen_helper_0e1i(mttgpr
, t0
, rd
);
9645 /* Auxiliary CPU registers */
9649 gen_helper_0e1i(mttlo
, t0
, 0);
9652 gen_helper_0e1i(mtthi
, t0
, 0);
9655 gen_helper_0e1i(mttacx
, t0
, 0);
9658 gen_helper_0e1i(mttlo
, t0
, 1);
9661 gen_helper_0e1i(mtthi
, t0
, 1);
9664 gen_helper_0e1i(mttacx
, t0
, 1);
9667 gen_helper_0e1i(mttlo
, t0
, 2);
9670 gen_helper_0e1i(mtthi
, t0
, 2);
9673 gen_helper_0e1i(mttacx
, t0
, 2);
9676 gen_helper_0e1i(mttlo
, t0
, 3);
9679 gen_helper_0e1i(mtthi
, t0
, 3);
9682 gen_helper_0e1i(mttacx
, t0
, 3);
9685 gen_helper_mttdsp(cpu_env
, t0
);
9691 /* Floating point (COP1). */
9693 /* XXX: For now we support only a single FPU context. */
9695 TCGv_i32 fp0
= tcg_temp_new_i32();
9697 tcg_gen_trunc_tl_i32(fp0
, t0
);
9698 gen_store_fpr32(ctx
, fp0
, rd
);
9699 tcg_temp_free_i32(fp0
);
9701 TCGv_i32 fp0
= tcg_temp_new_i32();
9703 tcg_gen_trunc_tl_i32(fp0
, t0
);
9704 gen_store_fpr32h(ctx
, fp0
, rd
);
9705 tcg_temp_free_i32(fp0
);
9709 /* XXX: For now we support only a single FPU context. */
9711 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9713 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9714 tcg_temp_free_i32(fs_tmp
);
9716 /* Stop translation as we may have changed hflags */
9717 ctx
->base
.is_jmp
= DISAS_STOP
;
9719 /* COP2: Not implemented. */
9726 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9732 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9733 generate_exception_end(ctx
, EXCP_RI
);
9736 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9738 const char *opn
= "ldst";
9740 check_cp0_enabled(ctx
);
9747 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9752 TCGv t0
= tcg_temp_new();
9754 gen_load_gpr(t0
, rt
);
9755 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9760 #if defined(TARGET_MIPS64)
9762 check_insn(ctx
, ISA_MIPS3
);
9767 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9771 check_insn(ctx
, ISA_MIPS3
);
9773 TCGv t0
= tcg_temp_new();
9775 gen_load_gpr(t0
, rt
);
9776 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9788 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9794 TCGv t0
= tcg_temp_new();
9795 gen_load_gpr(t0
, rt
);
9796 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9802 check_cp0_enabled(ctx
);
9807 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9808 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9812 check_cp0_enabled(ctx
);
9813 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9814 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9819 if (!env
->tlb
->helper_tlbwi
)
9821 gen_helper_tlbwi(cpu_env
);
9826 if (!env
->tlb
->helper_tlbinv
) {
9829 gen_helper_tlbinv(cpu_env
);
9830 } /* treat as nop if TLBINV not supported */
9835 if (!env
->tlb
->helper_tlbinvf
) {
9838 gen_helper_tlbinvf(cpu_env
);
9839 } /* treat as nop if TLBINV not supported */
9843 if (!env
->tlb
->helper_tlbwr
)
9845 gen_helper_tlbwr(cpu_env
);
9849 if (!env
->tlb
->helper_tlbp
)
9851 gen_helper_tlbp(cpu_env
);
9855 if (!env
->tlb
->helper_tlbr
)
9857 gen_helper_tlbr(cpu_env
);
9859 case OPC_ERET
: /* OPC_ERETNC */
9860 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9861 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9864 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9865 if (ctx
->opcode
& (1 << bit_shift
)) {
9868 check_insn(ctx
, ISA_MIPS32R5
);
9869 gen_helper_eretnc(cpu_env
);
9873 check_insn(ctx
, ISA_MIPS2
);
9874 gen_helper_eret(cpu_env
);
9876 ctx
->base
.is_jmp
= DISAS_EXIT
;
9881 check_insn(ctx
, ISA_MIPS32
);
9882 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9883 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9886 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9888 generate_exception_end(ctx
, EXCP_RI
);
9890 gen_helper_deret(cpu_env
);
9891 ctx
->base
.is_jmp
= DISAS_EXIT
;
9896 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
9897 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9898 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9901 /* If we get an exception, we want to restart at next instruction */
9902 ctx
->base
.pc_next
+= 4;
9903 save_cpu_state(ctx
, 1);
9904 ctx
->base
.pc_next
-= 4;
9905 gen_helper_wait(cpu_env
);
9906 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9911 generate_exception_end(ctx
, EXCP_RI
);
9914 (void)opn
; /* avoid a compiler warning */
9916 #endif /* !CONFIG_USER_ONLY */
9918 /* CP1 Branches (before delay slot) */
9919 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9920 int32_t cc
, int32_t offset
)
9922 target_ulong btarget
;
9923 TCGv_i32 t0
= tcg_temp_new_i32();
9925 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9926 generate_exception_end(ctx
, EXCP_RI
);
9931 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
9933 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9937 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9938 tcg_gen_not_i32(t0
, t0
);
9939 tcg_gen_andi_i32(t0
, t0
, 1);
9940 tcg_gen_extu_i32_tl(bcond
, t0
);
9943 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9944 tcg_gen_not_i32(t0
, t0
);
9945 tcg_gen_andi_i32(t0
, t0
, 1);
9946 tcg_gen_extu_i32_tl(bcond
, t0
);
9949 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9950 tcg_gen_andi_i32(t0
, t0
, 1);
9951 tcg_gen_extu_i32_tl(bcond
, t0
);
9954 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9955 tcg_gen_andi_i32(t0
, t0
, 1);
9956 tcg_gen_extu_i32_tl(bcond
, t0
);
9958 ctx
->hflags
|= MIPS_HFLAG_BL
;
9962 TCGv_i32 t1
= tcg_temp_new_i32();
9963 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9964 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9965 tcg_gen_nand_i32(t0
, t0
, t1
);
9966 tcg_temp_free_i32(t1
);
9967 tcg_gen_andi_i32(t0
, t0
, 1);
9968 tcg_gen_extu_i32_tl(bcond
, t0
);
9973 TCGv_i32 t1
= tcg_temp_new_i32();
9974 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9975 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9976 tcg_gen_or_i32(t0
, t0
, t1
);
9977 tcg_temp_free_i32(t1
);
9978 tcg_gen_andi_i32(t0
, t0
, 1);
9979 tcg_gen_extu_i32_tl(bcond
, t0
);
9984 TCGv_i32 t1
= tcg_temp_new_i32();
9985 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9986 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9987 tcg_gen_and_i32(t0
, t0
, t1
);
9988 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
9989 tcg_gen_and_i32(t0
, t0
, t1
);
9990 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
9991 tcg_gen_nand_i32(t0
, t0
, t1
);
9992 tcg_temp_free_i32(t1
);
9993 tcg_gen_andi_i32(t0
, t0
, 1);
9994 tcg_gen_extu_i32_tl(bcond
, t0
);
9999 TCGv_i32 t1
= tcg_temp_new_i32();
10000 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10001 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10002 tcg_gen_or_i32(t0
, t0
, t1
);
10003 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10004 tcg_gen_or_i32(t0
, t0
, t1
);
10005 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10006 tcg_gen_or_i32(t0
, t0
, t1
);
10007 tcg_temp_free_i32(t1
);
10008 tcg_gen_andi_i32(t0
, t0
, 1);
10009 tcg_gen_extu_i32_tl(bcond
, t0
);
10012 ctx
->hflags
|= MIPS_HFLAG_BC
;
10015 MIPS_INVAL("cp1 cond branch");
10016 generate_exception_end(ctx
, EXCP_RI
);
10019 ctx
->btarget
= btarget
;
10020 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10022 tcg_temp_free_i32(t0
);
10025 /* R6 CP1 Branches */
10026 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10027 int32_t ft
, int32_t offset
,
10028 int delayslot_size
)
10030 target_ulong btarget
;
10031 TCGv_i64 t0
= tcg_temp_new_i64();
10033 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10034 #ifdef MIPS_DEBUG_DISAS
10035 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10036 "\n", ctx
->base
.pc_next
);
10038 generate_exception_end(ctx
, EXCP_RI
);
10042 gen_load_fpr64(ctx
, t0
, ft
);
10043 tcg_gen_andi_i64(t0
, t0
, 1);
10045 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10049 tcg_gen_xori_i64(t0
, t0
, 1);
10050 ctx
->hflags
|= MIPS_HFLAG_BC
;
10053 /* t0 already set */
10054 ctx
->hflags
|= MIPS_HFLAG_BC
;
10057 MIPS_INVAL("cp1 cond branch");
10058 generate_exception_end(ctx
, EXCP_RI
);
10062 tcg_gen_trunc_i64_tl(bcond
, t0
);
10064 ctx
->btarget
= btarget
;
10066 switch (delayslot_size
) {
10068 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10071 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10076 tcg_temp_free_i64(t0
);
10079 /* Coprocessor 1 (FPU) */
10081 #define FOP(func, fmt) (((fmt) << 21) | (func))
10084 OPC_ADD_S
= FOP(0, FMT_S
),
10085 OPC_SUB_S
= FOP(1, FMT_S
),
10086 OPC_MUL_S
= FOP(2, FMT_S
),
10087 OPC_DIV_S
= FOP(3, FMT_S
),
10088 OPC_SQRT_S
= FOP(4, FMT_S
),
10089 OPC_ABS_S
= FOP(5, FMT_S
),
10090 OPC_MOV_S
= FOP(6, FMT_S
),
10091 OPC_NEG_S
= FOP(7, FMT_S
),
10092 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10093 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10094 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10095 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10096 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10097 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10098 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10099 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10100 OPC_SEL_S
= FOP(16, FMT_S
),
10101 OPC_MOVCF_S
= FOP(17, FMT_S
),
10102 OPC_MOVZ_S
= FOP(18, FMT_S
),
10103 OPC_MOVN_S
= FOP(19, FMT_S
),
10104 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10105 OPC_RECIP_S
= FOP(21, FMT_S
),
10106 OPC_RSQRT_S
= FOP(22, FMT_S
),
10107 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10108 OPC_MADDF_S
= FOP(24, FMT_S
),
10109 OPC_MSUBF_S
= FOP(25, FMT_S
),
10110 OPC_RINT_S
= FOP(26, FMT_S
),
10111 OPC_CLASS_S
= FOP(27, FMT_S
),
10112 OPC_MIN_S
= FOP(28, FMT_S
),
10113 OPC_RECIP2_S
= FOP(28, FMT_S
),
10114 OPC_MINA_S
= FOP(29, FMT_S
),
10115 OPC_RECIP1_S
= FOP(29, FMT_S
),
10116 OPC_MAX_S
= FOP(30, FMT_S
),
10117 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10118 OPC_MAXA_S
= FOP(31, FMT_S
),
10119 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10120 OPC_CVT_D_S
= FOP(33, FMT_S
),
10121 OPC_CVT_W_S
= FOP(36, FMT_S
),
10122 OPC_CVT_L_S
= FOP(37, FMT_S
),
10123 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10124 OPC_CMP_F_S
= FOP (48, FMT_S
),
10125 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10126 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10127 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10128 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10129 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10130 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10131 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10132 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10133 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10134 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10135 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10136 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10137 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10138 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10139 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10141 OPC_ADD_D
= FOP(0, FMT_D
),
10142 OPC_SUB_D
= FOP(1, FMT_D
),
10143 OPC_MUL_D
= FOP(2, FMT_D
),
10144 OPC_DIV_D
= FOP(3, FMT_D
),
10145 OPC_SQRT_D
= FOP(4, FMT_D
),
10146 OPC_ABS_D
= FOP(5, FMT_D
),
10147 OPC_MOV_D
= FOP(6, FMT_D
),
10148 OPC_NEG_D
= FOP(7, FMT_D
),
10149 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10150 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10151 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10152 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10153 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10154 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10155 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10156 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10157 OPC_SEL_D
= FOP(16, FMT_D
),
10158 OPC_MOVCF_D
= FOP(17, FMT_D
),
10159 OPC_MOVZ_D
= FOP(18, FMT_D
),
10160 OPC_MOVN_D
= FOP(19, FMT_D
),
10161 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10162 OPC_RECIP_D
= FOP(21, FMT_D
),
10163 OPC_RSQRT_D
= FOP(22, FMT_D
),
10164 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10165 OPC_MADDF_D
= FOP(24, FMT_D
),
10166 OPC_MSUBF_D
= FOP(25, FMT_D
),
10167 OPC_RINT_D
= FOP(26, FMT_D
),
10168 OPC_CLASS_D
= FOP(27, FMT_D
),
10169 OPC_MIN_D
= FOP(28, FMT_D
),
10170 OPC_RECIP2_D
= FOP(28, FMT_D
),
10171 OPC_MINA_D
= FOP(29, FMT_D
),
10172 OPC_RECIP1_D
= FOP(29, FMT_D
),
10173 OPC_MAX_D
= FOP(30, FMT_D
),
10174 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10175 OPC_MAXA_D
= FOP(31, FMT_D
),
10176 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10177 OPC_CVT_S_D
= FOP(32, FMT_D
),
10178 OPC_CVT_W_D
= FOP(36, FMT_D
),
10179 OPC_CVT_L_D
= FOP(37, FMT_D
),
10180 OPC_CMP_F_D
= FOP (48, FMT_D
),
10181 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10182 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10183 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10184 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10185 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10186 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10187 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10188 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10189 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10190 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10191 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10192 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10193 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10194 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10195 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10197 OPC_CVT_S_W
= FOP(32, FMT_W
),
10198 OPC_CVT_D_W
= FOP(33, FMT_W
),
10199 OPC_CVT_S_L
= FOP(32, FMT_L
),
10200 OPC_CVT_D_L
= FOP(33, FMT_L
),
10201 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10203 OPC_ADD_PS
= FOP(0, FMT_PS
),
10204 OPC_SUB_PS
= FOP(1, FMT_PS
),
10205 OPC_MUL_PS
= FOP(2, FMT_PS
),
10206 OPC_DIV_PS
= FOP(3, FMT_PS
),
10207 OPC_ABS_PS
= FOP(5, FMT_PS
),
10208 OPC_MOV_PS
= FOP(6, FMT_PS
),
10209 OPC_NEG_PS
= FOP(7, FMT_PS
),
10210 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10211 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10212 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10213 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10214 OPC_MULR_PS
= FOP(26, FMT_PS
),
10215 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10216 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10217 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10218 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10220 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10221 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10222 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10223 OPC_PLL_PS
= FOP(44, FMT_PS
),
10224 OPC_PLU_PS
= FOP(45, FMT_PS
),
10225 OPC_PUL_PS
= FOP(46, FMT_PS
),
10226 OPC_PUU_PS
= FOP(47, FMT_PS
),
10227 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10228 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10229 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10230 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10231 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10232 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10233 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10234 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10235 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10236 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10237 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10238 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10239 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10240 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10241 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10242 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10246 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10247 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10248 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10249 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10250 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10251 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10252 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10253 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10254 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10255 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10256 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10257 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10258 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10259 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10260 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10261 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10262 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10263 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10264 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10265 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10266 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10267 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10269 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10270 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10271 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10272 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10273 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10274 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10275 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10276 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10277 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10278 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10279 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10280 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10281 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10282 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10283 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10284 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10285 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10286 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10287 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10288 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10289 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10290 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10292 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10294 TCGv t0
= tcg_temp_new();
10299 TCGv_i32 fp0
= tcg_temp_new_i32();
10301 gen_load_fpr32(ctx
, fp0
, fs
);
10302 tcg_gen_ext_i32_tl(t0
, fp0
);
10303 tcg_temp_free_i32(fp0
);
10305 gen_store_gpr(t0
, rt
);
10308 gen_load_gpr(t0
, rt
);
10310 TCGv_i32 fp0
= tcg_temp_new_i32();
10312 tcg_gen_trunc_tl_i32(fp0
, t0
);
10313 gen_store_fpr32(ctx
, fp0
, fs
);
10314 tcg_temp_free_i32(fp0
);
10318 gen_helper_1e0i(cfc1
, t0
, fs
);
10319 gen_store_gpr(t0
, rt
);
10322 gen_load_gpr(t0
, rt
);
10323 save_cpu_state(ctx
, 0);
10325 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10327 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10328 tcg_temp_free_i32(fs_tmp
);
10330 /* Stop translation as we may have changed hflags */
10331 ctx
->base
.is_jmp
= DISAS_STOP
;
10333 #if defined(TARGET_MIPS64)
10335 gen_load_fpr64(ctx
, t0
, fs
);
10336 gen_store_gpr(t0
, rt
);
10339 gen_load_gpr(t0
, rt
);
10340 gen_store_fpr64(ctx
, t0
, fs
);
10345 TCGv_i32 fp0
= tcg_temp_new_i32();
10347 gen_load_fpr32h(ctx
, fp0
, fs
);
10348 tcg_gen_ext_i32_tl(t0
, fp0
);
10349 tcg_temp_free_i32(fp0
);
10351 gen_store_gpr(t0
, rt
);
10354 gen_load_gpr(t0
, rt
);
10356 TCGv_i32 fp0
= tcg_temp_new_i32();
10358 tcg_gen_trunc_tl_i32(fp0
, t0
);
10359 gen_store_fpr32h(ctx
, fp0
, fs
);
10360 tcg_temp_free_i32(fp0
);
10364 MIPS_INVAL("cp1 move");
10365 generate_exception_end(ctx
, EXCP_RI
);
10373 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10380 /* Treat as NOP. */
10385 cond
= TCG_COND_EQ
;
10387 cond
= TCG_COND_NE
;
10389 l1
= gen_new_label();
10390 t0
= tcg_temp_new_i32();
10391 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10392 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10393 tcg_temp_free_i32(t0
);
10395 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10397 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10402 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10406 TCGv_i32 t0
= tcg_temp_new_i32();
10407 TCGLabel
*l1
= gen_new_label();
10410 cond
= TCG_COND_EQ
;
10412 cond
= TCG_COND_NE
;
10414 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10415 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10416 gen_load_fpr32(ctx
, t0
, fs
);
10417 gen_store_fpr32(ctx
, t0
, fd
);
10419 tcg_temp_free_i32(t0
);
10422 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10425 TCGv_i32 t0
= tcg_temp_new_i32();
10427 TCGLabel
*l1
= gen_new_label();
10430 cond
= TCG_COND_EQ
;
10432 cond
= TCG_COND_NE
;
10434 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10435 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10436 tcg_temp_free_i32(t0
);
10437 fp0
= tcg_temp_new_i64();
10438 gen_load_fpr64(ctx
, fp0
, fs
);
10439 gen_store_fpr64(ctx
, fp0
, fd
);
10440 tcg_temp_free_i64(fp0
);
10444 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10448 TCGv_i32 t0
= tcg_temp_new_i32();
10449 TCGLabel
*l1
= gen_new_label();
10450 TCGLabel
*l2
= gen_new_label();
10453 cond
= TCG_COND_EQ
;
10455 cond
= TCG_COND_NE
;
10457 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10458 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10459 gen_load_fpr32(ctx
, t0
, fs
);
10460 gen_store_fpr32(ctx
, t0
, fd
);
10463 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10464 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10465 gen_load_fpr32h(ctx
, t0
, fs
);
10466 gen_store_fpr32h(ctx
, t0
, fd
);
10467 tcg_temp_free_i32(t0
);
10471 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10474 TCGv_i32 t1
= tcg_const_i32(0);
10475 TCGv_i32 fp0
= tcg_temp_new_i32();
10476 TCGv_i32 fp1
= tcg_temp_new_i32();
10477 TCGv_i32 fp2
= tcg_temp_new_i32();
10478 gen_load_fpr32(ctx
, fp0
, fd
);
10479 gen_load_fpr32(ctx
, fp1
, ft
);
10480 gen_load_fpr32(ctx
, fp2
, fs
);
10484 tcg_gen_andi_i32(fp0
, fp0
, 1);
10485 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10488 tcg_gen_andi_i32(fp1
, fp1
, 1);
10489 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10492 tcg_gen_andi_i32(fp1
, fp1
, 1);
10493 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10496 MIPS_INVAL("gen_sel_s");
10497 generate_exception_end(ctx
, EXCP_RI
);
10501 gen_store_fpr32(ctx
, fp0
, fd
);
10502 tcg_temp_free_i32(fp2
);
10503 tcg_temp_free_i32(fp1
);
10504 tcg_temp_free_i32(fp0
);
10505 tcg_temp_free_i32(t1
);
10508 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10511 TCGv_i64 t1
= tcg_const_i64(0);
10512 TCGv_i64 fp0
= tcg_temp_new_i64();
10513 TCGv_i64 fp1
= tcg_temp_new_i64();
10514 TCGv_i64 fp2
= tcg_temp_new_i64();
10515 gen_load_fpr64(ctx
, fp0
, fd
);
10516 gen_load_fpr64(ctx
, fp1
, ft
);
10517 gen_load_fpr64(ctx
, fp2
, fs
);
10521 tcg_gen_andi_i64(fp0
, fp0
, 1);
10522 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10525 tcg_gen_andi_i64(fp1
, fp1
, 1);
10526 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10529 tcg_gen_andi_i64(fp1
, fp1
, 1);
10530 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10533 MIPS_INVAL("gen_sel_d");
10534 generate_exception_end(ctx
, EXCP_RI
);
10538 gen_store_fpr64(ctx
, fp0
, fd
);
10539 tcg_temp_free_i64(fp2
);
10540 tcg_temp_free_i64(fp1
);
10541 tcg_temp_free_i64(fp0
);
10542 tcg_temp_free_i64(t1
);
10545 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10546 int ft
, int fs
, int fd
, int cc
)
10548 uint32_t func
= ctx
->opcode
& 0x3f;
10552 TCGv_i32 fp0
= tcg_temp_new_i32();
10553 TCGv_i32 fp1
= tcg_temp_new_i32();
10555 gen_load_fpr32(ctx
, fp0
, fs
);
10556 gen_load_fpr32(ctx
, fp1
, ft
);
10557 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10558 tcg_temp_free_i32(fp1
);
10559 gen_store_fpr32(ctx
, fp0
, fd
);
10560 tcg_temp_free_i32(fp0
);
10565 TCGv_i32 fp0
= tcg_temp_new_i32();
10566 TCGv_i32 fp1
= tcg_temp_new_i32();
10568 gen_load_fpr32(ctx
, fp0
, fs
);
10569 gen_load_fpr32(ctx
, fp1
, ft
);
10570 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10571 tcg_temp_free_i32(fp1
);
10572 gen_store_fpr32(ctx
, fp0
, fd
);
10573 tcg_temp_free_i32(fp0
);
10578 TCGv_i32 fp0
= tcg_temp_new_i32();
10579 TCGv_i32 fp1
= tcg_temp_new_i32();
10581 gen_load_fpr32(ctx
, fp0
, fs
);
10582 gen_load_fpr32(ctx
, fp1
, ft
);
10583 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10584 tcg_temp_free_i32(fp1
);
10585 gen_store_fpr32(ctx
, fp0
, fd
);
10586 tcg_temp_free_i32(fp0
);
10591 TCGv_i32 fp0
= tcg_temp_new_i32();
10592 TCGv_i32 fp1
= tcg_temp_new_i32();
10594 gen_load_fpr32(ctx
, fp0
, fs
);
10595 gen_load_fpr32(ctx
, fp1
, ft
);
10596 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10597 tcg_temp_free_i32(fp1
);
10598 gen_store_fpr32(ctx
, fp0
, fd
);
10599 tcg_temp_free_i32(fp0
);
10604 TCGv_i32 fp0
= tcg_temp_new_i32();
10606 gen_load_fpr32(ctx
, fp0
, fs
);
10607 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10608 gen_store_fpr32(ctx
, fp0
, fd
);
10609 tcg_temp_free_i32(fp0
);
10614 TCGv_i32 fp0
= tcg_temp_new_i32();
10616 gen_load_fpr32(ctx
, fp0
, fs
);
10617 if (ctx
->abs2008
) {
10618 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10620 gen_helper_float_abs_s(fp0
, fp0
);
10622 gen_store_fpr32(ctx
, fp0
, fd
);
10623 tcg_temp_free_i32(fp0
);
10628 TCGv_i32 fp0
= tcg_temp_new_i32();
10630 gen_load_fpr32(ctx
, fp0
, fs
);
10631 gen_store_fpr32(ctx
, fp0
, fd
);
10632 tcg_temp_free_i32(fp0
);
10637 TCGv_i32 fp0
= tcg_temp_new_i32();
10639 gen_load_fpr32(ctx
, fp0
, fs
);
10640 if (ctx
->abs2008
) {
10641 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10643 gen_helper_float_chs_s(fp0
, fp0
);
10645 gen_store_fpr32(ctx
, fp0
, fd
);
10646 tcg_temp_free_i32(fp0
);
10649 case OPC_ROUND_L_S
:
10650 check_cp1_64bitmode(ctx
);
10652 TCGv_i32 fp32
= tcg_temp_new_i32();
10653 TCGv_i64 fp64
= tcg_temp_new_i64();
10655 gen_load_fpr32(ctx
, fp32
, fs
);
10656 if (ctx
->nan2008
) {
10657 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10659 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10661 tcg_temp_free_i32(fp32
);
10662 gen_store_fpr64(ctx
, fp64
, fd
);
10663 tcg_temp_free_i64(fp64
);
10666 case OPC_TRUNC_L_S
:
10667 check_cp1_64bitmode(ctx
);
10669 TCGv_i32 fp32
= tcg_temp_new_i32();
10670 TCGv_i64 fp64
= tcg_temp_new_i64();
10672 gen_load_fpr32(ctx
, fp32
, fs
);
10673 if (ctx
->nan2008
) {
10674 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10676 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10678 tcg_temp_free_i32(fp32
);
10679 gen_store_fpr64(ctx
, fp64
, fd
);
10680 tcg_temp_free_i64(fp64
);
10684 check_cp1_64bitmode(ctx
);
10686 TCGv_i32 fp32
= tcg_temp_new_i32();
10687 TCGv_i64 fp64
= tcg_temp_new_i64();
10689 gen_load_fpr32(ctx
, fp32
, fs
);
10690 if (ctx
->nan2008
) {
10691 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10693 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10695 tcg_temp_free_i32(fp32
);
10696 gen_store_fpr64(ctx
, fp64
, fd
);
10697 tcg_temp_free_i64(fp64
);
10700 case OPC_FLOOR_L_S
:
10701 check_cp1_64bitmode(ctx
);
10703 TCGv_i32 fp32
= tcg_temp_new_i32();
10704 TCGv_i64 fp64
= tcg_temp_new_i64();
10706 gen_load_fpr32(ctx
, fp32
, fs
);
10707 if (ctx
->nan2008
) {
10708 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10710 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10712 tcg_temp_free_i32(fp32
);
10713 gen_store_fpr64(ctx
, fp64
, fd
);
10714 tcg_temp_free_i64(fp64
);
10717 case OPC_ROUND_W_S
:
10719 TCGv_i32 fp0
= tcg_temp_new_i32();
10721 gen_load_fpr32(ctx
, fp0
, fs
);
10722 if (ctx
->nan2008
) {
10723 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10725 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10727 gen_store_fpr32(ctx
, fp0
, fd
);
10728 tcg_temp_free_i32(fp0
);
10731 case OPC_TRUNC_W_S
:
10733 TCGv_i32 fp0
= tcg_temp_new_i32();
10735 gen_load_fpr32(ctx
, fp0
, fs
);
10736 if (ctx
->nan2008
) {
10737 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10739 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10741 gen_store_fpr32(ctx
, fp0
, fd
);
10742 tcg_temp_free_i32(fp0
);
10747 TCGv_i32 fp0
= tcg_temp_new_i32();
10749 gen_load_fpr32(ctx
, fp0
, fs
);
10750 if (ctx
->nan2008
) {
10751 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10753 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10755 gen_store_fpr32(ctx
, fp0
, fd
);
10756 tcg_temp_free_i32(fp0
);
10759 case OPC_FLOOR_W_S
:
10761 TCGv_i32 fp0
= tcg_temp_new_i32();
10763 gen_load_fpr32(ctx
, fp0
, fs
);
10764 if (ctx
->nan2008
) {
10765 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10767 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10769 gen_store_fpr32(ctx
, fp0
, fd
);
10770 tcg_temp_free_i32(fp0
);
10774 check_insn(ctx
, ISA_MIPS32R6
);
10775 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10778 check_insn(ctx
, ISA_MIPS32R6
);
10779 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10782 check_insn(ctx
, ISA_MIPS32R6
);
10783 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10786 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10787 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10790 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10792 TCGLabel
*l1
= gen_new_label();
10796 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10798 fp0
= tcg_temp_new_i32();
10799 gen_load_fpr32(ctx
, fp0
, fs
);
10800 gen_store_fpr32(ctx
, fp0
, fd
);
10801 tcg_temp_free_i32(fp0
);
10806 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10808 TCGLabel
*l1
= gen_new_label();
10812 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10813 fp0
= tcg_temp_new_i32();
10814 gen_load_fpr32(ctx
, fp0
, fs
);
10815 gen_store_fpr32(ctx
, fp0
, fd
);
10816 tcg_temp_free_i32(fp0
);
10823 TCGv_i32 fp0
= tcg_temp_new_i32();
10825 gen_load_fpr32(ctx
, fp0
, fs
);
10826 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10827 gen_store_fpr32(ctx
, fp0
, fd
);
10828 tcg_temp_free_i32(fp0
);
10833 TCGv_i32 fp0
= tcg_temp_new_i32();
10835 gen_load_fpr32(ctx
, fp0
, fs
);
10836 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10837 gen_store_fpr32(ctx
, fp0
, fd
);
10838 tcg_temp_free_i32(fp0
);
10842 check_insn(ctx
, ISA_MIPS32R6
);
10844 TCGv_i32 fp0
= tcg_temp_new_i32();
10845 TCGv_i32 fp1
= tcg_temp_new_i32();
10846 TCGv_i32 fp2
= tcg_temp_new_i32();
10847 gen_load_fpr32(ctx
, fp0
, fs
);
10848 gen_load_fpr32(ctx
, fp1
, ft
);
10849 gen_load_fpr32(ctx
, fp2
, fd
);
10850 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10851 gen_store_fpr32(ctx
, fp2
, fd
);
10852 tcg_temp_free_i32(fp2
);
10853 tcg_temp_free_i32(fp1
);
10854 tcg_temp_free_i32(fp0
);
10858 check_insn(ctx
, ISA_MIPS32R6
);
10860 TCGv_i32 fp0
= tcg_temp_new_i32();
10861 TCGv_i32 fp1
= tcg_temp_new_i32();
10862 TCGv_i32 fp2
= tcg_temp_new_i32();
10863 gen_load_fpr32(ctx
, fp0
, fs
);
10864 gen_load_fpr32(ctx
, fp1
, ft
);
10865 gen_load_fpr32(ctx
, fp2
, fd
);
10866 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10867 gen_store_fpr32(ctx
, fp2
, fd
);
10868 tcg_temp_free_i32(fp2
);
10869 tcg_temp_free_i32(fp1
);
10870 tcg_temp_free_i32(fp0
);
10874 check_insn(ctx
, ISA_MIPS32R6
);
10876 TCGv_i32 fp0
= tcg_temp_new_i32();
10877 gen_load_fpr32(ctx
, fp0
, fs
);
10878 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10879 gen_store_fpr32(ctx
, fp0
, fd
);
10880 tcg_temp_free_i32(fp0
);
10884 check_insn(ctx
, ISA_MIPS32R6
);
10886 TCGv_i32 fp0
= tcg_temp_new_i32();
10887 gen_load_fpr32(ctx
, fp0
, fs
);
10888 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10889 gen_store_fpr32(ctx
, fp0
, fd
);
10890 tcg_temp_free_i32(fp0
);
10893 case OPC_MIN_S
: /* OPC_RECIP2_S */
10894 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10896 TCGv_i32 fp0
= tcg_temp_new_i32();
10897 TCGv_i32 fp1
= tcg_temp_new_i32();
10898 TCGv_i32 fp2
= tcg_temp_new_i32();
10899 gen_load_fpr32(ctx
, fp0
, fs
);
10900 gen_load_fpr32(ctx
, fp1
, ft
);
10901 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10902 gen_store_fpr32(ctx
, fp2
, fd
);
10903 tcg_temp_free_i32(fp2
);
10904 tcg_temp_free_i32(fp1
);
10905 tcg_temp_free_i32(fp0
);
10908 check_cp1_64bitmode(ctx
);
10910 TCGv_i32 fp0
= tcg_temp_new_i32();
10911 TCGv_i32 fp1
= tcg_temp_new_i32();
10913 gen_load_fpr32(ctx
, fp0
, fs
);
10914 gen_load_fpr32(ctx
, fp1
, ft
);
10915 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10916 tcg_temp_free_i32(fp1
);
10917 gen_store_fpr32(ctx
, fp0
, fd
);
10918 tcg_temp_free_i32(fp0
);
10922 case OPC_MINA_S
: /* OPC_RECIP1_S */
10923 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10925 TCGv_i32 fp0
= tcg_temp_new_i32();
10926 TCGv_i32 fp1
= tcg_temp_new_i32();
10927 TCGv_i32 fp2
= tcg_temp_new_i32();
10928 gen_load_fpr32(ctx
, fp0
, fs
);
10929 gen_load_fpr32(ctx
, fp1
, ft
);
10930 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10931 gen_store_fpr32(ctx
, fp2
, fd
);
10932 tcg_temp_free_i32(fp2
);
10933 tcg_temp_free_i32(fp1
);
10934 tcg_temp_free_i32(fp0
);
10937 check_cp1_64bitmode(ctx
);
10939 TCGv_i32 fp0
= tcg_temp_new_i32();
10941 gen_load_fpr32(ctx
, fp0
, fs
);
10942 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10943 gen_store_fpr32(ctx
, fp0
, fd
);
10944 tcg_temp_free_i32(fp0
);
10948 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10949 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10951 TCGv_i32 fp0
= tcg_temp_new_i32();
10952 TCGv_i32 fp1
= tcg_temp_new_i32();
10953 gen_load_fpr32(ctx
, fp0
, fs
);
10954 gen_load_fpr32(ctx
, fp1
, ft
);
10955 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10956 gen_store_fpr32(ctx
, fp1
, fd
);
10957 tcg_temp_free_i32(fp1
);
10958 tcg_temp_free_i32(fp0
);
10961 check_cp1_64bitmode(ctx
);
10963 TCGv_i32 fp0
= tcg_temp_new_i32();
10965 gen_load_fpr32(ctx
, fp0
, fs
);
10966 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10967 gen_store_fpr32(ctx
, fp0
, fd
);
10968 tcg_temp_free_i32(fp0
);
10972 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10973 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10975 TCGv_i32 fp0
= tcg_temp_new_i32();
10976 TCGv_i32 fp1
= tcg_temp_new_i32();
10977 gen_load_fpr32(ctx
, fp0
, fs
);
10978 gen_load_fpr32(ctx
, fp1
, ft
);
10979 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10980 gen_store_fpr32(ctx
, fp1
, fd
);
10981 tcg_temp_free_i32(fp1
);
10982 tcg_temp_free_i32(fp0
);
10985 check_cp1_64bitmode(ctx
);
10987 TCGv_i32 fp0
= tcg_temp_new_i32();
10988 TCGv_i32 fp1
= tcg_temp_new_i32();
10990 gen_load_fpr32(ctx
, fp0
, fs
);
10991 gen_load_fpr32(ctx
, fp1
, ft
);
10992 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10993 tcg_temp_free_i32(fp1
);
10994 gen_store_fpr32(ctx
, fp0
, fd
);
10995 tcg_temp_free_i32(fp0
);
11000 check_cp1_registers(ctx
, fd
);
11002 TCGv_i32 fp32
= tcg_temp_new_i32();
11003 TCGv_i64 fp64
= tcg_temp_new_i64();
11005 gen_load_fpr32(ctx
, fp32
, fs
);
11006 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11007 tcg_temp_free_i32(fp32
);
11008 gen_store_fpr64(ctx
, fp64
, fd
);
11009 tcg_temp_free_i64(fp64
);
11014 TCGv_i32 fp0
= tcg_temp_new_i32();
11016 gen_load_fpr32(ctx
, fp0
, fs
);
11017 if (ctx
->nan2008
) {
11018 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11020 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11022 gen_store_fpr32(ctx
, fp0
, fd
);
11023 tcg_temp_free_i32(fp0
);
11027 check_cp1_64bitmode(ctx
);
11029 TCGv_i32 fp32
= tcg_temp_new_i32();
11030 TCGv_i64 fp64
= tcg_temp_new_i64();
11032 gen_load_fpr32(ctx
, fp32
, fs
);
11033 if (ctx
->nan2008
) {
11034 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11036 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11038 tcg_temp_free_i32(fp32
);
11039 gen_store_fpr64(ctx
, fp64
, fd
);
11040 tcg_temp_free_i64(fp64
);
11046 TCGv_i64 fp64
= tcg_temp_new_i64();
11047 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11048 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11050 gen_load_fpr32(ctx
, fp32_0
, fs
);
11051 gen_load_fpr32(ctx
, fp32_1
, ft
);
11052 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11053 tcg_temp_free_i32(fp32_1
);
11054 tcg_temp_free_i32(fp32_0
);
11055 gen_store_fpr64(ctx
, fp64
, fd
);
11056 tcg_temp_free_i64(fp64
);
11062 case OPC_CMP_UEQ_S
:
11063 case OPC_CMP_OLT_S
:
11064 case OPC_CMP_ULT_S
:
11065 case OPC_CMP_OLE_S
:
11066 case OPC_CMP_ULE_S
:
11068 case OPC_CMP_NGLE_S
:
11069 case OPC_CMP_SEQ_S
:
11070 case OPC_CMP_NGL_S
:
11072 case OPC_CMP_NGE_S
:
11074 case OPC_CMP_NGT_S
:
11075 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11076 if (ctx
->opcode
& (1 << 6)) {
11077 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11079 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11083 check_cp1_registers(ctx
, fs
| ft
| fd
);
11085 TCGv_i64 fp0
= tcg_temp_new_i64();
11086 TCGv_i64 fp1
= tcg_temp_new_i64();
11088 gen_load_fpr64(ctx
, fp0
, fs
);
11089 gen_load_fpr64(ctx
, fp1
, ft
);
11090 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11091 tcg_temp_free_i64(fp1
);
11092 gen_store_fpr64(ctx
, fp0
, fd
);
11093 tcg_temp_free_i64(fp0
);
11097 check_cp1_registers(ctx
, fs
| ft
| fd
);
11099 TCGv_i64 fp0
= tcg_temp_new_i64();
11100 TCGv_i64 fp1
= tcg_temp_new_i64();
11102 gen_load_fpr64(ctx
, fp0
, fs
);
11103 gen_load_fpr64(ctx
, fp1
, ft
);
11104 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11105 tcg_temp_free_i64(fp1
);
11106 gen_store_fpr64(ctx
, fp0
, fd
);
11107 tcg_temp_free_i64(fp0
);
11111 check_cp1_registers(ctx
, fs
| ft
| fd
);
11113 TCGv_i64 fp0
= tcg_temp_new_i64();
11114 TCGv_i64 fp1
= tcg_temp_new_i64();
11116 gen_load_fpr64(ctx
, fp0
, fs
);
11117 gen_load_fpr64(ctx
, fp1
, ft
);
11118 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11119 tcg_temp_free_i64(fp1
);
11120 gen_store_fpr64(ctx
, fp0
, fd
);
11121 tcg_temp_free_i64(fp0
);
11125 check_cp1_registers(ctx
, fs
| ft
| fd
);
11127 TCGv_i64 fp0
= tcg_temp_new_i64();
11128 TCGv_i64 fp1
= tcg_temp_new_i64();
11130 gen_load_fpr64(ctx
, fp0
, fs
);
11131 gen_load_fpr64(ctx
, fp1
, ft
);
11132 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11133 tcg_temp_free_i64(fp1
);
11134 gen_store_fpr64(ctx
, fp0
, fd
);
11135 tcg_temp_free_i64(fp0
);
11139 check_cp1_registers(ctx
, fs
| fd
);
11141 TCGv_i64 fp0
= tcg_temp_new_i64();
11143 gen_load_fpr64(ctx
, fp0
, fs
);
11144 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11145 gen_store_fpr64(ctx
, fp0
, fd
);
11146 tcg_temp_free_i64(fp0
);
11150 check_cp1_registers(ctx
, fs
| fd
);
11152 TCGv_i64 fp0
= tcg_temp_new_i64();
11154 gen_load_fpr64(ctx
, fp0
, fs
);
11155 if (ctx
->abs2008
) {
11156 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11158 gen_helper_float_abs_d(fp0
, fp0
);
11160 gen_store_fpr64(ctx
, fp0
, fd
);
11161 tcg_temp_free_i64(fp0
);
11165 check_cp1_registers(ctx
, fs
| fd
);
11167 TCGv_i64 fp0
= tcg_temp_new_i64();
11169 gen_load_fpr64(ctx
, fp0
, fs
);
11170 gen_store_fpr64(ctx
, fp0
, fd
);
11171 tcg_temp_free_i64(fp0
);
11175 check_cp1_registers(ctx
, fs
| fd
);
11177 TCGv_i64 fp0
= tcg_temp_new_i64();
11179 gen_load_fpr64(ctx
, fp0
, fs
);
11180 if (ctx
->abs2008
) {
11181 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11183 gen_helper_float_chs_d(fp0
, fp0
);
11185 gen_store_fpr64(ctx
, fp0
, fd
);
11186 tcg_temp_free_i64(fp0
);
11189 case OPC_ROUND_L_D
:
11190 check_cp1_64bitmode(ctx
);
11192 TCGv_i64 fp0
= tcg_temp_new_i64();
11194 gen_load_fpr64(ctx
, fp0
, fs
);
11195 if (ctx
->nan2008
) {
11196 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11198 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11200 gen_store_fpr64(ctx
, fp0
, fd
);
11201 tcg_temp_free_i64(fp0
);
11204 case OPC_TRUNC_L_D
:
11205 check_cp1_64bitmode(ctx
);
11207 TCGv_i64 fp0
= tcg_temp_new_i64();
11209 gen_load_fpr64(ctx
, fp0
, fs
);
11210 if (ctx
->nan2008
) {
11211 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11213 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11215 gen_store_fpr64(ctx
, fp0
, fd
);
11216 tcg_temp_free_i64(fp0
);
11220 check_cp1_64bitmode(ctx
);
11222 TCGv_i64 fp0
= tcg_temp_new_i64();
11224 gen_load_fpr64(ctx
, fp0
, fs
);
11225 if (ctx
->nan2008
) {
11226 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11228 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11230 gen_store_fpr64(ctx
, fp0
, fd
);
11231 tcg_temp_free_i64(fp0
);
11234 case OPC_FLOOR_L_D
:
11235 check_cp1_64bitmode(ctx
);
11237 TCGv_i64 fp0
= tcg_temp_new_i64();
11239 gen_load_fpr64(ctx
, fp0
, fs
);
11240 if (ctx
->nan2008
) {
11241 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11243 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11245 gen_store_fpr64(ctx
, fp0
, fd
);
11246 tcg_temp_free_i64(fp0
);
11249 case OPC_ROUND_W_D
:
11250 check_cp1_registers(ctx
, fs
);
11252 TCGv_i32 fp32
= tcg_temp_new_i32();
11253 TCGv_i64 fp64
= tcg_temp_new_i64();
11255 gen_load_fpr64(ctx
, fp64
, fs
);
11256 if (ctx
->nan2008
) {
11257 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11259 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11261 tcg_temp_free_i64(fp64
);
11262 gen_store_fpr32(ctx
, fp32
, fd
);
11263 tcg_temp_free_i32(fp32
);
11266 case OPC_TRUNC_W_D
:
11267 check_cp1_registers(ctx
, fs
);
11269 TCGv_i32 fp32
= tcg_temp_new_i32();
11270 TCGv_i64 fp64
= tcg_temp_new_i64();
11272 gen_load_fpr64(ctx
, fp64
, fs
);
11273 if (ctx
->nan2008
) {
11274 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11276 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11278 tcg_temp_free_i64(fp64
);
11279 gen_store_fpr32(ctx
, fp32
, fd
);
11280 tcg_temp_free_i32(fp32
);
11284 check_cp1_registers(ctx
, fs
);
11286 TCGv_i32 fp32
= tcg_temp_new_i32();
11287 TCGv_i64 fp64
= tcg_temp_new_i64();
11289 gen_load_fpr64(ctx
, fp64
, fs
);
11290 if (ctx
->nan2008
) {
11291 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11293 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11295 tcg_temp_free_i64(fp64
);
11296 gen_store_fpr32(ctx
, fp32
, fd
);
11297 tcg_temp_free_i32(fp32
);
11300 case OPC_FLOOR_W_D
:
11301 check_cp1_registers(ctx
, fs
);
11303 TCGv_i32 fp32
= tcg_temp_new_i32();
11304 TCGv_i64 fp64
= tcg_temp_new_i64();
11306 gen_load_fpr64(ctx
, fp64
, fs
);
11307 if (ctx
->nan2008
) {
11308 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11310 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11312 tcg_temp_free_i64(fp64
);
11313 gen_store_fpr32(ctx
, fp32
, fd
);
11314 tcg_temp_free_i32(fp32
);
11318 check_insn(ctx
, ISA_MIPS32R6
);
11319 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11322 check_insn(ctx
, ISA_MIPS32R6
);
11323 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11326 check_insn(ctx
, ISA_MIPS32R6
);
11327 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11331 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11336 TCGLabel
*l1
= gen_new_label();
11340 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11342 fp0
= tcg_temp_new_i64();
11343 gen_load_fpr64(ctx
, fp0
, fs
);
11344 gen_store_fpr64(ctx
, fp0
, fd
);
11345 tcg_temp_free_i64(fp0
);
11350 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11352 TCGLabel
*l1
= gen_new_label();
11356 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11357 fp0
= tcg_temp_new_i64();
11358 gen_load_fpr64(ctx
, fp0
, fs
);
11359 gen_store_fpr64(ctx
, fp0
, fd
);
11360 tcg_temp_free_i64(fp0
);
11366 check_cp1_registers(ctx
, fs
| fd
);
11368 TCGv_i64 fp0
= tcg_temp_new_i64();
11370 gen_load_fpr64(ctx
, fp0
, fs
);
11371 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11372 gen_store_fpr64(ctx
, fp0
, fd
);
11373 tcg_temp_free_i64(fp0
);
11377 check_cp1_registers(ctx
, fs
| fd
);
11379 TCGv_i64 fp0
= tcg_temp_new_i64();
11381 gen_load_fpr64(ctx
, fp0
, fs
);
11382 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11383 gen_store_fpr64(ctx
, fp0
, fd
);
11384 tcg_temp_free_i64(fp0
);
11388 check_insn(ctx
, ISA_MIPS32R6
);
11390 TCGv_i64 fp0
= tcg_temp_new_i64();
11391 TCGv_i64 fp1
= tcg_temp_new_i64();
11392 TCGv_i64 fp2
= tcg_temp_new_i64();
11393 gen_load_fpr64(ctx
, fp0
, fs
);
11394 gen_load_fpr64(ctx
, fp1
, ft
);
11395 gen_load_fpr64(ctx
, fp2
, fd
);
11396 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11397 gen_store_fpr64(ctx
, fp2
, fd
);
11398 tcg_temp_free_i64(fp2
);
11399 tcg_temp_free_i64(fp1
);
11400 tcg_temp_free_i64(fp0
);
11404 check_insn(ctx
, ISA_MIPS32R6
);
11406 TCGv_i64 fp0
= tcg_temp_new_i64();
11407 TCGv_i64 fp1
= tcg_temp_new_i64();
11408 TCGv_i64 fp2
= tcg_temp_new_i64();
11409 gen_load_fpr64(ctx
, fp0
, fs
);
11410 gen_load_fpr64(ctx
, fp1
, ft
);
11411 gen_load_fpr64(ctx
, fp2
, fd
);
11412 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11413 gen_store_fpr64(ctx
, fp2
, fd
);
11414 tcg_temp_free_i64(fp2
);
11415 tcg_temp_free_i64(fp1
);
11416 tcg_temp_free_i64(fp0
);
11420 check_insn(ctx
, ISA_MIPS32R6
);
11422 TCGv_i64 fp0
= tcg_temp_new_i64();
11423 gen_load_fpr64(ctx
, fp0
, fs
);
11424 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11425 gen_store_fpr64(ctx
, fp0
, fd
);
11426 tcg_temp_free_i64(fp0
);
11430 check_insn(ctx
, ISA_MIPS32R6
);
11432 TCGv_i64 fp0
= tcg_temp_new_i64();
11433 gen_load_fpr64(ctx
, fp0
, fs
);
11434 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11435 gen_store_fpr64(ctx
, fp0
, fd
);
11436 tcg_temp_free_i64(fp0
);
11439 case OPC_MIN_D
: /* OPC_RECIP2_D */
11440 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11442 TCGv_i64 fp0
= tcg_temp_new_i64();
11443 TCGv_i64 fp1
= tcg_temp_new_i64();
11444 gen_load_fpr64(ctx
, fp0
, fs
);
11445 gen_load_fpr64(ctx
, fp1
, ft
);
11446 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11447 gen_store_fpr64(ctx
, fp1
, fd
);
11448 tcg_temp_free_i64(fp1
);
11449 tcg_temp_free_i64(fp0
);
11452 check_cp1_64bitmode(ctx
);
11454 TCGv_i64 fp0
= tcg_temp_new_i64();
11455 TCGv_i64 fp1
= tcg_temp_new_i64();
11457 gen_load_fpr64(ctx
, fp0
, fs
);
11458 gen_load_fpr64(ctx
, fp1
, ft
);
11459 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11460 tcg_temp_free_i64(fp1
);
11461 gen_store_fpr64(ctx
, fp0
, fd
);
11462 tcg_temp_free_i64(fp0
);
11466 case OPC_MINA_D
: /* OPC_RECIP1_D */
11467 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11469 TCGv_i64 fp0
= tcg_temp_new_i64();
11470 TCGv_i64 fp1
= tcg_temp_new_i64();
11471 gen_load_fpr64(ctx
, fp0
, fs
);
11472 gen_load_fpr64(ctx
, fp1
, ft
);
11473 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11474 gen_store_fpr64(ctx
, fp1
, fd
);
11475 tcg_temp_free_i64(fp1
);
11476 tcg_temp_free_i64(fp0
);
11479 check_cp1_64bitmode(ctx
);
11481 TCGv_i64 fp0
= tcg_temp_new_i64();
11483 gen_load_fpr64(ctx
, fp0
, fs
);
11484 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11485 gen_store_fpr64(ctx
, fp0
, fd
);
11486 tcg_temp_free_i64(fp0
);
11490 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11491 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11493 TCGv_i64 fp0
= tcg_temp_new_i64();
11494 TCGv_i64 fp1
= tcg_temp_new_i64();
11495 gen_load_fpr64(ctx
, fp0
, fs
);
11496 gen_load_fpr64(ctx
, fp1
, ft
);
11497 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11498 gen_store_fpr64(ctx
, fp1
, fd
);
11499 tcg_temp_free_i64(fp1
);
11500 tcg_temp_free_i64(fp0
);
11503 check_cp1_64bitmode(ctx
);
11505 TCGv_i64 fp0
= tcg_temp_new_i64();
11507 gen_load_fpr64(ctx
, fp0
, fs
);
11508 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11509 gen_store_fpr64(ctx
, fp0
, fd
);
11510 tcg_temp_free_i64(fp0
);
11514 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11515 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11517 TCGv_i64 fp0
= tcg_temp_new_i64();
11518 TCGv_i64 fp1
= tcg_temp_new_i64();
11519 gen_load_fpr64(ctx
, fp0
, fs
);
11520 gen_load_fpr64(ctx
, fp1
, ft
);
11521 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11522 gen_store_fpr64(ctx
, fp1
, fd
);
11523 tcg_temp_free_i64(fp1
);
11524 tcg_temp_free_i64(fp0
);
11527 check_cp1_64bitmode(ctx
);
11529 TCGv_i64 fp0
= tcg_temp_new_i64();
11530 TCGv_i64 fp1
= tcg_temp_new_i64();
11532 gen_load_fpr64(ctx
, fp0
, fs
);
11533 gen_load_fpr64(ctx
, fp1
, ft
);
11534 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11535 tcg_temp_free_i64(fp1
);
11536 gen_store_fpr64(ctx
, fp0
, fd
);
11537 tcg_temp_free_i64(fp0
);
11544 case OPC_CMP_UEQ_D
:
11545 case OPC_CMP_OLT_D
:
11546 case OPC_CMP_ULT_D
:
11547 case OPC_CMP_OLE_D
:
11548 case OPC_CMP_ULE_D
:
11550 case OPC_CMP_NGLE_D
:
11551 case OPC_CMP_SEQ_D
:
11552 case OPC_CMP_NGL_D
:
11554 case OPC_CMP_NGE_D
:
11556 case OPC_CMP_NGT_D
:
11557 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11558 if (ctx
->opcode
& (1 << 6)) {
11559 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11561 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11565 check_cp1_registers(ctx
, fs
);
11567 TCGv_i32 fp32
= tcg_temp_new_i32();
11568 TCGv_i64 fp64
= tcg_temp_new_i64();
11570 gen_load_fpr64(ctx
, fp64
, fs
);
11571 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11572 tcg_temp_free_i64(fp64
);
11573 gen_store_fpr32(ctx
, fp32
, fd
);
11574 tcg_temp_free_i32(fp32
);
11578 check_cp1_registers(ctx
, fs
);
11580 TCGv_i32 fp32
= tcg_temp_new_i32();
11581 TCGv_i64 fp64
= tcg_temp_new_i64();
11583 gen_load_fpr64(ctx
, fp64
, fs
);
11584 if (ctx
->nan2008
) {
11585 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11587 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11589 tcg_temp_free_i64(fp64
);
11590 gen_store_fpr32(ctx
, fp32
, fd
);
11591 tcg_temp_free_i32(fp32
);
11595 check_cp1_64bitmode(ctx
);
11597 TCGv_i64 fp0
= tcg_temp_new_i64();
11599 gen_load_fpr64(ctx
, fp0
, fs
);
11600 if (ctx
->nan2008
) {
11601 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11603 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11605 gen_store_fpr64(ctx
, fp0
, fd
);
11606 tcg_temp_free_i64(fp0
);
11611 TCGv_i32 fp0
= tcg_temp_new_i32();
11613 gen_load_fpr32(ctx
, fp0
, fs
);
11614 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11615 gen_store_fpr32(ctx
, fp0
, fd
);
11616 tcg_temp_free_i32(fp0
);
11620 check_cp1_registers(ctx
, fd
);
11622 TCGv_i32 fp32
= tcg_temp_new_i32();
11623 TCGv_i64 fp64
= tcg_temp_new_i64();
11625 gen_load_fpr32(ctx
, fp32
, fs
);
11626 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11627 tcg_temp_free_i32(fp32
);
11628 gen_store_fpr64(ctx
, fp64
, fd
);
11629 tcg_temp_free_i64(fp64
);
11633 check_cp1_64bitmode(ctx
);
11635 TCGv_i32 fp32
= tcg_temp_new_i32();
11636 TCGv_i64 fp64
= tcg_temp_new_i64();
11638 gen_load_fpr64(ctx
, fp64
, fs
);
11639 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11640 tcg_temp_free_i64(fp64
);
11641 gen_store_fpr32(ctx
, fp32
, fd
);
11642 tcg_temp_free_i32(fp32
);
11646 check_cp1_64bitmode(ctx
);
11648 TCGv_i64 fp0
= tcg_temp_new_i64();
11650 gen_load_fpr64(ctx
, fp0
, fs
);
11651 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11652 gen_store_fpr64(ctx
, fp0
, fd
);
11653 tcg_temp_free_i64(fp0
);
11656 case OPC_CVT_PS_PW
:
11659 TCGv_i64 fp0
= tcg_temp_new_i64();
11661 gen_load_fpr64(ctx
, fp0
, fs
);
11662 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11663 gen_store_fpr64(ctx
, fp0
, fd
);
11664 tcg_temp_free_i64(fp0
);
11670 TCGv_i64 fp0
= tcg_temp_new_i64();
11671 TCGv_i64 fp1
= tcg_temp_new_i64();
11673 gen_load_fpr64(ctx
, fp0
, fs
);
11674 gen_load_fpr64(ctx
, fp1
, ft
);
11675 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11676 tcg_temp_free_i64(fp1
);
11677 gen_store_fpr64(ctx
, fp0
, fd
);
11678 tcg_temp_free_i64(fp0
);
11684 TCGv_i64 fp0
= tcg_temp_new_i64();
11685 TCGv_i64 fp1
= tcg_temp_new_i64();
11687 gen_load_fpr64(ctx
, fp0
, fs
);
11688 gen_load_fpr64(ctx
, fp1
, ft
);
11689 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11690 tcg_temp_free_i64(fp1
);
11691 gen_store_fpr64(ctx
, fp0
, fd
);
11692 tcg_temp_free_i64(fp0
);
11698 TCGv_i64 fp0
= tcg_temp_new_i64();
11699 TCGv_i64 fp1
= tcg_temp_new_i64();
11701 gen_load_fpr64(ctx
, fp0
, fs
);
11702 gen_load_fpr64(ctx
, fp1
, ft
);
11703 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11704 tcg_temp_free_i64(fp1
);
11705 gen_store_fpr64(ctx
, fp0
, fd
);
11706 tcg_temp_free_i64(fp0
);
11712 TCGv_i64 fp0
= tcg_temp_new_i64();
11714 gen_load_fpr64(ctx
, fp0
, fs
);
11715 gen_helper_float_abs_ps(fp0
, fp0
);
11716 gen_store_fpr64(ctx
, fp0
, fd
);
11717 tcg_temp_free_i64(fp0
);
11723 TCGv_i64 fp0
= tcg_temp_new_i64();
11725 gen_load_fpr64(ctx
, fp0
, fs
);
11726 gen_store_fpr64(ctx
, fp0
, fd
);
11727 tcg_temp_free_i64(fp0
);
11733 TCGv_i64 fp0
= tcg_temp_new_i64();
11735 gen_load_fpr64(ctx
, fp0
, fs
);
11736 gen_helper_float_chs_ps(fp0
, fp0
);
11737 gen_store_fpr64(ctx
, fp0
, fd
);
11738 tcg_temp_free_i64(fp0
);
11743 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11748 TCGLabel
*l1
= gen_new_label();
11752 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11753 fp0
= tcg_temp_new_i64();
11754 gen_load_fpr64(ctx
, fp0
, fs
);
11755 gen_store_fpr64(ctx
, fp0
, fd
);
11756 tcg_temp_free_i64(fp0
);
11763 TCGLabel
*l1
= gen_new_label();
11767 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11768 fp0
= tcg_temp_new_i64();
11769 gen_load_fpr64(ctx
, fp0
, fs
);
11770 gen_store_fpr64(ctx
, fp0
, fd
);
11771 tcg_temp_free_i64(fp0
);
11779 TCGv_i64 fp0
= tcg_temp_new_i64();
11780 TCGv_i64 fp1
= tcg_temp_new_i64();
11782 gen_load_fpr64(ctx
, fp0
, ft
);
11783 gen_load_fpr64(ctx
, fp1
, fs
);
11784 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11785 tcg_temp_free_i64(fp1
);
11786 gen_store_fpr64(ctx
, fp0
, fd
);
11787 tcg_temp_free_i64(fp0
);
11793 TCGv_i64 fp0
= tcg_temp_new_i64();
11794 TCGv_i64 fp1
= tcg_temp_new_i64();
11796 gen_load_fpr64(ctx
, fp0
, ft
);
11797 gen_load_fpr64(ctx
, fp1
, fs
);
11798 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11799 tcg_temp_free_i64(fp1
);
11800 gen_store_fpr64(ctx
, fp0
, fd
);
11801 tcg_temp_free_i64(fp0
);
11804 case OPC_RECIP2_PS
:
11807 TCGv_i64 fp0
= tcg_temp_new_i64();
11808 TCGv_i64 fp1
= tcg_temp_new_i64();
11810 gen_load_fpr64(ctx
, fp0
, fs
);
11811 gen_load_fpr64(ctx
, fp1
, ft
);
11812 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11813 tcg_temp_free_i64(fp1
);
11814 gen_store_fpr64(ctx
, fp0
, fd
);
11815 tcg_temp_free_i64(fp0
);
11818 case OPC_RECIP1_PS
:
11821 TCGv_i64 fp0
= tcg_temp_new_i64();
11823 gen_load_fpr64(ctx
, fp0
, fs
);
11824 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11825 gen_store_fpr64(ctx
, fp0
, fd
);
11826 tcg_temp_free_i64(fp0
);
11829 case OPC_RSQRT1_PS
:
11832 TCGv_i64 fp0
= tcg_temp_new_i64();
11834 gen_load_fpr64(ctx
, fp0
, fs
);
11835 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11836 gen_store_fpr64(ctx
, fp0
, fd
);
11837 tcg_temp_free_i64(fp0
);
11840 case OPC_RSQRT2_PS
:
11843 TCGv_i64 fp0
= tcg_temp_new_i64();
11844 TCGv_i64 fp1
= tcg_temp_new_i64();
11846 gen_load_fpr64(ctx
, fp0
, fs
);
11847 gen_load_fpr64(ctx
, fp1
, ft
);
11848 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11849 tcg_temp_free_i64(fp1
);
11850 gen_store_fpr64(ctx
, fp0
, fd
);
11851 tcg_temp_free_i64(fp0
);
11855 check_cp1_64bitmode(ctx
);
11857 TCGv_i32 fp0
= tcg_temp_new_i32();
11859 gen_load_fpr32h(ctx
, fp0
, fs
);
11860 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11861 gen_store_fpr32(ctx
, fp0
, fd
);
11862 tcg_temp_free_i32(fp0
);
11865 case OPC_CVT_PW_PS
:
11868 TCGv_i64 fp0
= tcg_temp_new_i64();
11870 gen_load_fpr64(ctx
, fp0
, fs
);
11871 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11872 gen_store_fpr64(ctx
, fp0
, fd
);
11873 tcg_temp_free_i64(fp0
);
11877 check_cp1_64bitmode(ctx
);
11879 TCGv_i32 fp0
= tcg_temp_new_i32();
11881 gen_load_fpr32(ctx
, fp0
, fs
);
11882 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11883 gen_store_fpr32(ctx
, fp0
, fd
);
11884 tcg_temp_free_i32(fp0
);
11890 TCGv_i32 fp0
= tcg_temp_new_i32();
11891 TCGv_i32 fp1
= tcg_temp_new_i32();
11893 gen_load_fpr32(ctx
, fp0
, fs
);
11894 gen_load_fpr32(ctx
, fp1
, ft
);
11895 gen_store_fpr32h(ctx
, fp0
, fd
);
11896 gen_store_fpr32(ctx
, fp1
, fd
);
11897 tcg_temp_free_i32(fp0
);
11898 tcg_temp_free_i32(fp1
);
11904 TCGv_i32 fp0
= tcg_temp_new_i32();
11905 TCGv_i32 fp1
= tcg_temp_new_i32();
11907 gen_load_fpr32(ctx
, fp0
, fs
);
11908 gen_load_fpr32h(ctx
, fp1
, ft
);
11909 gen_store_fpr32(ctx
, fp1
, fd
);
11910 gen_store_fpr32h(ctx
, fp0
, fd
);
11911 tcg_temp_free_i32(fp0
);
11912 tcg_temp_free_i32(fp1
);
11918 TCGv_i32 fp0
= tcg_temp_new_i32();
11919 TCGv_i32 fp1
= tcg_temp_new_i32();
11921 gen_load_fpr32h(ctx
, fp0
, fs
);
11922 gen_load_fpr32(ctx
, fp1
, ft
);
11923 gen_store_fpr32(ctx
, fp1
, fd
);
11924 gen_store_fpr32h(ctx
, fp0
, fd
);
11925 tcg_temp_free_i32(fp0
);
11926 tcg_temp_free_i32(fp1
);
11932 TCGv_i32 fp0
= tcg_temp_new_i32();
11933 TCGv_i32 fp1
= tcg_temp_new_i32();
11935 gen_load_fpr32h(ctx
, fp0
, fs
);
11936 gen_load_fpr32h(ctx
, fp1
, ft
);
11937 gen_store_fpr32(ctx
, fp1
, fd
);
11938 gen_store_fpr32h(ctx
, fp0
, fd
);
11939 tcg_temp_free_i32(fp0
);
11940 tcg_temp_free_i32(fp1
);
11944 case OPC_CMP_UN_PS
:
11945 case OPC_CMP_EQ_PS
:
11946 case OPC_CMP_UEQ_PS
:
11947 case OPC_CMP_OLT_PS
:
11948 case OPC_CMP_ULT_PS
:
11949 case OPC_CMP_OLE_PS
:
11950 case OPC_CMP_ULE_PS
:
11951 case OPC_CMP_SF_PS
:
11952 case OPC_CMP_NGLE_PS
:
11953 case OPC_CMP_SEQ_PS
:
11954 case OPC_CMP_NGL_PS
:
11955 case OPC_CMP_LT_PS
:
11956 case OPC_CMP_NGE_PS
:
11957 case OPC_CMP_LE_PS
:
11958 case OPC_CMP_NGT_PS
:
11959 if (ctx
->opcode
& (1 << 6)) {
11960 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
11962 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
11966 MIPS_INVAL("farith");
11967 generate_exception_end(ctx
, EXCP_RI
);
11972 /* Coprocessor 3 (FPU) */
11973 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
11974 int fd
, int fs
, int base
, int index
)
11976 TCGv t0
= tcg_temp_new();
11979 gen_load_gpr(t0
, index
);
11980 } else if (index
== 0) {
11981 gen_load_gpr(t0
, base
);
11983 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
11985 /* Don't do NOP if destination is zero: we must perform the actual
11991 TCGv_i32 fp0
= tcg_temp_new_i32();
11993 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
11994 tcg_gen_trunc_tl_i32(fp0
, t0
);
11995 gen_store_fpr32(ctx
, fp0
, fd
);
11996 tcg_temp_free_i32(fp0
);
12001 check_cp1_registers(ctx
, fd
);
12003 TCGv_i64 fp0
= tcg_temp_new_i64();
12004 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12005 gen_store_fpr64(ctx
, fp0
, fd
);
12006 tcg_temp_free_i64(fp0
);
12010 check_cp1_64bitmode(ctx
);
12011 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12013 TCGv_i64 fp0
= tcg_temp_new_i64();
12015 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12016 gen_store_fpr64(ctx
, fp0
, fd
);
12017 tcg_temp_free_i64(fp0
);
12023 TCGv_i32 fp0
= tcg_temp_new_i32();
12024 gen_load_fpr32(ctx
, fp0
, fs
);
12025 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12026 tcg_temp_free_i32(fp0
);
12031 check_cp1_registers(ctx
, fs
);
12033 TCGv_i64 fp0
= tcg_temp_new_i64();
12034 gen_load_fpr64(ctx
, fp0
, fs
);
12035 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12036 tcg_temp_free_i64(fp0
);
12040 check_cp1_64bitmode(ctx
);
12041 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12043 TCGv_i64 fp0
= tcg_temp_new_i64();
12044 gen_load_fpr64(ctx
, fp0
, fs
);
12045 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12046 tcg_temp_free_i64(fp0
);
12053 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12054 int fd
, int fr
, int fs
, int ft
)
12060 TCGv t0
= tcg_temp_local_new();
12061 TCGv_i32 fp
= tcg_temp_new_i32();
12062 TCGv_i32 fph
= tcg_temp_new_i32();
12063 TCGLabel
*l1
= gen_new_label();
12064 TCGLabel
*l2
= gen_new_label();
12066 gen_load_gpr(t0
, fr
);
12067 tcg_gen_andi_tl(t0
, t0
, 0x7);
12069 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12070 gen_load_fpr32(ctx
, fp
, fs
);
12071 gen_load_fpr32h(ctx
, fph
, fs
);
12072 gen_store_fpr32(ctx
, fp
, fd
);
12073 gen_store_fpr32h(ctx
, fph
, fd
);
12076 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12078 #ifdef TARGET_WORDS_BIGENDIAN
12079 gen_load_fpr32(ctx
, fp
, fs
);
12080 gen_load_fpr32h(ctx
, fph
, ft
);
12081 gen_store_fpr32h(ctx
, fp
, fd
);
12082 gen_store_fpr32(ctx
, fph
, fd
);
12084 gen_load_fpr32h(ctx
, fph
, fs
);
12085 gen_load_fpr32(ctx
, fp
, ft
);
12086 gen_store_fpr32(ctx
, fph
, fd
);
12087 gen_store_fpr32h(ctx
, fp
, fd
);
12090 tcg_temp_free_i32(fp
);
12091 tcg_temp_free_i32(fph
);
12097 TCGv_i32 fp0
= tcg_temp_new_i32();
12098 TCGv_i32 fp1
= tcg_temp_new_i32();
12099 TCGv_i32 fp2
= tcg_temp_new_i32();
12101 gen_load_fpr32(ctx
, fp0
, fs
);
12102 gen_load_fpr32(ctx
, fp1
, ft
);
12103 gen_load_fpr32(ctx
, fp2
, fr
);
12104 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12105 tcg_temp_free_i32(fp0
);
12106 tcg_temp_free_i32(fp1
);
12107 gen_store_fpr32(ctx
, fp2
, fd
);
12108 tcg_temp_free_i32(fp2
);
12113 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12115 TCGv_i64 fp0
= tcg_temp_new_i64();
12116 TCGv_i64 fp1
= tcg_temp_new_i64();
12117 TCGv_i64 fp2
= tcg_temp_new_i64();
12119 gen_load_fpr64(ctx
, fp0
, fs
);
12120 gen_load_fpr64(ctx
, fp1
, ft
);
12121 gen_load_fpr64(ctx
, fp2
, fr
);
12122 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12123 tcg_temp_free_i64(fp0
);
12124 tcg_temp_free_i64(fp1
);
12125 gen_store_fpr64(ctx
, fp2
, fd
);
12126 tcg_temp_free_i64(fp2
);
12132 TCGv_i64 fp0
= tcg_temp_new_i64();
12133 TCGv_i64 fp1
= tcg_temp_new_i64();
12134 TCGv_i64 fp2
= tcg_temp_new_i64();
12136 gen_load_fpr64(ctx
, fp0
, fs
);
12137 gen_load_fpr64(ctx
, fp1
, ft
);
12138 gen_load_fpr64(ctx
, fp2
, fr
);
12139 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12140 tcg_temp_free_i64(fp0
);
12141 tcg_temp_free_i64(fp1
);
12142 gen_store_fpr64(ctx
, fp2
, fd
);
12143 tcg_temp_free_i64(fp2
);
12149 TCGv_i32 fp0
= tcg_temp_new_i32();
12150 TCGv_i32 fp1
= tcg_temp_new_i32();
12151 TCGv_i32 fp2
= tcg_temp_new_i32();
12153 gen_load_fpr32(ctx
, fp0
, fs
);
12154 gen_load_fpr32(ctx
, fp1
, ft
);
12155 gen_load_fpr32(ctx
, fp2
, fr
);
12156 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12157 tcg_temp_free_i32(fp0
);
12158 tcg_temp_free_i32(fp1
);
12159 gen_store_fpr32(ctx
, fp2
, fd
);
12160 tcg_temp_free_i32(fp2
);
12165 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12167 TCGv_i64 fp0
= tcg_temp_new_i64();
12168 TCGv_i64 fp1
= tcg_temp_new_i64();
12169 TCGv_i64 fp2
= tcg_temp_new_i64();
12171 gen_load_fpr64(ctx
, fp0
, fs
);
12172 gen_load_fpr64(ctx
, fp1
, ft
);
12173 gen_load_fpr64(ctx
, fp2
, fr
);
12174 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12175 tcg_temp_free_i64(fp0
);
12176 tcg_temp_free_i64(fp1
);
12177 gen_store_fpr64(ctx
, fp2
, fd
);
12178 tcg_temp_free_i64(fp2
);
12184 TCGv_i64 fp0
= tcg_temp_new_i64();
12185 TCGv_i64 fp1
= tcg_temp_new_i64();
12186 TCGv_i64 fp2
= tcg_temp_new_i64();
12188 gen_load_fpr64(ctx
, fp0
, fs
);
12189 gen_load_fpr64(ctx
, fp1
, ft
);
12190 gen_load_fpr64(ctx
, fp2
, fr
);
12191 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12192 tcg_temp_free_i64(fp0
);
12193 tcg_temp_free_i64(fp1
);
12194 gen_store_fpr64(ctx
, fp2
, fd
);
12195 tcg_temp_free_i64(fp2
);
12201 TCGv_i32 fp0
= tcg_temp_new_i32();
12202 TCGv_i32 fp1
= tcg_temp_new_i32();
12203 TCGv_i32 fp2
= tcg_temp_new_i32();
12205 gen_load_fpr32(ctx
, fp0
, fs
);
12206 gen_load_fpr32(ctx
, fp1
, ft
);
12207 gen_load_fpr32(ctx
, fp2
, fr
);
12208 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12209 tcg_temp_free_i32(fp0
);
12210 tcg_temp_free_i32(fp1
);
12211 gen_store_fpr32(ctx
, fp2
, fd
);
12212 tcg_temp_free_i32(fp2
);
12217 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12219 TCGv_i64 fp0
= tcg_temp_new_i64();
12220 TCGv_i64 fp1
= tcg_temp_new_i64();
12221 TCGv_i64 fp2
= tcg_temp_new_i64();
12223 gen_load_fpr64(ctx
, fp0
, fs
);
12224 gen_load_fpr64(ctx
, fp1
, ft
);
12225 gen_load_fpr64(ctx
, fp2
, fr
);
12226 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12227 tcg_temp_free_i64(fp0
);
12228 tcg_temp_free_i64(fp1
);
12229 gen_store_fpr64(ctx
, fp2
, fd
);
12230 tcg_temp_free_i64(fp2
);
12236 TCGv_i64 fp0
= tcg_temp_new_i64();
12237 TCGv_i64 fp1
= tcg_temp_new_i64();
12238 TCGv_i64 fp2
= tcg_temp_new_i64();
12240 gen_load_fpr64(ctx
, fp0
, fs
);
12241 gen_load_fpr64(ctx
, fp1
, ft
);
12242 gen_load_fpr64(ctx
, fp2
, fr
);
12243 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12244 tcg_temp_free_i64(fp0
);
12245 tcg_temp_free_i64(fp1
);
12246 gen_store_fpr64(ctx
, fp2
, fd
);
12247 tcg_temp_free_i64(fp2
);
12253 TCGv_i32 fp0
= tcg_temp_new_i32();
12254 TCGv_i32 fp1
= tcg_temp_new_i32();
12255 TCGv_i32 fp2
= tcg_temp_new_i32();
12257 gen_load_fpr32(ctx
, fp0
, fs
);
12258 gen_load_fpr32(ctx
, fp1
, ft
);
12259 gen_load_fpr32(ctx
, fp2
, fr
);
12260 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12261 tcg_temp_free_i32(fp0
);
12262 tcg_temp_free_i32(fp1
);
12263 gen_store_fpr32(ctx
, fp2
, fd
);
12264 tcg_temp_free_i32(fp2
);
12269 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12271 TCGv_i64 fp0
= tcg_temp_new_i64();
12272 TCGv_i64 fp1
= tcg_temp_new_i64();
12273 TCGv_i64 fp2
= tcg_temp_new_i64();
12275 gen_load_fpr64(ctx
, fp0
, fs
);
12276 gen_load_fpr64(ctx
, fp1
, ft
);
12277 gen_load_fpr64(ctx
, fp2
, fr
);
12278 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12279 tcg_temp_free_i64(fp0
);
12280 tcg_temp_free_i64(fp1
);
12281 gen_store_fpr64(ctx
, fp2
, fd
);
12282 tcg_temp_free_i64(fp2
);
12288 TCGv_i64 fp0
= tcg_temp_new_i64();
12289 TCGv_i64 fp1
= tcg_temp_new_i64();
12290 TCGv_i64 fp2
= tcg_temp_new_i64();
12292 gen_load_fpr64(ctx
, fp0
, fs
);
12293 gen_load_fpr64(ctx
, fp1
, ft
);
12294 gen_load_fpr64(ctx
, fp2
, fr
);
12295 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12296 tcg_temp_free_i64(fp0
);
12297 tcg_temp_free_i64(fp1
);
12298 gen_store_fpr64(ctx
, fp2
, fd
);
12299 tcg_temp_free_i64(fp2
);
12303 MIPS_INVAL("flt3_arith");
12304 generate_exception_end(ctx
, EXCP_RI
);
12309 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12313 #if !defined(CONFIG_USER_ONLY)
12314 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12315 Therefore only check the ISA in system mode. */
12316 check_insn(ctx
, ISA_MIPS32R2
);
12318 t0
= tcg_temp_new();
12322 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12323 gen_store_gpr(t0
, rt
);
12326 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12327 gen_store_gpr(t0
, rt
);
12330 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12333 gen_helper_rdhwr_cc(t0
, cpu_env
);
12334 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12337 gen_store_gpr(t0
, rt
);
12338 /* Break the TB to be able to take timer interrupts immediately
12339 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12340 we break completely out of translated code. */
12341 gen_save_pc(ctx
->base
.pc_next
+ 4);
12342 ctx
->base
.is_jmp
= DISAS_EXIT
;
12345 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12346 gen_store_gpr(t0
, rt
);
12349 check_insn(ctx
, ISA_MIPS32R6
);
12351 /* Performance counter registers are not implemented other than
12352 * control register 0.
12354 generate_exception(ctx
, EXCP_RI
);
12356 gen_helper_rdhwr_performance(t0
, cpu_env
);
12357 gen_store_gpr(t0
, rt
);
12360 check_insn(ctx
, ISA_MIPS32R6
);
12361 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12362 gen_store_gpr(t0
, rt
);
12365 #if defined(CONFIG_USER_ONLY)
12366 tcg_gen_ld_tl(t0
, cpu_env
,
12367 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12368 gen_store_gpr(t0
, rt
);
12371 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12372 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12373 tcg_gen_ld_tl(t0
, cpu_env
,
12374 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12375 gen_store_gpr(t0
, rt
);
12377 generate_exception_end(ctx
, EXCP_RI
);
12381 default: /* Invalid */
12382 MIPS_INVAL("rdhwr");
12383 generate_exception_end(ctx
, EXCP_RI
);
12389 static inline void clear_branch_hflags(DisasContext
*ctx
)
12391 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12392 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12393 save_cpu_state(ctx
, 0);
12395 /* it is not safe to save ctx->hflags as hflags may be changed
12396 in execution time by the instruction in delay / forbidden slot. */
12397 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
/*
 * Complete a pending branch after the delay-slot (or forbidden-slot)
 * instruction has been translated.  insn_bytes is the size of that
 * instruction and is used to compute the fall-through address for the
 * conditional and forbidden-slot cases.
 *
 * NOTE(review): the `break` statements, the BC label placement and the
 * default-case abort() were reconstructed from the surrounding control
 * flow — confirm against upstream before relying on them.
 */
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            /* Forbidden slot: only the fall-through path reaches here. */
            gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                /* Cross-ISA jump: toggle the MIPS16/microMIPS mode bit. */
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                /* Bit 0 of the target encodes the ISA mode (M16 flag). */
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                /* Strip the mode bit from the architectural PC. */
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->base.singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_lookup_and_goto_ptr();
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
12466 /* Compact Branches */
12467 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12468 int rs
, int rt
, int32_t offset
)
12470 int bcond_compute
= 0;
12471 TCGv t0
= tcg_temp_new();
12472 TCGv t1
= tcg_temp_new();
12473 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12475 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12476 #ifdef MIPS_DEBUG_DISAS
12477 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12478 "\n", ctx
->base
.pc_next
);
12480 generate_exception_end(ctx
, EXCP_RI
);
12484 /* Load needed operands and calculate btarget */
12486 /* compact branch */
12487 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12488 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12489 gen_load_gpr(t0
, rs
);
12490 gen_load_gpr(t1
, rt
);
12492 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12493 if (rs
<= rt
&& rs
== 0) {
12494 /* OPC_BEQZALC, OPC_BNEZALC */
12495 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12498 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12499 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12500 gen_load_gpr(t0
, rs
);
12501 gen_load_gpr(t1
, rt
);
12503 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12505 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12506 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12507 if (rs
== 0 || rs
== rt
) {
12508 /* OPC_BLEZALC, OPC_BGEZALC */
12509 /* OPC_BGTZALC, OPC_BLTZALC */
12510 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12512 gen_load_gpr(t0
, rs
);
12513 gen_load_gpr(t1
, rt
);
12515 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12519 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12524 /* OPC_BEQZC, OPC_BNEZC */
12525 gen_load_gpr(t0
, rs
);
12527 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12529 /* OPC_JIC, OPC_JIALC */
12530 TCGv tbase
= tcg_temp_new();
12531 TCGv toffset
= tcg_temp_new();
12533 gen_load_gpr(tbase
, rt
);
12534 tcg_gen_movi_tl(toffset
, offset
);
12535 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12536 tcg_temp_free(tbase
);
12537 tcg_temp_free(toffset
);
12541 MIPS_INVAL("Compact branch/jump");
12542 generate_exception_end(ctx
, EXCP_RI
);
12546 if (bcond_compute
== 0) {
12547 /* Uncoditional compact branch */
12550 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12553 ctx
->hflags
|= MIPS_HFLAG_BR
;
12556 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12559 ctx
->hflags
|= MIPS_HFLAG_B
;
12562 MIPS_INVAL("Compact branch/jump");
12563 generate_exception_end(ctx
, EXCP_RI
);
12567 /* Generating branch here as compact branches don't have delay slot */
12568 gen_branch(ctx
, 4);
12570 /* Conditional compact branch */
12571 TCGLabel
*fs
= gen_new_label();
12572 save_cpu_state(ctx
, 0);
12575 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12576 if (rs
== 0 && rt
!= 0) {
12578 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12579 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12581 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12584 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12587 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12588 if (rs
== 0 && rt
!= 0) {
12590 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12591 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12593 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12596 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12599 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12600 if (rs
== 0 && rt
!= 0) {
12602 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12603 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12605 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12608 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12611 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12612 if (rs
== 0 && rt
!= 0) {
12614 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12615 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12617 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12620 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12623 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12624 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12626 /* OPC_BOVC, OPC_BNVC */
12627 TCGv t2
= tcg_temp_new();
12628 TCGv t3
= tcg_temp_new();
12629 TCGv t4
= tcg_temp_new();
12630 TCGv input_overflow
= tcg_temp_new();
12632 gen_load_gpr(t0
, rs
);
12633 gen_load_gpr(t1
, rt
);
12634 tcg_gen_ext32s_tl(t2
, t0
);
12635 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12636 tcg_gen_ext32s_tl(t3
, t1
);
12637 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12638 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12640 tcg_gen_add_tl(t4
, t2
, t3
);
12641 tcg_gen_ext32s_tl(t4
, t4
);
12642 tcg_gen_xor_tl(t2
, t2
, t3
);
12643 tcg_gen_xor_tl(t3
, t4
, t3
);
12644 tcg_gen_andc_tl(t2
, t3
, t2
);
12645 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12646 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12647 if (opc
== OPC_BOVC
) {
12649 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12652 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12654 tcg_temp_free(input_overflow
);
12658 } else if (rs
< rt
&& rs
== 0) {
12659 /* OPC_BEQZALC, OPC_BNEZALC */
12660 if (opc
== OPC_BEQZALC
) {
12662 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12665 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12668 /* OPC_BEQC, OPC_BNEC */
12669 if (opc
== OPC_BEQC
) {
12671 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12674 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12679 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12682 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12685 MIPS_INVAL("Compact conditional branch/jump");
12686 generate_exception_end(ctx
, EXCP_RI
);
12690 /* Generating branch here as compact branches don't have delay slot */
12691 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12694 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12702 /* ISA extensions (ASEs) */
12703 /* MIPS16 extension to MIPS32 */
12705 /* MIPS16 major opcodes */
12707 M16_OPC_ADDIUSP
= 0x00,
12708 M16_OPC_ADDIUPC
= 0x01,
12710 M16_OPC_JAL
= 0x03,
12711 M16_OPC_BEQZ
= 0x04,
12712 M16_OPC_BNEQZ
= 0x05,
12713 M16_OPC_SHIFT
= 0x06,
12715 M16_OPC_RRIA
= 0x08,
12716 M16_OPC_ADDIU8
= 0x09,
12717 M16_OPC_SLTI
= 0x0a,
12718 M16_OPC_SLTIU
= 0x0b,
12721 M16_OPC_CMPI
= 0x0e,
12725 M16_OPC_LWSP
= 0x12,
12727 M16_OPC_LBU
= 0x14,
12728 M16_OPC_LHU
= 0x15,
12729 M16_OPC_LWPC
= 0x16,
12730 M16_OPC_LWU
= 0x17,
12733 M16_OPC_SWSP
= 0x1a,
12735 M16_OPC_RRR
= 0x1c,
12737 M16_OPC_EXTEND
= 0x1e,
12741 /* I8 funct field */
12760 /* RR funct field */
12794 /* I64 funct field */
12802 I64_DADDIUPC
= 0x6,
12806 /* RR ry field for CNVT */
12808 RR_RY_CNVT_ZEB
= 0x0,
12809 RR_RY_CNVT_ZEH
= 0x1,
12810 RR_RY_CNVT_ZEW
= 0x2,
12811 RR_RY_CNVT_SEB
= 0x4,
12812 RR_RY_CNVT_SEH
= 0x5,
12813 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field to the architectural
 * register number: encodings 0..1 map to $16/$17, 2..7 to $2..$7.
 */
static int xlat (int r)
{
    /* const: read-only table, consistent with mmreg()/mmreg2(). */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12823 static void gen_mips16_save (DisasContext
*ctx
,
12824 int xsregs
, int aregs
,
12825 int do_ra
, int do_s0
, int do_s1
,
12828 TCGv t0
= tcg_temp_new();
12829 TCGv t1
= tcg_temp_new();
12830 TCGv t2
= tcg_temp_new();
12860 generate_exception_end(ctx
, EXCP_RI
);
12866 gen_base_offset_addr(ctx
, t0
, 29, 12);
12867 gen_load_gpr(t1
, 7);
12868 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12871 gen_base_offset_addr(ctx
, t0
, 29, 8);
12872 gen_load_gpr(t1
, 6);
12873 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12876 gen_base_offset_addr(ctx
, t0
, 29, 4);
12877 gen_load_gpr(t1
, 5);
12878 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12881 gen_base_offset_addr(ctx
, t0
, 29, 0);
12882 gen_load_gpr(t1
, 4);
12883 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12886 gen_load_gpr(t0
, 29);
12888 #define DECR_AND_STORE(reg) do { \
12889 tcg_gen_movi_tl(t2, -4); \
12890 gen_op_addr_add(ctx, t0, t0, t2); \
12891 gen_load_gpr(t1, reg); \
12892 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
12896 DECR_AND_STORE(31);
12901 DECR_AND_STORE(30);
12904 DECR_AND_STORE(23);
12907 DECR_AND_STORE(22);
12910 DECR_AND_STORE(21);
12913 DECR_AND_STORE(20);
12916 DECR_AND_STORE(19);
12919 DECR_AND_STORE(18);
12923 DECR_AND_STORE(17);
12926 DECR_AND_STORE(16);
12956 generate_exception_end(ctx
, EXCP_RI
);
12972 #undef DECR_AND_STORE
12974 tcg_gen_movi_tl(t2
, -framesize
);
12975 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
12981 static void gen_mips16_restore (DisasContext
*ctx
,
12982 int xsregs
, int aregs
,
12983 int do_ra
, int do_s0
, int do_s1
,
12987 TCGv t0
= tcg_temp_new();
12988 TCGv t1
= tcg_temp_new();
12989 TCGv t2
= tcg_temp_new();
12991 tcg_gen_movi_tl(t2
, framesize
);
12992 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
12994 #define DECR_AND_LOAD(reg) do { \
12995 tcg_gen_movi_tl(t2, -4); \
12996 gen_op_addr_add(ctx, t0, t0, t2); \
12997 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
12998 gen_store_gpr(t1, reg); \
13062 generate_exception_end(ctx
, EXCP_RI
);
13078 #undef DECR_AND_LOAD
13080 tcg_gen_movi_tl(t2
, framesize
);
13081 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
/*
 * MIPS16 ADDIUPC: rx = (PC-relative base) + imm.
 *
 * The extended form is not permitted in a branch delay / forbidden
 * slot and raises a Reserved Instruction exception there.  Unless
 * is_64_bit, the result is sign-extended to 32 bits.
 */
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}
/*
 * Emit a CACHE instruction: compute the base+offset effective address
 * and invoke the cache helper with the operation code.
 */
static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t0);
}
13117 #if defined(TARGET_MIPS64)
13118 static void decode_i64_mips16 (DisasContext
*ctx
,
13119 int ry
, int funct
, int16_t offset
,
13124 check_insn(ctx
, ISA_MIPS3
);
13125 check_mips_64(ctx
);
13126 offset
= extended
? offset
: offset
<< 3;
13127 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13130 check_insn(ctx
, ISA_MIPS3
);
13131 check_mips_64(ctx
);
13132 offset
= extended
? offset
: offset
<< 3;
13133 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13136 check_insn(ctx
, ISA_MIPS3
);
13137 check_mips_64(ctx
);
13138 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13139 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13142 check_insn(ctx
, ISA_MIPS3
);
13143 check_mips_64(ctx
);
13144 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13145 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13148 check_insn(ctx
, ISA_MIPS3
);
13149 check_mips_64(ctx
);
13150 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13151 generate_exception_end(ctx
, EXCP_RI
);
13153 offset
= extended
? offset
: offset
<< 3;
13154 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13158 check_insn(ctx
, ISA_MIPS3
);
13159 check_mips_64(ctx
);
13160 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13161 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13164 check_insn(ctx
, ISA_MIPS3
);
13165 check_mips_64(ctx
);
13166 offset
= extended
? offset
: offset
<< 2;
13167 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13170 check_insn(ctx
, ISA_MIPS3
);
13171 check_mips_64(ctx
);
13172 offset
= extended
? offset
: offset
<< 2;
13173 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13179 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13181 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13182 int op
, rx
, ry
, funct
, sa
;
13183 int16_t imm
, offset
;
13185 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13186 op
= (ctx
->opcode
>> 11) & 0x1f;
13187 sa
= (ctx
->opcode
>> 22) & 0x1f;
13188 funct
= (ctx
->opcode
>> 8) & 0x7;
13189 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13190 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13191 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13192 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13193 | (ctx
->opcode
& 0x1f));
13195 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13198 case M16_OPC_ADDIUSP
:
13199 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13201 case M16_OPC_ADDIUPC
:
13202 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13205 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13206 /* No delay slot, so just process as a normal instruction */
13209 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13210 /* No delay slot, so just process as a normal instruction */
13212 case M16_OPC_BNEQZ
:
13213 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13214 /* No delay slot, so just process as a normal instruction */
13216 case M16_OPC_SHIFT
:
13217 switch (ctx
->opcode
& 0x3) {
13219 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13222 #if defined(TARGET_MIPS64)
13223 check_mips_64(ctx
);
13224 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13226 generate_exception_end(ctx
, EXCP_RI
);
13230 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13233 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13237 #if defined(TARGET_MIPS64)
13239 check_insn(ctx
, ISA_MIPS3
);
13240 check_mips_64(ctx
);
13241 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13245 imm
= ctx
->opcode
& 0xf;
13246 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13247 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13248 imm
= (int16_t) (imm
<< 1) >> 1;
13249 if ((ctx
->opcode
>> 4) & 0x1) {
13250 #if defined(TARGET_MIPS64)
13251 check_mips_64(ctx
);
13252 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13254 generate_exception_end(ctx
, EXCP_RI
);
13257 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13260 case M16_OPC_ADDIU8
:
13261 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13264 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13266 case M16_OPC_SLTIU
:
13267 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13272 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13275 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13278 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13281 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13284 check_insn(ctx
, ISA_MIPS32
);
13286 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13287 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13288 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13289 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13290 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13291 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13292 | (ctx
->opcode
& 0xf)) << 3;
13294 if (ctx
->opcode
& (1 << 7)) {
13295 gen_mips16_save(ctx
, xsregs
, aregs
,
13296 do_ra
, do_s0
, do_s1
,
13299 gen_mips16_restore(ctx
, xsregs
, aregs
,
13300 do_ra
, do_s0
, do_s1
,
13306 generate_exception_end(ctx
, EXCP_RI
);
13311 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13314 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13316 #if defined(TARGET_MIPS64)
13318 check_insn(ctx
, ISA_MIPS3
);
13319 check_mips_64(ctx
);
13320 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13324 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13327 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13330 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13333 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13336 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13339 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13342 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13344 #if defined(TARGET_MIPS64)
13346 check_insn(ctx
, ISA_MIPS3
);
13347 check_mips_64(ctx
);
13348 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13352 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13355 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13358 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13361 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13363 #if defined(TARGET_MIPS64)
13365 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13369 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Return true when an SDBBP with the given code field should be
 * treated as a UHI semihosting call: semihosting must be enabled and
 * the code must be 1.  Never true for user-mode emulation.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
13385 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13389 int op
, cnvt_op
, op1
, offset
;
13393 op
= (ctx
->opcode
>> 11) & 0x1f;
13394 sa
= (ctx
->opcode
>> 2) & 0x7;
13395 sa
= sa
== 0 ? 8 : sa
;
13396 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13397 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13398 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13399 op1
= offset
= ctx
->opcode
& 0x1f;
13404 case M16_OPC_ADDIUSP
:
13406 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13408 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13411 case M16_OPC_ADDIUPC
:
13412 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13415 offset
= (ctx
->opcode
& 0x7ff) << 1;
13416 offset
= (int16_t)(offset
<< 4) >> 4;
13417 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13418 /* No delay slot, so just process as a normal instruction */
13421 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13422 offset
= (((ctx
->opcode
& 0x1f) << 21)
13423 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13425 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13426 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13430 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13431 ((int8_t)ctx
->opcode
) << 1, 0);
13432 /* No delay slot, so just process as a normal instruction */
13434 case M16_OPC_BNEQZ
:
13435 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13436 ((int8_t)ctx
->opcode
) << 1, 0);
13437 /* No delay slot, so just process as a normal instruction */
13439 case M16_OPC_SHIFT
:
13440 switch (ctx
->opcode
& 0x3) {
13442 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13445 #if defined(TARGET_MIPS64)
13446 check_insn(ctx
, ISA_MIPS3
);
13447 check_mips_64(ctx
);
13448 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13450 generate_exception_end(ctx
, EXCP_RI
);
13454 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13457 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13461 #if defined(TARGET_MIPS64)
13463 check_insn(ctx
, ISA_MIPS3
);
13464 check_mips_64(ctx
);
13465 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13470 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13472 if ((ctx
->opcode
>> 4) & 1) {
13473 #if defined(TARGET_MIPS64)
13474 check_insn(ctx
, ISA_MIPS3
);
13475 check_mips_64(ctx
);
13476 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13478 generate_exception_end(ctx
, EXCP_RI
);
13481 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13485 case M16_OPC_ADDIU8
:
13487 int16_t imm
= (int8_t) ctx
->opcode
;
13489 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13494 int16_t imm
= (uint8_t) ctx
->opcode
;
13495 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13498 case M16_OPC_SLTIU
:
13500 int16_t imm
= (uint8_t) ctx
->opcode
;
13501 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13508 funct
= (ctx
->opcode
>> 8) & 0x7;
13511 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13512 ((int8_t)ctx
->opcode
) << 1, 0);
13515 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13516 ((int8_t)ctx
->opcode
) << 1, 0);
13519 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13522 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13523 ((int8_t)ctx
->opcode
) << 3);
13526 check_insn(ctx
, ISA_MIPS32
);
13528 int do_ra
= ctx
->opcode
& (1 << 6);
13529 int do_s0
= ctx
->opcode
& (1 << 5);
13530 int do_s1
= ctx
->opcode
& (1 << 4);
13531 int framesize
= ctx
->opcode
& 0xf;
13533 if (framesize
== 0) {
13536 framesize
= framesize
<< 3;
13539 if (ctx
->opcode
& (1 << 7)) {
13540 gen_mips16_save(ctx
, 0, 0,
13541 do_ra
, do_s0
, do_s1
, framesize
);
13543 gen_mips16_restore(ctx
, 0, 0,
13544 do_ra
, do_s0
, do_s1
, framesize
);
13550 int rz
= xlat(ctx
->opcode
& 0x7);
13552 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13553 ((ctx
->opcode
>> 5) & 0x7);
13554 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13558 reg32
= ctx
->opcode
& 0x1f;
13559 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13562 generate_exception_end(ctx
, EXCP_RI
);
13569 int16_t imm
= (uint8_t) ctx
->opcode
;
13571 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13576 int16_t imm
= (uint8_t) ctx
->opcode
;
13577 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13580 #if defined(TARGET_MIPS64)
13582 check_insn(ctx
, ISA_MIPS3
);
13583 check_mips_64(ctx
);
13584 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13588 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13591 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13594 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13597 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13600 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13603 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13606 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13608 #if defined (TARGET_MIPS64)
13610 check_insn(ctx
, ISA_MIPS3
);
13611 check_mips_64(ctx
);
13612 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13616 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13619 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13622 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13625 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13629 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13632 switch (ctx
->opcode
& 0x3) {
13634 mips32_op
= OPC_ADDU
;
13637 mips32_op
= OPC_SUBU
;
13639 #if defined(TARGET_MIPS64)
13641 mips32_op
= OPC_DADDU
;
13642 check_insn(ctx
, ISA_MIPS3
);
13643 check_mips_64(ctx
);
13646 mips32_op
= OPC_DSUBU
;
13647 check_insn(ctx
, ISA_MIPS3
);
13648 check_mips_64(ctx
);
13652 generate_exception_end(ctx
, EXCP_RI
);
13656 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13665 int nd
= (ctx
->opcode
>> 7) & 0x1;
13666 int link
= (ctx
->opcode
>> 6) & 0x1;
13667 int ra
= (ctx
->opcode
>> 5) & 0x1;
13670 check_insn(ctx
, ISA_MIPS32
);
13679 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13684 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13685 gen_helper_do_semihosting(cpu_env
);
13687 /* XXX: not clear which exception should be raised
13688 * when in debug mode...
13690 check_insn(ctx
, ISA_MIPS32
);
13691 generate_exception_end(ctx
, EXCP_DBp
);
13695 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13698 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13701 generate_exception_end(ctx
, EXCP_BREAK
);
13704 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13707 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13710 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13712 #if defined (TARGET_MIPS64)
13714 check_insn(ctx
, ISA_MIPS3
);
13715 check_mips_64(ctx
);
13716 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13720 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13723 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13726 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13729 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13732 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13735 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13738 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13741 check_insn(ctx
, ISA_MIPS32
);
13743 case RR_RY_CNVT_ZEB
:
13744 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13746 case RR_RY_CNVT_ZEH
:
13747 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13749 case RR_RY_CNVT_SEB
:
13750 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13752 case RR_RY_CNVT_SEH
:
13753 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13755 #if defined (TARGET_MIPS64)
13756 case RR_RY_CNVT_ZEW
:
13757 check_insn(ctx
, ISA_MIPS64
);
13758 check_mips_64(ctx
);
13759 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13761 case RR_RY_CNVT_SEW
:
13762 check_insn(ctx
, ISA_MIPS64
);
13763 check_mips_64(ctx
);
13764 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13768 generate_exception_end(ctx
, EXCP_RI
);
13773 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13775 #if defined (TARGET_MIPS64)
13777 check_insn(ctx
, ISA_MIPS3
);
13778 check_mips_64(ctx
);
13779 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13782 check_insn(ctx
, ISA_MIPS3
);
13783 check_mips_64(ctx
);
13784 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13787 check_insn(ctx
, ISA_MIPS3
);
13788 check_mips_64(ctx
);
13789 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13792 check_insn(ctx
, ISA_MIPS3
);
13793 check_mips_64(ctx
);
13794 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13798 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13801 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13804 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13807 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13809 #if defined (TARGET_MIPS64)
13811 check_insn(ctx
, ISA_MIPS3
);
13812 check_mips_64(ctx
);
13813 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13816 check_insn(ctx
, ISA_MIPS3
);
13817 check_mips_64(ctx
);
13818 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13821 check_insn(ctx
, ISA_MIPS3
);
13822 check_mips_64(ctx
);
13823 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13826 check_insn(ctx
, ISA_MIPS3
);
13827 check_mips_64(ctx
);
13828 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13832 generate_exception_end(ctx
, EXCP_RI
);
13836 case M16_OPC_EXTEND
:
13837 decode_extended_mips16_opc(env
, ctx
);
13840 #if defined(TARGET_MIPS64)
13842 funct
= (ctx
->opcode
>> 8) & 0x7;
13843 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13847 generate_exception_end(ctx
, EXCP_RI
);
13854 /* microMIPS extension to MIPS32/MIPS64 */
13857 * microMIPS32/microMIPS64 major opcodes
13859 * 1. MIPS Architecture for Programmers Volume II-B:
13860 * The microMIPS32 Instruction Set (Revision 3.05)
13862 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13864 * 2. MIPS Architecture For Programmers Volume II-A:
13865 * The MIPS64 Instruction Set (Revision 3.51)
13895 POOL32S
= 0x16, /* MIPS64 */
13896 DADDIU32
= 0x17, /* MIPS64 */
13925 /* 0x29 is reserved */
13938 /* 0x31 is reserved */
13951 SD32
= 0x36, /* MIPS64 */
13952 LD32
= 0x37, /* MIPS64 */
13954 /* 0x39 is reserved */
13970 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
13992 /* POOL32A encoding of minor opcode field */
13995 /* These opcodes are distinguished only by bits 9..6; those bits are
13996 * what are recorded below. */
14033 /* The following can be distinguished by their lower 6 bits. */
14043 /* POOL32AXF encoding of minor opcode field extension */
14046 * 1. MIPS Architecture for Programmers Volume II-B:
14047 * The microMIPS32 Instruction Set (Revision 3.05)
14049 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14051 * 2. MIPS Architecture for Programmers VolumeIV-e:
14052 * The MIPS DSP Application-Specific Extension
14053 * to the microMIPS32 Architecture (Revision 2.34)
14055 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14070 /* begin of microMIPS32 DSP */
14072 /* bits 13..12 for 0x01 */
14078 /* bits 13..12 for 0x2a */
14084 /* bits 13..12 for 0x32 */
14088 /* end of microMIPS32 DSP */
14090 /* bits 15..12 for 0x2c */
14107 /* bits 15..12 for 0x34 */
14115 /* bits 15..12 for 0x3c */
14117 JR
= 0x0, /* alias */
14125 /* bits 15..12 for 0x05 */
14129 /* bits 15..12 for 0x0d */
14141 /* bits 15..12 for 0x15 */
14147 /* bits 15..12 for 0x1d */
14151 /* bits 15..12 for 0x2d */
14156 /* bits 15..12 for 0x35 */
14163 /* POOL32B encoding of minor opcode field (bits 15..12) */
14179 /* POOL32C encoding of minor opcode field (bits 15..12) */
14200 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14213 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14226 /* POOL32F encoding of minor opcode field (bits 5..0) */
14229 /* These are the bit 7..6 values */
14238 /* These are the bit 8..6 values */
14263 MOVZ_FMT_05
= 0x05,
14297 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14304 /* POOL32Fxf encoding of minor opcode extension field */
14342 /* POOL32I encoding of minor opcode field (bits 25..21) */
14372 /* These overlap and are distinguished by bit16 of the instruction */
14381 /* POOL16A encoding of minor opcode field */
14388 /* POOL16B encoding of minor opcode field */
14395 /* POOL16C encoding of minor opcode field */
14415 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14439 /* POOL16D encoding of minor opcode field */
14446 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register encoding onto the architectural
 * register number: 0..1 select $16/$17, 2..7 select $2..$7.
 */
static int mmreg (int r)
{
    static const int reg_table[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return reg_table[r];
}
/*
 * Register mapping for 16-bit store instructions: like mmreg(), but
 * encoding 0 selects $0 (zero) instead of $16.
 */
static int mmreg2 (int r)
{
    static const int reg_table[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return reg_table[r];
}
14468 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14469 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14470 #define uMIPS_RS2(op) uMIPS_RS(op)
14471 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14472 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14473 #define uMIPS_RS5(op) (op & 0x1f)
14475 /* Signed immediate */
14476 #define SIMM(op, start, width) \
14477 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14480 /* Zero-extended immediate */
14481 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
/*
 * microMIPS ADDIUR1SP: rd = $sp + (6-bit unsigned immediate << 2).
 */
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
/*
 * microMIPS ADDIUR2: rd = rs + imm, where the 3-bit encoded field
 * selects one of the immediates { 1, 4, 8, 12, 16, 20, 24, -1 }.
 */
static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
/*
 * microMIPS ADDIUSP: adjust $sp by a folded 9-bit immediate.
 * Decoding: 0..1 -> 256..257, 2..255 -> identity, 256..509 ->
 * encoded - 512 (negative), 510..511 -> encoded - 768; the decoded
 * value is then scaled by 4.
 */
static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
/*
 * microMIPS ADDIUS5: rd = rd + 4-bit signed immediate.
 */
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}
/*
 * microMIPS ANDI16: rd = rs & imm, where the 4-bit encoded field
 * indexes a fixed table of useful mask values.
 */
static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
/*
 * microMIPS LWM/SWM (and LDM/SDM on MIPS64): load or store the set of
 * registers described by reglist at base+offset, via helpers.  Not
 * valid in a branch delay slot — raises Reserved Instruction there.
 *
 * NOTE(review): the case labels below were reconstructed from the
 * visible helper calls — confirm against upstream.
 */
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    /* The helpers may fault, so the CPU state must be synced first. */
    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
/*
 * gen_pool16c_insn: decode the microMIPS POOL16C 16-bit minor opcode group
 * (NOT/XOR/AND/OR, LWM16/SWM16, JR16/JRC/JALR16/JALRS16, MFHI16/MFLO16,
 * BREAK16, SDBBP16, JRADDIUSP) and emit the corresponding translation.
 *
 * NOTE(review): this text is extraction-mangled -- statements are split
 * across lines and fused with original source line numbers, and several
 * structural lines (braces, case labels, break statements) were dropped.
 * Restore from a pristine copy before compiling; no tokens changed here.
 */
14577 static void gen_pool16c_insn(DisasContext
*ctx
)
14579 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14580 int rs
= mmreg(ctx
->opcode
& 0x7);
14582 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14587 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14593 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14599 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14605 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
/* LWM16: convert the 2-bit reglist field to the 32-bit LWM encoding. */
14612 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14613 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14615 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
/* SWM16: same conversion for stores. */
14624 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14625 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14627 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14634 int reg
= ctx
->opcode
& 0x1f;
14636 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14642 int reg
= ctx
->opcode
& 0x1f;
14643 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14644 /* Let normal delay slot handling in our caller take us
14645 to the branch target. */
14650 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14651 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14655 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14656 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14660 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14664 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14667 generate_exception_end(ctx
, EXCP_BREAK
);
/* SDBBP16: semihosting trap when UHI is active, else debug breakpoint. */
14670 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14671 gen_helper_do_semihosting(cpu_env
);
14673 /* XXX: not clear which exception should be raised
14674 * when in debug mode...
14676 check_insn(ctx
, ISA_MIPS32
);
14677 generate_exception_end(ctx
, EXCP_DBp
);
14680 case JRADDIUSP
+ 0:
14681 case JRADDIUSP
+ 1:
14683 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14684 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14685 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14686 /* Let normal delay slot handling in our caller take us
14687 to the branch target. */
14691 generate_exception_end(ctx
, EXCP_RI
);
14696 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14699 int rd
, rs
, re
, rt
;
14700 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14701 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14702 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14703 rd
= rd_enc
[enc_dest
];
14704 re
= re_enc
[enc_dest
];
14705 rs
= rs_rt_enc
[enc_rs
];
14706 rt
= rs_rt_enc
[enc_rt
];
14708 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14710 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14713 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14715 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
/*
 * gen_pool16c_r6_insn: decode the MIPS32R6 variant of the microMIPS
 * POOL16C group (NOT16/AND16/XOR16/OR16, LWM16/SWM16 with a converted
 * reglist, JRC16/JRCADDIUSP, MOVEP, JALRC16, BREAK16, SDBBP16).
 *
 * NOTE(review): this text is extraction-mangled -- statements are split
 * across lines and fused with original source line numbers, and several
 * structural lines (braces, case labels, break statements) were dropped.
 * Restore from a pristine copy before compiling; no tokens changed here.
 */
14719 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14721 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14722 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14724 switch (ctx
->opcode
& 0xf) {
14726 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14729 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
/* LWM16: reglist bits [9:8] map to encodings 0x11..0x14. */
14733 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14734 int offset
= extract32(ctx
->opcode
, 4, 4);
14735 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14738 case R6_JRC16
: /* JRCADDIUSP */
14739 if ((ctx
->opcode
>> 4) & 1) {
14741 int imm
= extract32(ctx
->opcode
, 5, 5);
14742 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14743 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14746 rs
= extract32(ctx
->opcode
, 5, 5);
14747 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
/* MOVEP: recover the three encoded operand selectors from the opcode. */
14759 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14760 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14761 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14762 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14766 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14769 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14773 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14774 int offset
= extract32(ctx
->opcode
, 4, 4);
14775 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14778 case JALRC16
: /* BREAK16, SDBBP16 */
14779 switch (ctx
->opcode
& 0x3f) {
14781 case JALRC16
+ 0x20:
14783 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14788 generate_exception(ctx
, EXCP_BREAK
);
/* SDBBP16: semihosting when UHI code matches, else RI/DBp. */
14792 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14793 gen_helper_do_semihosting(cpu_env
);
14795 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14796 generate_exception(ctx
, EXCP_RI
);
14798 generate_exception(ctx
, EXCP_DBp
);
14805 generate_exception(ctx
, EXCP_RI
);
14810 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14812 TCGv t0
= tcg_temp_new();
14813 TCGv t1
= tcg_temp_new();
14815 gen_load_gpr(t0
, base
);
14818 gen_load_gpr(t1
, index
);
14819 tcg_gen_shli_tl(t1
, t1
, 2);
14820 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14823 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14824 gen_store_gpr(t1
, rd
);
14830 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14831 int base
, int16_t offset
)
14835 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14836 generate_exception_end(ctx
, EXCP_RI
);
14840 t0
= tcg_temp_new();
14841 t1
= tcg_temp_new();
14843 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14848 generate_exception_end(ctx
, EXCP_RI
);
14851 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14852 gen_store_gpr(t1
, rd
);
14853 tcg_gen_movi_tl(t1
, 4);
14854 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14855 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14856 gen_store_gpr(t1
, rd
+1);
14859 gen_load_gpr(t1
, rd
);
14860 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14861 tcg_gen_movi_tl(t1
, 4);
14862 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14863 gen_load_gpr(t1
, rd
+1);
14864 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14866 #ifdef TARGET_MIPS64
14869 generate_exception_end(ctx
, EXCP_RI
);
14872 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14873 gen_store_gpr(t1
, rd
);
14874 tcg_gen_movi_tl(t1
, 8);
14875 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14876 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14877 gen_store_gpr(t1
, rd
+1);
14880 gen_load_gpr(t1
, rd
);
14881 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14882 tcg_gen_movi_tl(t1
, 8);
14883 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14884 gen_load_gpr(t1
, rd
+1);
14885 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14893 static void gen_sync(int stype
)
14895 TCGBar tcg_mo
= TCG_BAR_SC
;
14898 case 0x4: /* SYNC_WMB */
14899 tcg_mo
|= TCG_MO_ST_ST
;
14901 case 0x10: /* SYNC_MB */
14902 tcg_mo
|= TCG_MO_ALL
;
14904 case 0x11: /* SYNC_ACQUIRE */
14905 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14907 case 0x12: /* SYNC_RELEASE */
14908 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14910 case 0x13: /* SYNC_RMB */
14911 tcg_mo
|= TCG_MO_LD_LD
;
14914 tcg_mo
|= TCG_MO_ALL
;
14918 tcg_gen_mb(tcg_mo
);
/*
 * gen_pool32axf: decode the microMIPS POOL32Axf extended minor-opcode
 * group: traps (TEQ/TGE/...), MFC0/MTC0, multiply-accumulate and
 * multiply/divide families, bit operations (BITSWAP/SEB/SEH/WSBH,
 * CLO/CLZ, RDHWR), JALR/JALRS variants, shadow-register moves,
 * privileged CP0 ops (TLB*, WAIT, ERET/DERET, DI/EI), SYNC, SYSCALL,
 * SDBBP and the HI/LO register moves.
 *
 * NOTE(review): this text is extraction-mangled -- statements are split
 * across lines and fused with original source line numbers, and many
 * structural lines (braces, case labels, break statements) were dropped.
 * Restore from a pristine copy before compiling; no tokens changed here.
 */
14921 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14923 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14924 int minor
= (ctx
->opcode
>> 12) & 0xf;
14925 uint32_t mips32_op
;
14927 switch (extension
) {
/* Trap family: pick the 32-bit trap opcode, then emit one gen_trap(). */
14929 mips32_op
= OPC_TEQ
;
14932 mips32_op
= OPC_TGE
;
14935 mips32_op
= OPC_TGEU
;
14938 mips32_op
= OPC_TLT
;
14941 mips32_op
= OPC_TLTU
;
14944 mips32_op
= OPC_TNE
;
14946 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
14948 #ifndef CONFIG_USER_ONLY
14951 check_cp0_enabled(ctx
);
14953 /* Treat as NOP. */
14956 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
14960 check_cp0_enabled(ctx
);
14962 TCGv t0
= tcg_temp_new();
14964 gen_load_gpr(t0
, rt
);
14965 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
/* MADD/MADDU/MSUB/MSUBU with a 2-bit accumulator selector. */
14971 switch (minor
& 3) {
14973 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14976 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14979 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14982 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14985 goto pool32axf_invalid
;
14989 switch (minor
& 3) {
14991 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14994 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14997 goto pool32axf_invalid
;
15003 check_insn(ctx
, ISA_MIPS32R6
);
15004 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15007 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15010 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15013 mips32_op
= OPC_CLO
;
15016 mips32_op
= OPC_CLZ
;
15018 check_insn(ctx
, ISA_MIPS32
);
15019 gen_cl(ctx
, mips32_op
, rt
, rs
);
15022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15023 gen_rdhwr(ctx
, rt
, rs
, 0);
15026 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15029 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15030 mips32_op
= OPC_MULT
;
15033 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15034 mips32_op
= OPC_MULTU
;
15037 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15038 mips32_op
= OPC_DIV
;
15041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15042 mips32_op
= OPC_DIVU
;
15045 check_insn(ctx
, ISA_MIPS32
);
15046 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15050 mips32_op
= OPC_MADD
;
15053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15054 mips32_op
= OPC_MADDU
;
15057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15058 mips32_op
= OPC_MSUB
;
15061 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15062 mips32_op
= OPC_MSUBU
;
15064 check_insn(ctx
, ISA_MIPS32
);
15065 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15068 goto pool32axf_invalid
;
15079 generate_exception_err(ctx
, EXCP_CpU
, 2);
15082 goto pool32axf_invalid
;
15087 case JALR
: /* JALRC */
15088 case JALR_HB
: /* JALRC_HB */
15089 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15090 /* JALRC, JALRC_HB */
15091 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15093 /* JALR, JALR_HB */
15094 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15095 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15100 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15101 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15102 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15105 goto pool32axf_invalid
;
15111 check_cp0_enabled(ctx
);
15112 check_insn(ctx
, ISA_MIPS32R2
);
15113 gen_load_srsgpr(rs
, rt
);
15116 check_cp0_enabled(ctx
);
15117 check_insn(ctx
, ISA_MIPS32R2
);
15118 gen_store_srsgpr(rs
, rt
);
15121 goto pool32axf_invalid
;
15124 #ifndef CONFIG_USER_ONLY
/* Privileged CP0 operations, dispatched through gen_cp0(). */
15128 mips32_op
= OPC_TLBP
;
15131 mips32_op
= OPC_TLBR
;
15134 mips32_op
= OPC_TLBWI
;
15137 mips32_op
= OPC_TLBWR
;
15140 mips32_op
= OPC_TLBINV
;
15143 mips32_op
= OPC_TLBINVF
;
15146 mips32_op
= OPC_WAIT
;
15149 mips32_op
= OPC_DERET
;
15152 mips32_op
= OPC_ERET
;
15154 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
)
;
15157 goto pool32axf_invalid
;
15163 check_cp0_enabled(ctx
);
15165 TCGv t0
= tcg_temp_new();
15167 save_cpu_state(ctx
, 1);
15168 gen_helper_di(t0
, cpu_env
);
15169 gen_store_gpr(t0
, rs
);
15170 /* Stop translation as we may have switched the execution mode */
15171 ctx
->base
.is_jmp
= DISAS_STOP
;
15176 check_cp0_enabled(ctx
);
15178 TCGv t0
= tcg_temp_new();
15180 save_cpu_state(ctx
, 1);
15181 gen_helper_ei(t0
, cpu_env
);
15182 gen_store_gpr(t0
, rs
);
15183 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15184 of translated code to check for pending interrupts. */
15185 gen_save_pc(ctx
->base
.pc_next
+ 4);
15186 ctx
->base
.is_jmp
= DISAS_EXIT
;
15191 goto pool32axf_invalid
;
15198 gen_sync(extract32(ctx
->opcode
, 16, 5));
15201 generate_exception_end(ctx
, EXCP_SYSCALL
);
15204 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15205 gen_helper_do_semihosting(cpu_env
);
15207 check_insn(ctx
, ISA_MIPS32
);
15208 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15209 generate_exception_end(ctx
, EXCP_RI
);
15211 generate_exception_end(ctx
, EXCP_DBp
);
15216 goto pool32axf_invalid
;
/* HI/LO moves, with and without an explicit accumulator index. */
15220 switch (minor
& 3) {
15222 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15225 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15228 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15231 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15234 goto pool32axf_invalid
;
15238 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15241 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15244 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15247 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15250 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15253 goto pool32axf_invalid
;
15258 MIPS_INVAL("pool32axf");
15259 generate_exception_end(ctx
, EXCP_RI
);
15264 /* Values for microMIPS fmt field. Variable-width, depending on which
15265 formats the instruction supports. */
/*
 * gen_pool32fxf: decode the microMIPS POOL32Fxf FPU extended minor-opcode
 * group.  Most arms just select the equivalent MIPS32 FPU opcode into
 * mips32_op, then fall into a shared gen_cp1()/gen_farith() emission.
 *
 * NOTE(review): this text is extraction-mangled -- statements are split
 * across lines and fused with original source line numbers, and many
 * structural lines (break statements, braces) were dropped.  Restore
 * from a pristine copy before compiling; no tokens changed here.
 */
15284 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15286 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15287 uint32_t mips32_op
;
15289 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15290 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15291 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15293 switch (extension
) {
/* CP1 register/control moves. */
15294 case FLOAT_1BIT_FMT(CFC1
, 0):
15295 mips32_op
= OPC_CFC1
;
15297 case FLOAT_1BIT_FMT(CTC1
, 0):
15298 mips32_op
= OPC_CTC1
;
15300 case FLOAT_1BIT_FMT(MFC1
, 0):
15301 mips32_op
= OPC_MFC1
;
15303 case FLOAT_1BIT_FMT(MTC1
, 0):
15304 mips32_op
= OPC_MTC1
;
15306 case FLOAT_1BIT_FMT(MFHC1
, 0):
15307 mips32_op
= OPC_MFHC1
;
15309 case FLOAT_1BIT_FMT(MTHC1
, 0):
15310 mips32_op
= OPC_MTHC1
;
15312 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15315 /* Reciprocal square root */
15316 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15317 mips32_op
= OPC_RSQRT_S
;
15319 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15320 mips32_op
= OPC_RSQRT_D
;
15324 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15325 mips32_op
= OPC_SQRT_S
;
15327 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15328 mips32_op
= OPC_SQRT_D
;
15332 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15333 mips32_op
= OPC_RECIP_S
;
15335 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15336 mips32_op
= OPC_RECIP_D
;
/* FP -> integer rounding conversions (floor/ceil/trunc/round). */
15340 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15341 mips32_op
= OPC_FLOOR_L_S
;
15343 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15344 mips32_op
= OPC_FLOOR_L_D
;
15346 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15347 mips32_op
= OPC_FLOOR_W_S
;
15349 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15350 mips32_op
= OPC_FLOOR_W_D
;
15354 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15355 mips32_op
= OPC_CEIL_L_S
;
15357 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15358 mips32_op
= OPC_CEIL_L_D
;
15360 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15361 mips32_op
= OPC_CEIL_W_S
;
15363 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15364 mips32_op
= OPC_CEIL_W_D
;
15368 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15369 mips32_op
= OPC_TRUNC_L_S
;
15371 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15372 mips32_op
= OPC_TRUNC_L_D
;
15374 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15375 mips32_op
= OPC_TRUNC_W_S
;
15377 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15378 mips32_op
= OPC_TRUNC_W_D
;
15382 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15383 mips32_op
= OPC_ROUND_L_S
;
15385 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15386 mips32_op
= OPC_ROUND_L_D
;
15388 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15389 mips32_op
= OPC_ROUND_W_S
;
15391 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15392 mips32_op
= OPC_ROUND_W_D
;
15395 /* Integer to floating-point conversion */
15396 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15397 mips32_op
= OPC_CVT_L_S
;
15399 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15400 mips32_op
= OPC_CVT_L_D
;
15402 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15403 mips32_op
= OPC_CVT_W_S
;
15405 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15406 mips32_op
= OPC_CVT_W_D
;
15409 /* Paired-foo conversions */
15410 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15411 mips32_op
= OPC_CVT_S_PL
;
15413 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15414 mips32_op
= OPC_CVT_S_PU
;
15416 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15417 mips32_op
= OPC_CVT_PW_PS
;
15419 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15420 mips32_op
= OPC_CVT_PS_PW
;
15423 /* Floating-point moves */
15424 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15425 mips32_op
= OPC_MOV_S
;
15427 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15428 mips32_op
= OPC_MOV_D
;
15430 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15431 mips32_op
= OPC_MOV_PS
;
15434 /* Absolute value */
15435 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15436 mips32_op
= OPC_ABS_S
;
15438 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15439 mips32_op
= OPC_ABS_D
;
15441 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15442 mips32_op
= OPC_ABS_PS
;
15446 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15447 mips32_op
= OPC_NEG_S
;
15449 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15450 mips32_op
= OPC_NEG_D
;
15452 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15453 mips32_op
= OPC_NEG_PS
;
15456 /* Reciprocal square root step */
15457 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15458 mips32_op
= OPC_RSQRT1_S
;
15460 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15461 mips32_op
= OPC_RSQRT1_D
;
15463 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15464 mips32_op
= OPC_RSQRT1_PS
;
15467 /* Reciprocal step */
15468 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15469 mips32_op
= OPC_RECIP1_S
;
15471 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15472 mips32_op
= OPC_RECIP1_S
;
15474 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15475 mips32_op
= OPC_RECIP1_PS
;
/*
 * Tail of gen_pool32fxf: format conversions, the shared gen_farith()
 * emission point, and the MOVT/MOVF conditional moves (pre-R6 only).
 * NOTE(review): extraction-mangled text -- statements split across lines
 * and fused with original line numbers; break statements dropped.
 * No tokens changed here.
 */
15478 /* Conversions from double */
15479 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15480 mips32_op
= OPC_CVT_D_S
;
15482 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15483 mips32_op
= OPC_CVT_D_W
;
15485 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15486 mips32_op
= OPC_CVT_D_L
;
15489 /* Conversions from single */
15490 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15491 mips32_op
= OPC_CVT_S_D
;
15493 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15494 mips32_op
= OPC_CVT_S_W
;
15496 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15497 mips32_op
= OPC_CVT_S_L
;
15499 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15502 /* Conditional moves on floating-point codes */
15503 case COND_FLOAT_MOV(MOVT
, 0):
15504 case COND_FLOAT_MOV(MOVT
, 1):
15505 case COND_FLOAT_MOV(MOVT
, 2):
15506 case COND_FLOAT_MOV(MOVT
, 3):
15507 case COND_FLOAT_MOV(MOVT
, 4):
15508 case COND_FLOAT_MOV(MOVT
, 5):
15509 case COND_FLOAT_MOV(MOVT
, 6):
15510 case COND_FLOAT_MOV(MOVT
, 7):
15511 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15512 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15514 case COND_FLOAT_MOV(MOVF
, 0):
15515 case COND_FLOAT_MOV(MOVF
, 1):
15516 case COND_FLOAT_MOV(MOVF
, 2):
15517 case COND_FLOAT_MOV(MOVF
, 3):
15518 case COND_FLOAT_MOV(MOVF
, 4):
15519 case COND_FLOAT_MOV(MOVF
, 5):
15520 case COND_FLOAT_MOV(MOVF
, 6):
15521 case COND_FLOAT_MOV(MOVF
, 7):
15522 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15523 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15526 MIPS_INVAL("pool32fxf");
15527 generate_exception_end(ctx
, EXCP_RI
);
15532 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15536 int rt
, rs
, rd
, rr
;
15538 uint32_t op
, minor
, minor2
, mips32_op
;
15539 uint32_t cond
, fmt
, cc
;
15541 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15542 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15544 rt
= (ctx
->opcode
>> 21) & 0x1f;
15545 rs
= (ctx
->opcode
>> 16) & 0x1f;
15546 rd
= (ctx
->opcode
>> 11) & 0x1f;
15547 rr
= (ctx
->opcode
>> 6) & 0x1f;
15548 imm
= (int16_t) ctx
->opcode
;
15550 op
= (ctx
->opcode
>> 26) & 0x3f;
15553 minor
= ctx
->opcode
& 0x3f;
15556 minor
= (ctx
->opcode
>> 6) & 0xf;
15559 mips32_op
= OPC_SLL
;
15562 mips32_op
= OPC_SRA
;
15565 mips32_op
= OPC_SRL
;
15568 mips32_op
= OPC_ROTR
;
15570 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15573 check_insn(ctx
, ISA_MIPS32R6
);
15574 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15577 check_insn(ctx
, ISA_MIPS32R6
);
15578 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15581 check_insn(ctx
, ISA_MIPS32R6
);
15582 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15585 goto pool32a_invalid
;
15589 minor
= (ctx
->opcode
>> 6) & 0xf;
15593 mips32_op
= OPC_ADD
;
15596 mips32_op
= OPC_ADDU
;
15599 mips32_op
= OPC_SUB
;
15602 mips32_op
= OPC_SUBU
;
15605 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15606 mips32_op
= OPC_MUL
;
15608 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15612 mips32_op
= OPC_SLLV
;
15615 mips32_op
= OPC_SRLV
;
15618 mips32_op
= OPC_SRAV
;
15621 mips32_op
= OPC_ROTRV
;
15623 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15625 /* Logical operations */
15627 mips32_op
= OPC_AND
;
15630 mips32_op
= OPC_OR
;
15633 mips32_op
= OPC_NOR
;
15636 mips32_op
= OPC_XOR
;
15638 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15640 /* Set less than */
15642 mips32_op
= OPC_SLT
;
15645 mips32_op
= OPC_SLTU
;
15647 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15650 goto pool32a_invalid
;
15654 minor
= (ctx
->opcode
>> 6) & 0xf;
15656 /* Conditional moves */
15657 case MOVN
: /* MUL */
15658 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15660 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15663 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15666 case MOVZ
: /* MUH */
15667 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15669 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15672 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15676 check_insn(ctx
, ISA_MIPS32R6
);
15677 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15680 check_insn(ctx
, ISA_MIPS32R6
);
15681 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15683 case LWXS
: /* DIV */
15684 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15686 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15689 gen_ldxs(ctx
, rs
, rt
, rd
);
15693 check_insn(ctx
, ISA_MIPS32R6
);
15694 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15697 check_insn(ctx
, ISA_MIPS32R6
);
15698 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15701 check_insn(ctx
, ISA_MIPS32R6
);
15702 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15705 goto pool32a_invalid
;
15709 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15712 check_insn(ctx
, ISA_MIPS32R6
);
15713 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15714 extract32(ctx
->opcode
, 9, 2));
15717 check_insn(ctx
, ISA_MIPS32R6
);
15718 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15721 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15724 gen_pool32axf(env
, ctx
, rt
, rs
);
15727 generate_exception_end(ctx
, EXCP_BREAK
);
15730 check_insn(ctx
, ISA_MIPS32R6
);
15731 generate_exception_end(ctx
, EXCP_RI
);
15735 MIPS_INVAL("pool32a");
15736 generate_exception_end(ctx
, EXCP_RI
);
15741 minor
= (ctx
->opcode
>> 12) & 0xf;
15744 check_cp0_enabled(ctx
);
15745 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15746 gen_cache_operation(ctx
, rt
, rs
, imm
);
15751 /* COP2: Not implemented. */
15752 generate_exception_err(ctx
, EXCP_CpU
, 2);
15754 #ifdef TARGET_MIPS64
15757 check_insn(ctx
, ISA_MIPS3
);
15758 check_mips_64(ctx
);
15763 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15765 #ifdef TARGET_MIPS64
15768 check_insn(ctx
, ISA_MIPS3
);
15769 check_mips_64(ctx
);
15774 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15777 MIPS_INVAL("pool32b");
15778 generate_exception_end(ctx
, EXCP_RI
);
15783 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15784 minor
= ctx
->opcode
& 0x3f;
15785 check_cp1_enabled(ctx
);
15788 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15789 mips32_op
= OPC_ALNV_PS
;
15792 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15793 mips32_op
= OPC_MADD_S
;
15796 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15797 mips32_op
= OPC_MADD_D
;
15800 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15801 mips32_op
= OPC_MADD_PS
;
15804 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15805 mips32_op
= OPC_MSUB_S
;
15808 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15809 mips32_op
= OPC_MSUB_D
;
15812 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15813 mips32_op
= OPC_MSUB_PS
;
15816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15817 mips32_op
= OPC_NMADD_S
;
15820 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15821 mips32_op
= OPC_NMADD_D
;
15824 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15825 mips32_op
= OPC_NMADD_PS
;
15828 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15829 mips32_op
= OPC_NMSUB_S
;
15832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15833 mips32_op
= OPC_NMSUB_D
;
15836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15837 mips32_op
= OPC_NMSUB_PS
;
15839 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15841 case CABS_COND_FMT
:
15842 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15843 cond
= (ctx
->opcode
>> 6) & 0xf;
15844 cc
= (ctx
->opcode
>> 13) & 0x7;
15845 fmt
= (ctx
->opcode
>> 10) & 0x3;
15848 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15851 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15854 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15857 goto pool32f_invalid
;
15861 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15862 cond
= (ctx
->opcode
>> 6) & 0xf;
15863 cc
= (ctx
->opcode
>> 13) & 0x7;
15864 fmt
= (ctx
->opcode
>> 10) & 0x3;
15867 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15870 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15873 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15876 goto pool32f_invalid
;
15880 check_insn(ctx
, ISA_MIPS32R6
);
15881 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15884 check_insn(ctx
, ISA_MIPS32R6
);
15885 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15888 gen_pool32fxf(ctx
, rt
, rs
);
15892 switch ((ctx
->opcode
>> 6) & 0x7) {
15894 mips32_op
= OPC_PLL_PS
;
15897 mips32_op
= OPC_PLU_PS
;
15900 mips32_op
= OPC_PUL_PS
;
15903 mips32_op
= OPC_PUU_PS
;
15906 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15907 mips32_op
= OPC_CVT_PS_S
;
15909 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15912 goto pool32f_invalid
;
15916 check_insn(ctx
, ISA_MIPS32R6
);
15917 switch ((ctx
->opcode
>> 9) & 0x3) {
15919 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15922 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15925 goto pool32f_invalid
;
15930 switch ((ctx
->opcode
>> 6) & 0x7) {
15932 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15933 mips32_op
= OPC_LWXC1
;
15936 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15937 mips32_op
= OPC_SWXC1
;
15940 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15941 mips32_op
= OPC_LDXC1
;
15944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15945 mips32_op
= OPC_SDXC1
;
15948 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15949 mips32_op
= OPC_LUXC1
;
15952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15953 mips32_op
= OPC_SUXC1
;
15955 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
15958 goto pool32f_invalid
;
15962 check_insn(ctx
, ISA_MIPS32R6
);
15963 switch ((ctx
->opcode
>> 9) & 0x3) {
15965 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
15968 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
15971 goto pool32f_invalid
;
15976 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15977 fmt
= (ctx
->opcode
>> 9) & 0x3;
15978 switch ((ctx
->opcode
>> 6) & 0x7) {
15982 mips32_op
= OPC_RSQRT2_S
;
15985 mips32_op
= OPC_RSQRT2_D
;
15988 mips32_op
= OPC_RSQRT2_PS
;
15991 goto pool32f_invalid
;
15997 mips32_op
= OPC_RECIP2_S
;
16000 mips32_op
= OPC_RECIP2_D
;
16003 mips32_op
= OPC_RECIP2_PS
;
16006 goto pool32f_invalid
;
16010 mips32_op
= OPC_ADDR_PS
;
16013 mips32_op
= OPC_MULR_PS
;
16015 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16018 goto pool32f_invalid
;
16022 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16023 cc
= (ctx
->opcode
>> 13) & 0x7;
16024 fmt
= (ctx
->opcode
>> 9) & 0x3;
16025 switch ((ctx
->opcode
>> 6) & 0x7) {
16026 case MOVF_FMT
: /* RINT_FMT */
16027 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16031 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16034 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16037 goto pool32f_invalid
;
16043 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16046 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16050 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16053 goto pool32f_invalid
;
16057 case MOVT_FMT
: /* CLASS_FMT */
16058 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16062 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16065 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16068 goto pool32f_invalid
;
16074 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16077 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16081 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16084 goto pool32f_invalid
;
16089 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16092 goto pool32f_invalid
;
16095 #define FINSN_3ARG_SDPS(prfx) \
16096 switch ((ctx->opcode >> 8) & 0x3) { \
16098 mips32_op = OPC_##prfx##_S; \
16101 mips32_op = OPC_##prfx##_D; \
16103 case FMT_SDPS_PS: \
16105 mips32_op = OPC_##prfx##_PS; \
16108 goto pool32f_invalid; \
16111 check_insn(ctx
, ISA_MIPS32R6
);
16112 switch ((ctx
->opcode
>> 9) & 0x3) {
16114 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16117 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16120 goto pool32f_invalid
;
16124 check_insn(ctx
, ISA_MIPS32R6
);
16125 switch ((ctx
->opcode
>> 9) & 0x3) {
16127 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16130 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16133 goto pool32f_invalid
;
16137 /* regular FP ops */
16138 switch ((ctx
->opcode
>> 6) & 0x3) {
16140 FINSN_3ARG_SDPS(ADD
);
16143 FINSN_3ARG_SDPS(SUB
);
16146 FINSN_3ARG_SDPS(MUL
);
16149 fmt
= (ctx
->opcode
>> 8) & 0x3;
16151 mips32_op
= OPC_DIV_D
;
16152 } else if (fmt
== 0) {
16153 mips32_op
= OPC_DIV_S
;
16155 goto pool32f_invalid
;
16159 goto pool32f_invalid
;
16164 switch ((ctx
->opcode
>> 6) & 0x7) {
16165 case MOVN_FMT
: /* SELEQZ_FMT */
16166 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16168 switch ((ctx
->opcode
>> 9) & 0x3) {
16170 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16173 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16176 goto pool32f_invalid
;
16180 FINSN_3ARG_SDPS(MOVN
);
16184 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16185 FINSN_3ARG_SDPS(MOVN
);
16187 case MOVZ_FMT
: /* SELNEZ_FMT */
16188 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16190 switch ((ctx
->opcode
>> 9) & 0x3) {
16192 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16195 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16198 goto pool32f_invalid
;
16202 FINSN_3ARG_SDPS(MOVZ
);
16206 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16207 FINSN_3ARG_SDPS(MOVZ
);
16210 check_insn(ctx
, ISA_MIPS32R6
);
16211 switch ((ctx
->opcode
>> 9) & 0x3) {
16213 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16216 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16219 goto pool32f_invalid
;
16223 check_insn(ctx
, ISA_MIPS32R6
);
16224 switch ((ctx
->opcode
>> 9) & 0x3) {
16226 mips32_op
= OPC_MADDF_S
;
16229 mips32_op
= OPC_MADDF_D
;
16232 goto pool32f_invalid
;
16236 check_insn(ctx
, ISA_MIPS32R6
);
16237 switch ((ctx
->opcode
>> 9) & 0x3) {
16239 mips32_op
= OPC_MSUBF_S
;
16242 mips32_op
= OPC_MSUBF_D
;
16245 goto pool32f_invalid
;
16249 goto pool32f_invalid
;
16253 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16257 MIPS_INVAL("pool32f");
16258 generate_exception_end(ctx
, EXCP_RI
);
16262 generate_exception_err(ctx
, EXCP_CpU
, 1);
16266 minor
= (ctx
->opcode
>> 21) & 0x1f;
16269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16270 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16274 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16275 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16278 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16279 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16280 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16283 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16284 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16288 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16289 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16293 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16294 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16297 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16298 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16301 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16302 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16306 case TLTI
: /* BC1EQZC */
16307 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16309 check_cp1_enabled(ctx
);
16310 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16313 mips32_op
= OPC_TLTI
;
16317 case TGEI
: /* BC1NEZC */
16318 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16320 check_cp1_enabled(ctx
);
16321 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16324 mips32_op
= OPC_TGEI
;
16329 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16330 mips32_op
= OPC_TLTIU
;
16333 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16334 mips32_op
= OPC_TGEIU
;
16336 case TNEI
: /* SYNCI */
16337 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16339 /* Break the TB to be able to sync copied instructions
16341 ctx
->base
.is_jmp
= DISAS_STOP
;
16344 mips32_op
= OPC_TNEI
;
16349 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16350 mips32_op
= OPC_TEQI
;
16352 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16358 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16359 4, rs
, 0, imm
<< 1, 0);
16360 /* Compact branches don't have a delay slot, so just let
16361 the normal delay slot handling take us to the branch
16365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16366 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16369 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16370 /* Break the TB to be able to sync copied instructions
16372 ctx
->base
.is_jmp
= DISAS_STOP
;
16376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16377 /* COP2: Not implemented. */
16378 generate_exception_err(ctx
, EXCP_CpU
, 2);
16381 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16382 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16386 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16389 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16390 mips32_op
= OPC_BC1FANY4
;
16393 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16394 mips32_op
= OPC_BC1TANY4
;
16397 check_insn(ctx
, ASE_MIPS3D
);
16400 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16401 check_cp1_enabled(ctx
);
16402 gen_compute_branch1(ctx
, mips32_op
,
16403 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16405 generate_exception_err(ctx
, EXCP_CpU
, 1);
16410 /* MIPS DSP: not implemented */
16413 MIPS_INVAL("pool32i");
16414 generate_exception_end(ctx
, EXCP_RI
);
16419 minor
= (ctx
->opcode
>> 12) & 0xf;
16420 offset
= sextract32(ctx
->opcode
, 0,
16421 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16425 mips32_op
= OPC_LWL
;
16428 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16429 mips32_op
= OPC_SWL
;
16432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16433 mips32_op
= OPC_LWR
;
16436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16437 mips32_op
= OPC_SWR
;
16439 #if defined(TARGET_MIPS64)
16441 check_insn(ctx
, ISA_MIPS3
);
16442 check_mips_64(ctx
);
16443 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16444 mips32_op
= OPC_LDL
;
16447 check_insn(ctx
, ISA_MIPS3
);
16448 check_mips_64(ctx
);
16449 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16450 mips32_op
= OPC_SDL
;
16453 check_insn(ctx
, ISA_MIPS3
);
16454 check_mips_64(ctx
);
16455 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16456 mips32_op
= OPC_LDR
;
16459 check_insn(ctx
, ISA_MIPS3
);
16460 check_mips_64(ctx
);
16461 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16462 mips32_op
= OPC_SDR
;
16465 check_insn(ctx
, ISA_MIPS3
);
16466 check_mips_64(ctx
);
16467 mips32_op
= OPC_LWU
;
16470 check_insn(ctx
, ISA_MIPS3
);
16471 check_mips_64(ctx
);
16472 mips32_op
= OPC_LLD
;
16476 mips32_op
= OPC_LL
;
16479 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16482 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16485 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16487 #if defined(TARGET_MIPS64)
16489 check_insn(ctx
, ISA_MIPS3
);
16490 check_mips_64(ctx
);
16491 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16496 MIPS_INVAL("pool32c ld-eva");
16497 generate_exception_end(ctx
, EXCP_RI
);
16500 check_cp0_enabled(ctx
);
16502 minor2
= (ctx
->opcode
>> 9) & 0x7;
16503 offset
= sextract32(ctx
->opcode
, 0, 9);
16506 mips32_op
= OPC_LBUE
;
16509 mips32_op
= OPC_LHUE
;
16512 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16513 mips32_op
= OPC_LWLE
;
16516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16517 mips32_op
= OPC_LWRE
;
16520 mips32_op
= OPC_LBE
;
16523 mips32_op
= OPC_LHE
;
16526 mips32_op
= OPC_LLE
;
16529 mips32_op
= OPC_LWE
;
16535 MIPS_INVAL("pool32c st-eva");
16536 generate_exception_end(ctx
, EXCP_RI
);
16539 check_cp0_enabled(ctx
);
16541 minor2
= (ctx
->opcode
>> 9) & 0x7;
16542 offset
= sextract32(ctx
->opcode
, 0, 9);
16545 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16546 mips32_op
= OPC_SWLE
;
16549 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16550 mips32_op
= OPC_SWRE
;
16553 /* Treat as no-op */
16554 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16555 /* hint codes 24-31 are reserved and signal RI */
16556 generate_exception(ctx
, EXCP_RI
);
16560 /* Treat as no-op */
16561 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16562 gen_cache_operation(ctx
, rt
, rs
, offset
);
16566 mips32_op
= OPC_SBE
;
16569 mips32_op
= OPC_SHE
;
16572 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16575 mips32_op
= OPC_SWE
;
16580 /* Treat as no-op */
16581 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16582 /* hint codes 24-31 are reserved and signal RI */
16583 generate_exception(ctx
, EXCP_RI
);
16587 MIPS_INVAL("pool32c");
16588 generate_exception_end(ctx
, EXCP_RI
);
16592 case ADDI32
: /* AUI, LUI */
16593 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16595 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16598 mips32_op
= OPC_ADDI
;
16603 mips32_op
= OPC_ADDIU
;
16605 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16608 /* Logical operations */
16610 mips32_op
= OPC_ORI
;
16613 mips32_op
= OPC_XORI
;
16616 mips32_op
= OPC_ANDI
;
16618 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16621 /* Set less than immediate */
16623 mips32_op
= OPC_SLTI
;
16626 mips32_op
= OPC_SLTIU
;
16628 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16631 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16632 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16633 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16634 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16636 case JALS32
: /* BOVC, BEQC, BEQZALC */
16637 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16640 mips32_op
= OPC_BOVC
;
16641 } else if (rs
< rt
&& rs
== 0) {
16643 mips32_op
= OPC_BEQZALC
;
16646 mips32_op
= OPC_BEQC
;
16648 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16651 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16652 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16653 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16656 case BEQ32
: /* BC */
16657 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16659 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16660 sextract32(ctx
->opcode
<< 1, 0, 27));
16663 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16666 case BNE32
: /* BALC */
16667 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16669 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16670 sextract32(ctx
->opcode
<< 1, 0, 27));
16673 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16676 case J32
: /* BGTZC, BLTZC, BLTC */
16677 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16678 if (rs
== 0 && rt
!= 0) {
16680 mips32_op
= OPC_BGTZC
;
16681 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16683 mips32_op
= OPC_BLTZC
;
16686 mips32_op
= OPC_BLTC
;
16688 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16691 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16692 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16695 case JAL32
: /* BLEZC, BGEZC, BGEC */
16696 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16697 if (rs
== 0 && rt
!= 0) {
16699 mips32_op
= OPC_BLEZC
;
16700 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16702 mips32_op
= OPC_BGEZC
;
16705 mips32_op
= OPC_BGEC
;
16707 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16710 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16711 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16712 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16715 /* Floating point (COP1) */
16717 mips32_op
= OPC_LWC1
;
16720 mips32_op
= OPC_LDC1
;
16723 mips32_op
= OPC_SWC1
;
16726 mips32_op
= OPC_SDC1
;
16728 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16730 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16731 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16732 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16733 switch ((ctx
->opcode
>> 16) & 0x1f) {
16742 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16745 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16748 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16758 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16761 generate_exception(ctx
, EXCP_RI
);
16766 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16767 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16769 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16772 case BNVC
: /* BNEC, BNEZALC */
16773 check_insn(ctx
, ISA_MIPS32R6
);
16776 mips32_op
= OPC_BNVC
;
16777 } else if (rs
< rt
&& rs
== 0) {
16779 mips32_op
= OPC_BNEZALC
;
16782 mips32_op
= OPC_BNEC
;
16784 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16786 case R6_BNEZC
: /* JIALC */
16787 check_insn(ctx
, ISA_MIPS32R6
);
16790 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16791 sextract32(ctx
->opcode
<< 1, 0, 22));
16794 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16797 case R6_BEQZC
: /* JIC */
16798 check_insn(ctx
, ISA_MIPS32R6
);
16801 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16802 sextract32(ctx
->opcode
<< 1, 0, 22));
16805 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16808 case BLEZALC
: /* BGEZALC, BGEUC */
16809 check_insn(ctx
, ISA_MIPS32R6
);
16810 if (rs
== 0 && rt
!= 0) {
16812 mips32_op
= OPC_BLEZALC
;
16813 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16815 mips32_op
= OPC_BGEZALC
;
16818 mips32_op
= OPC_BGEUC
;
16820 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16822 case BGTZALC
: /* BLTZALC, BLTUC */
16823 check_insn(ctx
, ISA_MIPS32R6
);
16824 if (rs
== 0 && rt
!= 0) {
16826 mips32_op
= OPC_BGTZALC
;
16827 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16829 mips32_op
= OPC_BLTZALC
;
16832 mips32_op
= OPC_BLTUC
;
16834 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16836 /* Loads and stores */
16838 mips32_op
= OPC_LB
;
16841 mips32_op
= OPC_LBU
;
16844 mips32_op
= OPC_LH
;
16847 mips32_op
= OPC_LHU
;
16850 mips32_op
= OPC_LW
;
16852 #ifdef TARGET_MIPS64
16854 check_insn(ctx
, ISA_MIPS3
);
16855 check_mips_64(ctx
);
16856 mips32_op
= OPC_LD
;
16859 check_insn(ctx
, ISA_MIPS3
);
16860 check_mips_64(ctx
);
16861 mips32_op
= OPC_SD
;
16865 mips32_op
= OPC_SB
;
16868 mips32_op
= OPC_SH
;
16871 mips32_op
= OPC_SW
;
16874 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16877 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16880 generate_exception_end(ctx
, EXCP_RI
);
16885 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16889 /* make sure instructions are on a halfword boundary */
16890 if (ctx
->base
.pc_next
& 0x1) {
16891 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16892 generate_exception_end(ctx
, EXCP_AdEL
);
16896 op
= (ctx
->opcode
>> 10) & 0x3f;
16897 /* Enforce properly-sized instructions in a delay slot */
16898 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16899 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16901 /* POOL32A, POOL32B, POOL32I, POOL32C */
16903 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16905 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16907 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16909 /* LB32, LH32, LWC132, LDC132, LW32 */
16910 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16911 generate_exception_end(ctx
, EXCP_RI
);
16916 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16918 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16920 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16921 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16922 generate_exception_end(ctx
, EXCP_RI
);
16932 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16933 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16934 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
16937 switch (ctx
->opcode
& 0x1) {
16945 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16946 /* In the Release 6 the register number location in
16947 * the instruction encoding has changed.
16949 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
16951 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
16957 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16958 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
16959 int amount
= (ctx
->opcode
>> 1) & 0x7;
16961 amount
= amount
== 0 ? 8 : amount
;
16963 switch (ctx
->opcode
& 0x1) {
16972 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
16976 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16977 gen_pool16c_r6_insn(ctx
);
16979 gen_pool16c_insn(ctx
);
16984 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16985 int rb
= 28; /* GP */
16986 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
16988 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
16992 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16993 if (ctx
->opcode
& 1) {
16994 generate_exception_end(ctx
, EXCP_RI
);
16997 int enc_dest
= uMIPS_RD(ctx
->opcode
);
16998 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
16999 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17000 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17005 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17006 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17007 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17008 offset
= (offset
== 0xf ? -1 : offset
);
17010 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17015 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17016 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17017 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17019 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17024 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17025 int rb
= 29; /* SP */
17026 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17028 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17033 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17034 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17035 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17037 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17042 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17043 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17044 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17046 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17051 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17052 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17053 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17055 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17060 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17061 int rb
= 29; /* SP */
17062 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17064 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17069 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17070 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17071 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17073 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17078 int rd
= uMIPS_RD5(ctx
->opcode
);
17079 int rs
= uMIPS_RS5(ctx
->opcode
);
17081 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17088 switch (ctx
->opcode
& 0x1) {
17098 switch (ctx
->opcode
& 0x1) {
17103 gen_addiur1sp(ctx
);
17107 case B16
: /* BC16 */
17108 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17109 sextract32(ctx
->opcode
, 0, 10) << 1,
17110 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17112 case BNEZ16
: /* BNEZC16 */
17113 case BEQZ16
: /* BEQZC16 */
17114 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17115 mmreg(uMIPS_RD(ctx
->opcode
)),
17116 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17117 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17122 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17123 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17125 imm
= (imm
== 0x7f ? -1 : imm
);
17126 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17132 generate_exception_end(ctx
, EXCP_RI
);
17135 decode_micromips32_opc(env
, ctx
);
17148 /* MAJOR, P16, and P32 pools opcodes */
17152 NM_MOVE_BALC
= 0x02,
17160 NM_P16_SHIFT
= 0x0c,
17178 NM_P_LS_U12
= 0x21,
17188 NM_P16_ADDU
= 0x2c,
17202 NM_MOVEPREV
= 0x3f,
17205 /* POOL32A instruction pool */
17207 NM_POOL32A0
= 0x00,
17208 NM_SPECIAL2
= 0x01,
17211 NM_POOL32A5
= 0x05,
17212 NM_POOL32A7
= 0x07,
17215 /* P.GP.W instruction pool */
17217 NM_ADDIUGP_W
= 0x00,
17222 /* P48I instruction pool */
17226 NM_ADDIUGP48
= 0x02,
17227 NM_ADDIUPC48
= 0x03,
17232 /* P.U12 instruction pool */
17241 NM_ADDIUNEG
= 0x08,
17248 /* POOL32F instruction pool */
17250 NM_POOL32F_0
= 0x00,
17251 NM_POOL32F_3
= 0x03,
17252 NM_POOL32F_5
= 0x05,
17255 /* POOL32S instruction pool */
17257 NM_POOL32S_0
= 0x00,
17258 NM_POOL32S_4
= 0x04,
17261 /* P.LUI instruction pool */
17267 /* P.GP.BH instruction pool */
17272 NM_ADDIUGP_B
= 0x03,
17275 NM_P_GP_CP1
= 0x06,
17278 /* P.LS.U12 instruction pool */
17283 NM_P_PREFU12
= 0x03,
17296 /* P.LS.S9 instruction pool */
17302 NM_P_LS_UAWM
= 0x05,
17305 /* P.BAL instruction pool */
17311 /* P.J instruction pool */
17314 NM_JALRC_HB
= 0x01,
17315 NM_P_BALRSC
= 0x08,
17318 /* P.BR1 instruction pool */
17326 /* P.BR2 instruction pool */
17333 /* P.BRI instruction pool */
17345 /* P16.SHIFT instruction pool */
17351 /* POOL16C instruction pool */
17353 NM_POOL16C_0
= 0x00,
17357 /* P16.A1 instruction pool */
17359 NM_ADDIUR1SP
= 0x01,
17362 /* P16.A2 instruction pool */
17365 NM_P_ADDIURS5
= 0x01,
17368 /* P16.ADDU instruction pool */
17374 /* P16.SR instruction pool */
17377 NM_RESTORE_JRC16
= 0x01,
17380 /* P16.4X4 instruction pool */
17386 /* P16.LB instruction pool */
17393 /* P16.LH instruction pool */
17400 /* P.RI instruction pool */
17403 NM_P_SYSCALL
= 0x01,
17408 /* POOL32A0 instruction pool */
17443 NM_D_E_MT_VPE
= 0x56,
17451 /* POOL32A5 instruction pool */
17453 NM_CMP_EQ_PH
= 0x00,
17454 NM_CMP_LT_PH
= 0x08,
17455 NM_CMP_LE_PH
= 0x10,
17456 NM_CMPGU_EQ_QB
= 0x18,
17457 NM_CMPGU_LT_QB
= 0x20,
17458 NM_CMPGU_LE_QB
= 0x28,
17459 NM_CMPGDU_EQ_QB
= 0x30,
17460 NM_CMPGDU_LT_QB
= 0x38,
17461 NM_CMPGDU_LE_QB
= 0x40,
17462 NM_CMPU_EQ_QB
= 0x48,
17463 NM_CMPU_LT_QB
= 0x50,
17464 NM_CMPU_LE_QB
= 0x58,
17465 NM_ADDQ_S_W
= 0x60,
17466 NM_SUBQ_S_W
= 0x68,
17470 NM_ADDQ_S_PH
= 0x01,
17471 NM_ADDQH_R_PH
= 0x09,
17472 NM_ADDQH_R_W
= 0x11,
17473 NM_ADDU_S_QB
= 0x19,
17474 NM_ADDU_S_PH
= 0x21,
17475 NM_ADDUH_R_QB
= 0x29,
17476 NM_SHRAV_R_PH
= 0x31,
17477 NM_SHRAV_R_QB
= 0x39,
17478 NM_SUBQ_S_PH
= 0x41,
17479 NM_SUBQH_R_PH
= 0x49,
17480 NM_SUBQH_R_W
= 0x51,
17481 NM_SUBU_S_QB
= 0x59,
17482 NM_SUBU_S_PH
= 0x61,
17483 NM_SUBUH_R_QB
= 0x69,
17484 NM_SHLLV_S_PH
= 0x71,
17485 NM_PRECR_SRA_R_PH_W
= 0x79,
17487 NM_MULEU_S_PH_QBL
= 0x12,
17488 NM_MULEU_S_PH_QBR
= 0x1a,
17489 NM_MULQ_RS_PH
= 0x22,
17490 NM_MULQ_S_PH
= 0x2a,
17491 NM_MULQ_RS_W
= 0x32,
17492 NM_MULQ_S_W
= 0x3a,
17495 NM_SHRAV_R_W
= 0x5a,
17496 NM_SHRLV_PH
= 0x62,
17497 NM_SHRLV_QB
= 0x6a,
17498 NM_SHLLV_QB
= 0x72,
17499 NM_SHLLV_S_W
= 0x7a,
17503 NM_MULEQ_S_W_PHL
= 0x04,
17504 NM_MULEQ_S_W_PHR
= 0x0c,
17506 NM_MUL_S_PH
= 0x05,
17507 NM_PRECR_QB_PH
= 0x0d,
17508 NM_PRECRQ_QB_PH
= 0x15,
17509 NM_PRECRQ_PH_W
= 0x1d,
17510 NM_PRECRQ_RS_PH_W
= 0x25,
17511 NM_PRECRQU_S_QB_PH
= 0x2d,
17512 NM_PACKRL_PH
= 0x35,
17516 NM_SHRA_R_W
= 0x5e,
17517 NM_SHRA_R_PH
= 0x66,
17518 NM_SHLL_S_PH
= 0x76,
17519 NM_SHLL_S_W
= 0x7e,
17524 /* POOL32A7 instruction pool */
17529 NM_POOL32AXF
= 0x07,
17532 /* P.SR instruction pool */
17538 /* P.SHIFT instruction pool */
17546 /* P.ROTX instruction pool */
17551 /* P.INS instruction pool */
17556 /* P.EXT instruction pool */
17561 /* POOL32F_0 (fmt) instruction pool */
17566 NM_SELEQZ_S
= 0x07,
17567 NM_SELEQZ_D
= 0x47,
17571 NM_SELNEZ_S
= 0x0f,
17572 NM_SELNEZ_D
= 0x4f,
17587 /* POOL32F_3 instruction pool */
17591 NM_MINA_FMT
= 0x04,
17592 NM_MAXA_FMT
= 0x05,
17593 NM_POOL32FXF
= 0x07,
17596 /* POOL32F_5 instruction pool */
17598 NM_CMP_CONDN_S
= 0x00,
17599 NM_CMP_CONDN_D
= 0x02,
17602 /* P.GP.LH instruction pool */
17608 /* P.GP.SH instruction pool */
17613 /* P.GP.CP1 instruction pool */
17621 /* P.LS.S0 instruction pool */
17638 NM_P_PREFS9
= 0x03,
17644 /* P.LS.S1 instruction pool */
17646 NM_ASET_ACLR
= 0x02,
17654 /* P.LS.E0 instruction pool */
17670 /* P.PREFE instruction pool */
17676 /* P.LLE instruction pool */
17682 /* P.SCE instruction pool */
17688 /* P.LS.WM instruction pool */
17694 /* P.LS.UAWM instruction pool */
17700 /* P.BR3A instruction pool */
17706 NM_BPOSGE32C
= 0x04,
17709 /* P16.RI instruction pool */
17711 NM_P16_SYSCALL
= 0x01,
17716 /* POOL16C_0 instruction pool */
17718 NM_POOL16C_00
= 0x00,
17721 /* P16.JRC instruction pool */
17727 /* P.SYSCALL instruction pool */
17733 /* P.TRAP instruction pool */
17739 /* P.CMOVE instruction pool */
17745 /* POOL32Axf instruction pool */
17747 NM_POOL32AXF_1
= 0x01,
17748 NM_POOL32AXF_2
= 0x02,
17749 NM_POOL32AXF_4
= 0x04,
17750 NM_POOL32AXF_5
= 0x05,
17751 NM_POOL32AXF_7
= 0x07,
17754 /* POOL32Axf_1 instruction pool */
17756 NM_POOL32AXF_1_0
= 0x00,
17757 NM_POOL32AXF_1_1
= 0x01,
17758 NM_POOL32AXF_1_3
= 0x03,
17759 NM_POOL32AXF_1_4
= 0x04,
17760 NM_POOL32AXF_1_5
= 0x05,
17761 NM_POOL32AXF_1_7
= 0x07,
17764 /* POOL32Axf_2 instruction pool */
17766 NM_POOL32AXF_2_0_7
= 0x00,
17767 NM_POOL32AXF_2_8_15
= 0x01,
17768 NM_POOL32AXF_2_16_23
= 0x02,
17769 NM_POOL32AXF_2_24_31
= 0x03,
17772 /* POOL32Axf_7 instruction pool */
17774 NM_SHRA_R_QB
= 0x0,
17779 /* POOL32Axf_1_0 instruction pool */
17787 /* POOL32Axf_1_1 instruction pool */
17793 /* POOL32Axf_1_3 instruction pool */
17801 /* POOL32Axf_1_4 instruction pool */
17807 /* POOL32Axf_1_5 instruction pool */
17809 NM_MAQ_S_W_PHR
= 0x0,
17810 NM_MAQ_S_W_PHL
= 0x1,
17811 NM_MAQ_SA_W_PHR
= 0x2,
17812 NM_MAQ_SA_W_PHL
= 0x3,
17815 /* POOL32Axf_1_7 instruction pool */
17819 NM_EXTR_RS_W
= 0x2,
17823 /* POOL32Axf_2_0_7 instruction pool */
17826 NM_DPAQ_S_W_PH
= 0x1,
17828 NM_DPSQ_S_W_PH
= 0x3,
17835 /* POOL32Axf_2_8_15 instruction pool */
17837 NM_DPAX_W_PH
= 0x0,
17838 NM_DPAQ_SA_L_W
= 0x1,
17839 NM_DPSX_W_PH
= 0x2,
17840 NM_DPSQ_SA_L_W
= 0x3,
17843 NM_EXTRV_R_W
= 0x7,
17846 /* POOL32Axf_2_16_23 instruction pool */
17848 NM_DPAU_H_QBL
= 0x0,
17849 NM_DPAQX_S_W_PH
= 0x1,
17850 NM_DPSU_H_QBL
= 0x2,
17851 NM_DPSQX_S_W_PH
= 0x3,
17854 NM_MULSA_W_PH
= 0x6,
17855 NM_EXTRV_RS_W
= 0x7,
17858 /* POOL32Axf_2_24_31 instruction pool */
17860 NM_DPAU_H_QBR
= 0x0,
17861 NM_DPAQX_SA_W_PH
= 0x1,
17862 NM_DPSU_H_QBR
= 0x2,
17863 NM_DPSQX_SA_W_PH
= 0x3,
17866 NM_MULSAQ_S_W_PH
= 0x6,
17867 NM_EXTRV_S_H
= 0x7,
17870 /* POOL32Axf_{4, 5} instruction pool */
17889 /* nanoMIPS DSP instructions */
17890 NM_ABSQ_S_QB
= 0x00,
17891 NM_ABSQ_S_PH
= 0x08,
17892 NM_ABSQ_S_W
= 0x10,
17893 NM_PRECEQ_W_PHL
= 0x28,
17894 NM_PRECEQ_W_PHR
= 0x30,
17895 NM_PRECEQU_PH_QBL
= 0x38,
17896 NM_PRECEQU_PH_QBR
= 0x48,
17897 NM_PRECEU_PH_QBL
= 0x58,
17898 NM_PRECEU_PH_QBR
= 0x68,
17899 NM_PRECEQU_PH_QBLA
= 0x39,
17900 NM_PRECEQU_PH_QBRA
= 0x49,
17901 NM_PRECEU_PH_QBLA
= 0x59,
17902 NM_PRECEU_PH_QBRA
= 0x69,
17903 NM_REPLV_PH
= 0x01,
17904 NM_REPLV_QB
= 0x09,
17907 NM_RADDU_W_QB
= 0x78,
17913 /* PP.SR instruction pool */
17917 NM_RESTORE_JRC
= 0x03,
17920 /* P.SR.F instruction pool */
17923 NM_RESTOREF
= 0x01,
17926 /* P16.SYSCALL instruction pool */
17928 NM_SYSCALL16
= 0x00,
17929 NM_HYPCALL16
= 0x01,
17932 /* POOL16C_00 instruction pool */
17940 /* PP.LSX and PP.LSXS instruction pool */
17978 /* ERETx instruction pool */
17984 /* POOL32FxF_{0, 1} insturction pool */
17993 NM_CVT_S_PL
= 0x84,
17994 NM_CVT_S_PU
= 0xa4,
17996 NM_CVT_L_S
= 0x004,
17997 NM_CVT_L_D
= 0x104,
17998 NM_CVT_W_S
= 0x024,
17999 NM_CVT_W_D
= 0x124,
18001 NM_RSQRT_S
= 0x008,
18002 NM_RSQRT_D
= 0x108,
18007 NM_RECIP_S
= 0x048,
18008 NM_RECIP_D
= 0x148,
18010 NM_FLOOR_L_S
= 0x00c,
18011 NM_FLOOR_L_D
= 0x10c,
18013 NM_FLOOR_W_S
= 0x02c,
18014 NM_FLOOR_W_D
= 0x12c,
18016 NM_CEIL_L_S
= 0x04c,
18017 NM_CEIL_L_D
= 0x14c,
18018 NM_CEIL_W_S
= 0x06c,
18019 NM_CEIL_W_D
= 0x16c,
18020 NM_TRUNC_L_S
= 0x08c,
18021 NM_TRUNC_L_D
= 0x18c,
18022 NM_TRUNC_W_S
= 0x0ac,
18023 NM_TRUNC_W_D
= 0x1ac,
18024 NM_ROUND_L_S
= 0x0cc,
18025 NM_ROUND_L_D
= 0x1cc,
18026 NM_ROUND_W_S
= 0x0ec,
18027 NM_ROUND_W_D
= 0x1ec,
18035 NM_CVT_D_S
= 0x04d,
18036 NM_CVT_D_W
= 0x0cd,
18037 NM_CVT_D_L
= 0x14d,
18038 NM_CVT_S_D
= 0x06d,
18039 NM_CVT_S_W
= 0x0ed,
18040 NM_CVT_S_L
= 0x16d,
18043 /* P.LL instruction pool */
18049 /* P.SC instruction pool */
18055 /* P.DVP instruction pool */
18064 * nanoMIPS decoding engine
18069 /* extraction utilities */
18071 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18072 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18073 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18074 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18075 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18076 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * Maps a 3-bit encoded GPR field to an architectural register number:
 * encodings 0-3 select registers 16-19 (s0-s3), encodings 4-7 select
 * registers 4-7 (a0-a3).  The input is masked to 3 bits.
 */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Store-source variant of the gpr3 mapping: identical to
 * decode_gpr_gpr3() except that encoding 0 selects register 0 ($zero)
 * rather than register 16.  The input is masked to 3 bits.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * Maps a 4-bit encoded GPR field to an architectural register number:
 * encodings 0-3 select registers 8-11, encodings 4-7 select registers
 * 4-7, and encodings 8-15 select registers 16-23.  The input is
 * masked to 4 bits.
 */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = { 8, 9, 10, 11, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Variant of the gpr4 mapping in which encoding 3 selects register 0
 * ($zero) instead of register 11; all other encodings match
 * decode_gpr_gpr4().  The input is masked to 4 bits.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = { 8, 9, 10, 0, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
18113 /* extraction utilities */
18115 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18116 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18117 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18118 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18119 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18120 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18123 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18125 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18128 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18129 uint8_t gp
, uint16_t u
)
18132 TCGv va
= tcg_temp_new();
18133 TCGv t0
= tcg_temp_new();
18135 while (counter
!= count
) {
18136 bool use_gp
= gp
&& (counter
== count
- 1);
18137 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18138 int this_offset
= -((counter
+ 1) << 2);
18139 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18140 gen_load_gpr(t0
, this_rt
);
18141 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18142 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18146 /* adjust stack pointer */
18147 gen_adjust_sp(ctx
, -u
);
18153 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18154 uint8_t gp
, uint16_t u
)
18157 TCGv va
= tcg_temp_new();
18158 TCGv t0
= tcg_temp_new();
18160 while (counter
!= count
) {
18161 bool use_gp
= gp
&& (counter
== count
- 1);
18162 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18163 int this_offset
= u
- ((counter
+ 1) << 2);
18164 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18165 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18166 ctx
->default_tcg_memop_mask
);
18167 tcg_gen_ext32s_tl(t0
, t0
);
18168 gen_store_gpr(t0
, this_rt
);
18172 /* adjust stack pointer */
18173 gen_adjust_sp(ctx
, u
);
/*
 * Decode one nanoMIPS POOL16C instruction.
 * Dispatches on opcode bits [3:2] to one of four register-register
 * logic operations (NOR, AND, XOR, OR), emitted via gen_logic().
 * NOTE(review): this block is damaged extraction residue -- the
 * function braces, `case` labels and `break;` statements were lost
 * when the file was mangled.  The four gen_logic() calls below are
 * the case bodies in encoding order; restore the structure from the
 * upstream source before compiling.
 */
18179 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
/* rt, rs: 3-bit register fields widened through decode_gpr_gpr3(). */
18181 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18182 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
/* Minor opcode: bits [3:2] select the operation. */
18184 switch (extract32(ctx
->opcode
, 2, 2)) {
/* NOR of rs with the zero register, written to rt (NOT16 form). */
18186 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
/* rt = rt AND rs */
18189 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
/* rt = rt XOR rs */
18192 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
/* rt = rt OR rs */
18195 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18200 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18202 int rt
= extract32(ctx
->opcode
, 21, 5);
18203 int rs
= extract32(ctx
->opcode
, 16, 5);
18204 int rd
= extract32(ctx
->opcode
, 11, 5);
18206 switch (extract32(ctx
->opcode
, 3, 7)) {
18208 switch (extract32(ctx
->opcode
, 10, 1)) {
18211 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18215 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18221 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18225 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18228 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18231 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18234 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18237 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18240 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18243 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18246 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18250 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18253 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18256 switch (extract32(ctx
->opcode
, 10, 1)) {
18258 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18261 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18266 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18269 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18272 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18275 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18278 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18283 #ifndef CONFIG_USER_ONLY
18284 TCGv t0
= tcg_temp_new();
18285 switch (extract32(ctx
->opcode
, 10, 1)) {
18288 check_cp0_enabled(ctx
);
18289 gen_helper_dvp(t0
, cpu_env
);
18290 gen_store_gpr(t0
, rt
);
18295 check_cp0_enabled(ctx
);
18296 gen_helper_evp(t0
, cpu_env
);
18297 gen_store_gpr(t0
, rt
);
18304 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18309 TCGv t0
= tcg_temp_new();
18310 TCGv t1
= tcg_temp_new();
18311 TCGv t2
= tcg_temp_new();
18313 gen_load_gpr(t1
, rs
);
18314 gen_load_gpr(t2
, rt
);
18315 tcg_gen_add_tl(t0
, t1
, t2
);
18316 tcg_gen_ext32s_tl(t0
, t0
);
18317 tcg_gen_xor_tl(t1
, t1
, t2
);
18318 tcg_gen_xor_tl(t2
, t0
, t2
);
18319 tcg_gen_andc_tl(t1
, t2
, t1
);
18321 /* operands of same sign, result different sign */
18322 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18323 gen_store_gpr(t0
, rd
);
18331 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18334 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18337 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18340 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18343 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18346 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18349 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18352 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18354 #ifndef CONFIG_USER_ONLY
18356 check_cp0_enabled(ctx
);
18358 /* Treat as NOP. */
18361 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18364 check_cp0_enabled(ctx
);
18366 TCGv t0
= tcg_temp_new();
18368 gen_load_gpr(t0
, rt
);
18369 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18373 case NM_D_E_MT_VPE
:
18375 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18376 TCGv t0
= tcg_temp_new();
18383 gen_helper_dmt(t0
);
18384 gen_store_gpr(t0
, rt
);
18385 } else if (rs
== 0) {
18388 gen_helper_dvpe(t0
, cpu_env
);
18389 gen_store_gpr(t0
, rt
);
18391 generate_exception_end(ctx
, EXCP_RI
);
18398 gen_helper_emt(t0
);
18399 gen_store_gpr(t0
, rt
);
18400 } else if (rs
== 0) {
18403 gen_helper_evpe(t0
, cpu_env
);
18404 gen_store_gpr(t0
, rt
);
18406 generate_exception_end(ctx
, EXCP_RI
);
18417 TCGv t0
= tcg_temp_new();
18418 TCGv t1
= tcg_temp_new();
18420 gen_load_gpr(t0
, rt
);
18421 gen_load_gpr(t1
, rs
);
18422 gen_helper_fork(t0
, t1
);
18429 check_cp0_enabled(ctx
);
18431 /* Treat as NOP. */
18434 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18435 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18439 check_cp0_enabled(ctx
);
18440 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18441 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18446 TCGv t0
= tcg_temp_new();
18448 gen_load_gpr(t0
, rs
);
18449 gen_helper_yield(t0
, cpu_env
, t0
);
18450 gen_store_gpr(t0
, rt
);
18456 generate_exception_end(ctx
, EXCP_RI
);
18462 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18463 int ret
, int v1
, int v2
)
18469 t0
= tcg_temp_new_i32();
18471 v0_t
= tcg_temp_new();
18472 v1_t
= tcg_temp_new();
18474 tcg_gen_movi_i32(t0
, v2
>> 3);
18476 gen_load_gpr(v0_t
, ret
);
18477 gen_load_gpr(v1_t
, v1
);
18480 case NM_MAQ_S_W_PHR
:
18482 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18484 case NM_MAQ_S_W_PHL
:
18486 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18488 case NM_MAQ_SA_W_PHR
:
18490 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18492 case NM_MAQ_SA_W_PHL
:
18494 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18497 generate_exception_end(ctx
, EXCP_RI
);
18501 tcg_temp_free_i32(t0
);
18503 tcg_temp_free(v0_t
);
18504 tcg_temp_free(v1_t
);
18508 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18509 int ret
, int v1
, int v2
)
18512 TCGv t0
= tcg_temp_new();
18513 TCGv t1
= tcg_temp_new();
18514 TCGv v0_t
= tcg_temp_new();
18516 gen_load_gpr(v0_t
, v1
);
18519 case NM_POOL32AXF_1_0
:
18521 switch (extract32(ctx
->opcode
, 12, 2)) {
18523 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18526 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18529 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18532 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18536 case NM_POOL32AXF_1_1
:
18538 switch (extract32(ctx
->opcode
, 12, 2)) {
18540 tcg_gen_movi_tl(t0
, v2
);
18541 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18544 tcg_gen_movi_tl(t0
, v2
>> 3);
18545 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18548 generate_exception_end(ctx
, EXCP_RI
);
18552 case NM_POOL32AXF_1_3
:
18554 imm
= extract32(ctx
->opcode
, 14, 7);
18555 switch (extract32(ctx
->opcode
, 12, 2)) {
18557 tcg_gen_movi_tl(t0
, imm
);
18558 gen_helper_rddsp(t0
, t0
, cpu_env
);
18559 gen_store_gpr(t0
, ret
);
18562 gen_load_gpr(t0
, ret
);
18563 tcg_gen_movi_tl(t1
, imm
);
18564 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18567 tcg_gen_movi_tl(t0
, v2
>> 3);
18568 tcg_gen_movi_tl(t1
, v1
);
18569 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18570 gen_store_gpr(t0
, ret
);
18573 tcg_gen_movi_tl(t0
, v2
>> 3);
18574 tcg_gen_movi_tl(t1
, v1
);
18575 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18576 gen_store_gpr(t0
, ret
);
18580 case NM_POOL32AXF_1_4
:
18582 tcg_gen_movi_tl(t0
, v2
>> 2);
18583 switch (extract32(ctx
->opcode
, 12, 1)) {
18585 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18586 gen_store_gpr(t0
, ret
);
18589 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18590 gen_store_gpr(t0
, ret
);
18594 case NM_POOL32AXF_1_5
:
18595 opc
= extract32(ctx
->opcode
, 12, 2);
18596 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18598 case NM_POOL32AXF_1_7
:
18600 tcg_gen_movi_tl(t0
, v2
>> 3);
18601 tcg_gen_movi_tl(t1
, v1
);
18602 switch (extract32(ctx
->opcode
, 12, 2)) {
18604 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18605 gen_store_gpr(t0
, ret
);
18608 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18609 gen_store_gpr(t0
, ret
);
18612 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18613 gen_store_gpr(t0
, ret
);
18616 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18617 gen_store_gpr(t0
, ret
);
18622 generate_exception_end(ctx
, EXCP_RI
);
18628 tcg_temp_free(v0_t
);
18631 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18632 TCGv v0
, TCGv v1
, int rd
)
18636 t0
= tcg_temp_new_i32();
18638 tcg_gen_movi_i32(t0
, rd
>> 3);
18641 case NM_POOL32AXF_2_0_7
:
18642 switch (extract32(ctx
->opcode
, 9, 3)) {
18645 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18647 case NM_DPAQ_S_W_PH
:
18649 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18653 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18655 case NM_DPSQ_S_W_PH
:
18657 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18660 generate_exception_end(ctx
, EXCP_RI
);
18664 case NM_POOL32AXF_2_8_15
:
18665 switch (extract32(ctx
->opcode
, 9, 3)) {
18668 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18670 case NM_DPAQ_SA_L_W
:
18672 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18676 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18678 case NM_DPSQ_SA_L_W
:
18680 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18683 generate_exception_end(ctx
, EXCP_RI
);
18687 case NM_POOL32AXF_2_16_23
:
18688 switch (extract32(ctx
->opcode
, 9, 3)) {
18689 case NM_DPAU_H_QBL
:
18691 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18693 case NM_DPAQX_S_W_PH
:
18695 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18697 case NM_DPSU_H_QBL
:
18699 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18701 case NM_DPSQX_S_W_PH
:
18703 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18705 case NM_MULSA_W_PH
:
18707 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18710 generate_exception_end(ctx
, EXCP_RI
);
18714 case NM_POOL32AXF_2_24_31
:
18715 switch (extract32(ctx
->opcode
, 9, 3)) {
18716 case NM_DPAU_H_QBR
:
18718 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18720 case NM_DPAQX_SA_W_PH
:
18722 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18724 case NM_DPSU_H_QBR
:
18726 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18728 case NM_DPSQX_SA_W_PH
:
18730 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18732 case NM_MULSAQ_S_W_PH
:
18734 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18737 generate_exception_end(ctx
, EXCP_RI
);
18742 generate_exception_end(ctx
, EXCP_RI
);
18746 tcg_temp_free_i32(t0
);
18749 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18750 int rt
, int rs
, int rd
)
18753 TCGv t0
= tcg_temp_new();
18754 TCGv t1
= tcg_temp_new();
18755 TCGv v0_t
= tcg_temp_new();
18756 TCGv v1_t
= tcg_temp_new();
18758 gen_load_gpr(v0_t
, rt
);
18759 gen_load_gpr(v1_t
, rs
);
18762 case NM_POOL32AXF_2_0_7
:
18763 switch (extract32(ctx
->opcode
, 9, 3)) {
18765 case NM_DPAQ_S_W_PH
:
18767 case NM_DPSQ_S_W_PH
:
18768 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18773 gen_load_gpr(t0
, rs
);
18775 if (rd
!= 0 && rd
!= 2) {
18776 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18777 tcg_gen_ext32u_tl(t0
, t0
);
18778 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18779 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18781 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18787 int acc
= extract32(ctx
->opcode
, 14, 2);
18788 TCGv_i64 t2
= tcg_temp_new_i64();
18789 TCGv_i64 t3
= tcg_temp_new_i64();
18791 gen_load_gpr(t0
, rt
);
18792 gen_load_gpr(t1
, rs
);
18793 tcg_gen_ext_tl_i64(t2
, t0
);
18794 tcg_gen_ext_tl_i64(t3
, t1
);
18795 tcg_gen_mul_i64(t2
, t2
, t3
);
18796 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18797 tcg_gen_add_i64(t2
, t2
, t3
);
18798 tcg_temp_free_i64(t3
);
18799 gen_move_low32(cpu_LO
[acc
], t2
);
18800 gen_move_high32(cpu_HI
[acc
], t2
);
18801 tcg_temp_free_i64(t2
);
18807 int acc
= extract32(ctx
->opcode
, 14, 2);
18808 TCGv_i32 t2
= tcg_temp_new_i32();
18809 TCGv_i32 t3
= tcg_temp_new_i32();
18811 gen_load_gpr(t0
, rs
);
18812 gen_load_gpr(t1
, rt
);
18813 tcg_gen_trunc_tl_i32(t2
, t0
);
18814 tcg_gen_trunc_tl_i32(t3
, t1
);
18815 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18816 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18817 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18818 tcg_temp_free_i32(t2
);
18819 tcg_temp_free_i32(t3
);
18824 gen_load_gpr(v1_t
, rs
);
18825 tcg_gen_movi_tl(t0
, rd
>> 3);
18826 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18827 gen_store_gpr(t0
, ret
);
18831 case NM_POOL32AXF_2_8_15
:
18832 switch (extract32(ctx
->opcode
, 9, 3)) {
18834 case NM_DPAQ_SA_L_W
:
18836 case NM_DPSQ_SA_L_W
:
18837 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18842 int acc
= extract32(ctx
->opcode
, 14, 2);
18843 TCGv_i64 t2
= tcg_temp_new_i64();
18844 TCGv_i64 t3
= tcg_temp_new_i64();
18846 gen_load_gpr(t0
, rs
);
18847 gen_load_gpr(t1
, rt
);
18848 tcg_gen_ext32u_tl(t0
, t0
);
18849 tcg_gen_ext32u_tl(t1
, t1
);
18850 tcg_gen_extu_tl_i64(t2
, t0
);
18851 tcg_gen_extu_tl_i64(t3
, t1
);
18852 tcg_gen_mul_i64(t2
, t2
, t3
);
18853 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18854 tcg_gen_add_i64(t2
, t2
, t3
);
18855 tcg_temp_free_i64(t3
);
18856 gen_move_low32(cpu_LO
[acc
], t2
);
18857 gen_move_high32(cpu_HI
[acc
], t2
);
18858 tcg_temp_free_i64(t2
);
18864 int acc
= extract32(ctx
->opcode
, 14, 2);
18865 TCGv_i32 t2
= tcg_temp_new_i32();
18866 TCGv_i32 t3
= tcg_temp_new_i32();
18868 gen_load_gpr(t0
, rs
);
18869 gen_load_gpr(t1
, rt
);
18870 tcg_gen_trunc_tl_i32(t2
, t0
);
18871 tcg_gen_trunc_tl_i32(t3
, t1
);
18872 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18873 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18874 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18875 tcg_temp_free_i32(t2
);
18876 tcg_temp_free_i32(t3
);
18881 tcg_gen_movi_tl(t0
, rd
>> 3);
18882 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18883 gen_store_gpr(t0
, ret
);
18886 generate_exception_end(ctx
, EXCP_RI
);
18890 case NM_POOL32AXF_2_16_23
:
18891 switch (extract32(ctx
->opcode
, 9, 3)) {
18892 case NM_DPAU_H_QBL
:
18893 case NM_DPAQX_S_W_PH
:
18894 case NM_DPSU_H_QBL
:
18895 case NM_DPSQX_S_W_PH
:
18896 case NM_MULSA_W_PH
:
18897 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18901 tcg_gen_movi_tl(t0
, rd
>> 3);
18902 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18903 gen_store_gpr(t0
, ret
);
18908 int acc
= extract32(ctx
->opcode
, 14, 2);
18909 TCGv_i64 t2
= tcg_temp_new_i64();
18910 TCGv_i64 t3
= tcg_temp_new_i64();
18912 gen_load_gpr(t0
, rs
);
18913 gen_load_gpr(t1
, rt
);
18914 tcg_gen_ext_tl_i64(t2
, t0
);
18915 tcg_gen_ext_tl_i64(t3
, t1
);
18916 tcg_gen_mul_i64(t2
, t2
, t3
);
18917 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18918 tcg_gen_sub_i64(t2
, t3
, t2
);
18919 tcg_temp_free_i64(t3
);
18920 gen_move_low32(cpu_LO
[acc
], t2
);
18921 gen_move_high32(cpu_HI
[acc
], t2
);
18922 tcg_temp_free_i64(t2
);
18925 case NM_EXTRV_RS_W
:
18927 tcg_gen_movi_tl(t0
, rd
>> 3);
18928 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
18929 gen_store_gpr(t0
, ret
);
18933 case NM_POOL32AXF_2_24_31
:
18934 switch (extract32(ctx
->opcode
, 9, 3)) {
18935 case NM_DPAU_H_QBR
:
18936 case NM_DPAQX_SA_W_PH
:
18937 case NM_DPSU_H_QBR
:
18938 case NM_DPSQX_SA_W_PH
:
18939 case NM_MULSAQ_S_W_PH
:
18940 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18944 tcg_gen_movi_tl(t0
, rd
>> 3);
18945 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
18946 gen_store_gpr(t0
, ret
);
18951 int acc
= extract32(ctx
->opcode
, 14, 2);
18952 TCGv_i64 t2
= tcg_temp_new_i64();
18953 TCGv_i64 t3
= tcg_temp_new_i64();
18955 gen_load_gpr(t0
, rs
);
18956 gen_load_gpr(t1
, rt
);
18957 tcg_gen_ext32u_tl(t0
, t0
);
18958 tcg_gen_ext32u_tl(t1
, t1
);
18959 tcg_gen_extu_tl_i64(t2
, t0
);
18960 tcg_gen_extu_tl_i64(t3
, t1
);
18961 tcg_gen_mul_i64(t2
, t2
, t3
);
18962 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18963 tcg_gen_sub_i64(t2
, t3
, t2
);
18964 tcg_temp_free_i64(t3
);
18965 gen_move_low32(cpu_LO
[acc
], t2
);
18966 gen_move_high32(cpu_HI
[acc
], t2
);
18967 tcg_temp_free_i64(t2
);
18972 tcg_gen_movi_tl(t0
, rd
>> 3);
18973 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
18974 gen_store_gpr(t0
, ret
);
18979 generate_exception_end(ctx
, EXCP_RI
);
18986 tcg_temp_free(v0_t
);
18987 tcg_temp_free(v1_t
);
18990 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18994 TCGv t0
= tcg_temp_new();
18995 TCGv v0_t
= tcg_temp_new();
18997 gen_load_gpr(v0_t
, rs
);
19002 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19003 gen_store_gpr(v0_t
, ret
);
19007 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19008 gen_store_gpr(v0_t
, ret
);
19012 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19013 gen_store_gpr(v0_t
, ret
);
19015 case NM_PRECEQ_W_PHL
:
19017 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19018 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19019 gen_store_gpr(v0_t
, ret
);
19021 case NM_PRECEQ_W_PHR
:
19023 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19024 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19025 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19026 gen_store_gpr(v0_t
, ret
);
19028 case NM_PRECEQU_PH_QBL
:
19030 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19031 gen_store_gpr(v0_t
, ret
);
19033 case NM_PRECEQU_PH_QBR
:
19035 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19036 gen_store_gpr(v0_t
, ret
);
19038 case NM_PRECEQU_PH_QBLA
:
19040 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19041 gen_store_gpr(v0_t
, ret
);
19043 case NM_PRECEQU_PH_QBRA
:
19045 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19046 gen_store_gpr(v0_t
, ret
);
19048 case NM_PRECEU_PH_QBL
:
19050 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19051 gen_store_gpr(v0_t
, ret
);
19053 case NM_PRECEU_PH_QBR
:
19055 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19056 gen_store_gpr(v0_t
, ret
);
19058 case NM_PRECEU_PH_QBLA
:
19060 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19061 gen_store_gpr(v0_t
, ret
);
19063 case NM_PRECEU_PH_QBRA
:
19065 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19066 gen_store_gpr(v0_t
, ret
);
19070 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19071 tcg_gen_shli_tl(t0
, v0_t
, 16);
19072 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19073 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19074 gen_store_gpr(v0_t
, ret
);
19078 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19079 tcg_gen_shli_tl(t0
, v0_t
, 8);
19080 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19081 tcg_gen_shli_tl(t0
, v0_t
, 16);
19082 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19083 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19084 gen_store_gpr(v0_t
, ret
);
19088 gen_helper_bitrev(v0_t
, v0_t
);
19089 gen_store_gpr(v0_t
, ret
);
19094 TCGv tv0
= tcg_temp_new();
19096 gen_load_gpr(tv0
, rt
);
19097 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19098 gen_store_gpr(v0_t
, ret
);
19099 tcg_temp_free(tv0
);
19102 case NM_RADDU_W_QB
:
19104 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19105 gen_store_gpr(v0_t
, ret
);
19108 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19112 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19116 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19119 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19122 generate_exception_end(ctx
, EXCP_RI
);
19126 tcg_temp_free(v0_t
);
19130 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19131 int rt
, int rs
, int rd
)
19133 TCGv t0
= tcg_temp_new();
19134 TCGv rs_t
= tcg_temp_new();
19136 gen_load_gpr(rs_t
, rs
);
19141 tcg_gen_movi_tl(t0
, rd
>> 2);
19142 switch (extract32(ctx
->opcode
, 12, 1)) {
19145 gen_helper_shra_qb(t0
, t0
, rs_t
);
19146 gen_store_gpr(t0
, rt
);
19150 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19151 gen_store_gpr(t0
, rt
);
19157 tcg_gen_movi_tl(t0
, rd
>> 1);
19158 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19159 gen_store_gpr(t0
, rt
);
19165 target_long result
;
19166 imm
= extract32(ctx
->opcode
, 13, 8);
19167 result
= (uint32_t)imm
<< 24 |
19168 (uint32_t)imm
<< 16 |
19169 (uint32_t)imm
<< 8 |
19171 result
= (int32_t)result
;
19172 tcg_gen_movi_tl(t0
, result
);
19173 gen_store_gpr(t0
, rt
);
19177 generate_exception_end(ctx
, EXCP_RI
);
19181 tcg_temp_free(rs_t
);
19185 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19187 int rt
= extract32(ctx
->opcode
, 21, 5);
19188 int rs
= extract32(ctx
->opcode
, 16, 5);
19189 int rd
= extract32(ctx
->opcode
, 11, 5);
19191 switch (extract32(ctx
->opcode
, 6, 3)) {
19192 case NM_POOL32AXF_1
:
19194 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19195 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19198 case NM_POOL32AXF_2
:
19200 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19201 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19204 case NM_POOL32AXF_4
:
19206 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19207 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19210 case NM_POOL32AXF_5
:
19211 switch (extract32(ctx
->opcode
, 9, 7)) {
19212 #ifndef CONFIG_USER_ONLY
19214 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19217 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19220 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19223 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19226 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19229 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19232 check_cp0_enabled(ctx
);
19234 TCGv t0
= tcg_temp_new();
19236 save_cpu_state(ctx
, 1);
19237 gen_helper_di(t0
, cpu_env
);
19238 gen_store_gpr(t0
, rt
);
19239 /* Stop translation as we may have switched the execution mode */
19240 ctx
->base
.is_jmp
= DISAS_STOP
;
19245 check_cp0_enabled(ctx
);
19247 TCGv t0
= tcg_temp_new();
19249 save_cpu_state(ctx
, 1);
19250 gen_helper_ei(t0
, cpu_env
);
19251 gen_store_gpr(t0
, rt
);
19252 /* Stop translation as we may have switched the execution mode */
19253 ctx
->base
.is_jmp
= DISAS_STOP
;
19258 gen_load_srsgpr(rs
, rt
);
19261 gen_store_srsgpr(rs
, rt
);
19264 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19267 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19270 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19274 generate_exception_end(ctx
, EXCP_RI
);
19278 case NM_POOL32AXF_7
:
19280 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19281 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19285 generate_exception_end(ctx
, EXCP_RI
);
19290 /* Immediate Value Compact Branches */
19291 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19292 int rt
, int32_t imm
, int32_t offset
)
19295 int bcond_compute
= 0;
19296 TCGv t0
= tcg_temp_new();
19297 TCGv t1
= tcg_temp_new();
19299 gen_load_gpr(t0
, rt
);
19300 tcg_gen_movi_tl(t1
, imm
);
19301 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19303 /* Load needed operands and calculate btarget */
19306 if (rt
== 0 && imm
== 0) {
19307 /* Unconditional branch */
19308 } else if (rt
== 0 && imm
!= 0) {
19313 cond
= TCG_COND_EQ
;
19319 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19320 generate_exception_end(ctx
, EXCP_RI
);
19322 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19323 /* Unconditional branch */
19324 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19328 tcg_gen_shri_tl(t0
, t0
, imm
);
19329 tcg_gen_andi_tl(t0
, t0
, 1);
19330 tcg_gen_movi_tl(t1
, 0);
19332 if (opc
== NM_BBEQZC
) {
19333 cond
= TCG_COND_EQ
;
19335 cond
= TCG_COND_NE
;
19340 if (rt
== 0 && imm
== 0) {
19343 } else if (rt
== 0 && imm
!= 0) {
19344 /* Unconditional branch */
19347 cond
= TCG_COND_NE
;
19351 if (rt
== 0 && imm
== 0) {
19352 /* Unconditional branch */
19355 cond
= TCG_COND_GE
;
19360 cond
= TCG_COND_LT
;
19363 if (rt
== 0 && imm
== 0) {
19364 /* Unconditional branch */
19367 cond
= TCG_COND_GEU
;
19372 cond
= TCG_COND_LTU
;
19375 MIPS_INVAL("Immediate Value Compact branch");
19376 generate_exception_end(ctx
, EXCP_RI
);
19380 if (bcond_compute
== 0) {
19381 /* Uncoditional compact branch */
19382 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19384 /* Conditional compact branch */
19385 TCGLabel
*fs
= gen_new_label();
19387 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19389 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19392 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19400 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19401 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19404 TCGv t0
= tcg_temp_new();
19405 TCGv t1
= tcg_temp_new();
19408 gen_load_gpr(t0
, rs
);
19412 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19415 /* calculate btarget */
19416 tcg_gen_shli_tl(t0
, t0
, 1);
19417 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19418 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19420 /* unconditional branch to register */
19421 tcg_gen_mov_tl(cpu_PC
, btarget
);
19422 tcg_gen_lookup_and_goto_ptr();
19428 /* nanoMIPS Branches */
19429 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19430 int rs
, int rt
, int32_t offset
)
19432 int bcond_compute
= 0;
19433 TCGv t0
= tcg_temp_new();
19434 TCGv t1
= tcg_temp_new();
19436 /* Load needed operands and calculate btarget */
19438 /* compact branch */
19441 gen_load_gpr(t0
, rs
);
19442 gen_load_gpr(t1
, rt
);
19444 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19448 if (rs
== 0 || rs
== rt
) {
19449 /* OPC_BLEZALC, OPC_BGEZALC */
19450 /* OPC_BGTZALC, OPC_BLTZALC */
19451 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19453 gen_load_gpr(t0
, rs
);
19454 gen_load_gpr(t1
, rt
);
19456 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19459 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19463 /* OPC_BEQZC, OPC_BNEZC */
19464 gen_load_gpr(t0
, rs
);
19466 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19468 /* OPC_JIC, OPC_JIALC */
19469 TCGv tbase
= tcg_temp_new();
19470 TCGv toffset
= tcg_temp_new();
19472 gen_load_gpr(tbase
, rt
);
19473 tcg_gen_movi_tl(toffset
, offset
);
19474 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19475 tcg_temp_free(tbase
);
19476 tcg_temp_free(toffset
);
19480 MIPS_INVAL("Compact branch/jump");
19481 generate_exception_end(ctx
, EXCP_RI
);
19485 if (bcond_compute
== 0) {
19486 /* Uncoditional compact branch */
19489 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19492 MIPS_INVAL("Compact branch/jump");
19493 generate_exception_end(ctx
, EXCP_RI
);
19497 /* Conditional compact branch */
19498 TCGLabel
*fs
= gen_new_label();
19502 if (rs
== 0 && rt
!= 0) {
19504 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19505 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19507 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19510 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19514 if (rs
== 0 && rt
!= 0) {
19516 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19517 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19519 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19522 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19526 if (rs
== 0 && rt
!= 0) {
19528 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19529 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19531 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19534 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19538 if (rs
== 0 && rt
!= 0) {
19540 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19541 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19543 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19546 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19550 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19553 MIPS_INVAL("Compact conditional branch/jump");
19554 generate_exception_end(ctx
, EXCP_RI
);
19558 /* Generating branch here as compact branches don't have delay slot */
19559 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19562 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19571 /* nanoMIPS CP1 Branches */
19572 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19573 int32_t ft
, int32_t offset
)
19575 target_ulong btarget
;
19576 TCGv_i64 t0
= tcg_temp_new_i64();
19578 gen_load_fpr64(ctx
, t0
, ft
);
19579 tcg_gen_andi_i64(t0
, t0
, 1);
19581 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19585 tcg_gen_xori_i64(t0
, t0
, 1);
19586 ctx
->hflags
|= MIPS_HFLAG_BC
;
19589 /* t0 already set */
19590 ctx
->hflags
|= MIPS_HFLAG_BC
;
19593 MIPS_INVAL("cp1 cond branch");
19594 generate_exception_end(ctx
, EXCP_RI
);
19598 tcg_gen_trunc_i64_tl(bcond
, t0
);
19600 ctx
->btarget
= btarget
;
19603 tcg_temp_free_i64(t0
);
19607 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19610 t0
= tcg_temp_new();
19611 t1
= tcg_temp_new();
19613 gen_load_gpr(t0
, rs
);
19614 gen_load_gpr(t1
, rt
);
19616 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19617 /* PP.LSXS instructions require shifting */
19618 switch (extract32(ctx
->opcode
, 7, 4)) {
19623 tcg_gen_shli_tl(t0
, t0
, 1);
19630 tcg_gen_shli_tl(t0
, t0
, 2);
19634 tcg_gen_shli_tl(t0
, t0
, 3);
19638 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19640 switch (extract32(ctx
->opcode
, 7, 4)) {
19642 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19644 gen_store_gpr(t0
, rd
);
19648 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19650 gen_store_gpr(t0
, rd
);
19654 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19656 gen_store_gpr(t0
, rd
);
19659 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19661 gen_store_gpr(t0
, rd
);
19665 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19667 gen_store_gpr(t0
, rd
);
19671 gen_load_gpr(t1
, rd
);
19672 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19678 gen_load_gpr(t1
, rd
);
19679 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19685 gen_load_gpr(t1
, rd
);
19686 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19690 /*case NM_LWC1XS:*/
19692 /*case NM_LDC1XS:*/
19694 /*case NM_SWC1XS:*/
19696 /*case NM_SDC1XS:*/
19697 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19698 check_cp1_enabled(ctx
);
19699 switch (extract32(ctx
->opcode
, 7, 4)) {
19701 /*case NM_LWC1XS:*/
19702 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19705 /*case NM_LDC1XS:*/
19706 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19709 /*case NM_SWC1XS:*/
19710 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19713 /*case NM_SDC1XS:*/
19714 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19718 generate_exception_err(ctx
, EXCP_CpU
, 1);
19722 generate_exception_end(ctx
, EXCP_RI
);
19730 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19734 rt
= extract32(ctx
->opcode
, 21, 5);
19735 rs
= extract32(ctx
->opcode
, 16, 5);
19736 rd
= extract32(ctx
->opcode
, 11, 5);
19738 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19739 generate_exception_end(ctx
, EXCP_RI
);
19742 check_cp1_enabled(ctx
);
19743 switch (extract32(ctx
->opcode
, 0, 3)) {
19745 switch (extract32(ctx
->opcode
, 3, 7)) {
19747 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19750 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19753 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19756 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19759 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19762 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19765 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19768 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19771 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19774 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19777 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19780 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19783 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19786 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19789 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19792 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19795 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19798 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19801 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19804 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19807 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19810 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19813 generate_exception_end(ctx
, EXCP_RI
);
19818 switch (extract32(ctx
->opcode
, 3, 3)) {
19820 switch (extract32(ctx
->opcode
, 9, 1)) {
19822 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19825 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19830 switch (extract32(ctx
->opcode
, 9, 1)) {
19832 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19835 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19840 switch (extract32(ctx
->opcode
, 9, 1)) {
19842 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19845 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19850 switch (extract32(ctx
->opcode
, 9, 1)) {
19852 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19855 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19860 switch (extract32(ctx
->opcode
, 6, 8)) {
19862 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19865 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19868 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19871 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19874 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19877 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19880 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19883 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19886 switch (extract32(ctx
->opcode
, 6, 9)) {
19888 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19891 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19894 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19897 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19900 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19903 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19906 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19909 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19912 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19915 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19918 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19921 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19924 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
19927 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
19930 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
19933 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
19936 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
19939 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
19942 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
19945 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
19948 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
19951 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
19954 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
19957 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
19960 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
19963 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
19966 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
19969 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
19972 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
19975 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
19978 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
19981 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
19984 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
19987 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
19990 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
19993 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
19996 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
19999 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20002 generate_exception_end(ctx
, EXCP_RI
);
20011 switch (extract32(ctx
->opcode
, 3, 3)) {
20012 case NM_CMP_CONDN_S
:
20013 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20015 case NM_CMP_CONDN_D
:
20016 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20019 generate_exception_end(ctx
, EXCP_RI
);
20024 generate_exception_end(ctx
, EXCP_RI
);
20029 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20030 int rd
, int rs
, int rt
)
20033 TCGv t0
= tcg_temp_new();
20034 TCGv v1_t
= tcg_temp_new();
20035 TCGv v2_t
= tcg_temp_new();
20037 gen_load_gpr(v1_t
, rs
);
20038 gen_load_gpr(v2_t
, rt
);
20043 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20047 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20051 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20053 case NM_CMPU_EQ_QB
:
20055 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20057 case NM_CMPU_LT_QB
:
20059 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20061 case NM_CMPU_LE_QB
:
20063 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20065 case NM_CMPGU_EQ_QB
:
20067 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20068 gen_store_gpr(v1_t
, ret
);
20070 case NM_CMPGU_LT_QB
:
20072 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20073 gen_store_gpr(v1_t
, ret
);
20075 case NM_CMPGU_LE_QB
:
20077 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20078 gen_store_gpr(v1_t
, ret
);
20080 case NM_CMPGDU_EQ_QB
:
20082 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20083 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20084 gen_store_gpr(v1_t
, ret
);
20086 case NM_CMPGDU_LT_QB
:
20088 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20089 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20090 gen_store_gpr(v1_t
, ret
);
20092 case NM_CMPGDU_LE_QB
:
20094 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20095 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20096 gen_store_gpr(v1_t
, ret
);
20100 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20101 gen_store_gpr(v1_t
, ret
);
20105 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20106 gen_store_gpr(v1_t
, ret
);
20110 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20111 gen_store_gpr(v1_t
, ret
);
20115 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20116 gen_store_gpr(v1_t
, ret
);
20120 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20121 gen_store_gpr(v1_t
, ret
);
20125 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20126 gen_store_gpr(v1_t
, ret
);
20130 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20131 gen_store_gpr(v1_t
, ret
);
20135 switch (extract32(ctx
->opcode
, 10, 1)) {
20138 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20139 gen_store_gpr(v1_t
, ret
);
20143 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20144 gen_store_gpr(v1_t
, ret
);
20148 case NM_ADDQH_R_PH
:
20150 switch (extract32(ctx
->opcode
, 10, 1)) {
20153 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20154 gen_store_gpr(v1_t
, ret
);
20158 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20159 gen_store_gpr(v1_t
, ret
);
20165 switch (extract32(ctx
->opcode
, 10, 1)) {
20168 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20169 gen_store_gpr(v1_t
, ret
);
20173 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20174 gen_store_gpr(v1_t
, ret
);
20180 switch (extract32(ctx
->opcode
, 10, 1)) {
20183 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20184 gen_store_gpr(v1_t
, ret
);
20188 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20189 gen_store_gpr(v1_t
, ret
);
20195 switch (extract32(ctx
->opcode
, 10, 1)) {
20198 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20199 gen_store_gpr(v1_t
, ret
);
20203 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20204 gen_store_gpr(v1_t
, ret
);
20208 case NM_ADDUH_R_QB
:
20210 switch (extract32(ctx
->opcode
, 10, 1)) {
20213 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20214 gen_store_gpr(v1_t
, ret
);
20218 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20219 gen_store_gpr(v1_t
, ret
);
20223 case NM_SHRAV_R_PH
:
20225 switch (extract32(ctx
->opcode
, 10, 1)) {
20228 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20229 gen_store_gpr(v1_t
, ret
);
20233 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20234 gen_store_gpr(v1_t
, ret
);
20238 case NM_SHRAV_R_QB
:
20240 switch (extract32(ctx
->opcode
, 10, 1)) {
20243 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20244 gen_store_gpr(v1_t
, ret
);
20248 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20249 gen_store_gpr(v1_t
, ret
);
20255 switch (extract32(ctx
->opcode
, 10, 1)) {
20258 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20259 gen_store_gpr(v1_t
, ret
);
20263 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20264 gen_store_gpr(v1_t
, ret
);
20268 case NM_SUBQH_R_PH
:
20270 switch (extract32(ctx
->opcode
, 10, 1)) {
20273 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20274 gen_store_gpr(v1_t
, ret
);
20278 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20279 gen_store_gpr(v1_t
, ret
);
20285 switch (extract32(ctx
->opcode
, 10, 1)) {
20288 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20289 gen_store_gpr(v1_t
, ret
);
20293 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20294 gen_store_gpr(v1_t
, ret
);
20300 switch (extract32(ctx
->opcode
, 10, 1)) {
20303 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20304 gen_store_gpr(v1_t
, ret
);
20308 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20309 gen_store_gpr(v1_t
, ret
);
20315 switch (extract32(ctx
->opcode
, 10, 1)) {
20318 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20319 gen_store_gpr(v1_t
, ret
);
20323 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20324 gen_store_gpr(v1_t
, ret
);
20328 case NM_SUBUH_R_QB
:
20330 switch (extract32(ctx
->opcode
, 10, 1)) {
20333 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20334 gen_store_gpr(v1_t
, ret
);
20338 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20339 gen_store_gpr(v1_t
, ret
);
20343 case NM_SHLLV_S_PH
:
20345 switch (extract32(ctx
->opcode
, 10, 1)) {
20348 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20349 gen_store_gpr(v1_t
, ret
);
20353 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20354 gen_store_gpr(v1_t
, ret
);
20358 case NM_PRECR_SRA_R_PH_W
:
20360 switch (extract32(ctx
->opcode
, 10, 1)) {
20362 /* PRECR_SRA_PH_W */
20364 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20365 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20367 gen_store_gpr(v1_t
, rt
);
20368 tcg_temp_free_i32(sa_t
);
20372 /* PRECR_SRA_R_PH_W */
20374 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20375 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20377 gen_store_gpr(v1_t
, rt
);
20378 tcg_temp_free_i32(sa_t
);
20383 case NM_MULEU_S_PH_QBL
:
20385 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20386 gen_store_gpr(v1_t
, ret
);
20388 case NM_MULEU_S_PH_QBR
:
20390 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20391 gen_store_gpr(v1_t
, ret
);
20393 case NM_MULQ_RS_PH
:
20395 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20396 gen_store_gpr(v1_t
, ret
);
20400 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20401 gen_store_gpr(v1_t
, ret
);
20405 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20406 gen_store_gpr(v1_t
, ret
);
20410 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20411 gen_store_gpr(v1_t
, ret
);
20415 gen_load_gpr(t0
, rs
);
20417 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20419 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20423 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20424 gen_store_gpr(v1_t
, ret
);
20428 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20429 gen_store_gpr(v1_t
, ret
);
20433 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20434 gen_store_gpr(v1_t
, ret
);
20438 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20439 gen_store_gpr(v1_t
, ret
);
20443 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20444 gen_store_gpr(v1_t
, ret
);
20448 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20449 gen_store_gpr(v1_t
, ret
);
20454 TCGv tv0
= tcg_temp_new();
20455 TCGv tv1
= tcg_temp_new();
20456 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20458 tcg_gen_movi_tl(tv0
, rd
>> 3);
20459 tcg_gen_movi_tl(tv1
, imm
);
20460 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20463 case NM_MULEQ_S_W_PHL
:
20465 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20466 gen_store_gpr(v1_t
, ret
);
20468 case NM_MULEQ_S_W_PHR
:
20470 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20471 gen_store_gpr(v1_t
, ret
);
20475 switch (extract32(ctx
->opcode
, 10, 1)) {
20478 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20479 gen_store_gpr(v1_t
, ret
);
20483 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20484 gen_store_gpr(v1_t
, ret
);
20488 case NM_PRECR_QB_PH
:
20490 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20491 gen_store_gpr(v1_t
, ret
);
20493 case NM_PRECRQ_QB_PH
:
20495 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20496 gen_store_gpr(v1_t
, ret
);
20498 case NM_PRECRQ_PH_W
:
20500 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20501 gen_store_gpr(v1_t
, ret
);
20503 case NM_PRECRQ_RS_PH_W
:
20505 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20506 gen_store_gpr(v1_t
, ret
);
20508 case NM_PRECRQU_S_QB_PH
:
20510 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20511 gen_store_gpr(v1_t
, ret
);
20515 tcg_gen_movi_tl(t0
, rd
);
20516 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20517 gen_store_gpr(v1_t
, rt
);
20521 tcg_gen_movi_tl(t0
, rd
>> 1);
20522 switch (extract32(ctx
->opcode
, 10, 1)) {
20525 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20526 gen_store_gpr(v1_t
, rt
);
20530 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20531 gen_store_gpr(v1_t
, rt
);
20537 tcg_gen_movi_tl(t0
, rd
>> 1);
20538 switch (extract32(ctx
->opcode
, 10, 2)) {
20541 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20542 gen_store_gpr(v1_t
, rt
);
20546 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20547 gen_store_gpr(v1_t
, rt
);
20550 generate_exception_end(ctx
, EXCP_RI
);
20556 tcg_gen_movi_tl(t0
, rd
);
20557 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20558 gen_store_gpr(v1_t
, rt
);
20564 imm
= sextract32(ctx
->opcode
, 11, 11);
20565 imm
= (int16_t)(imm
<< 6) >> 6;
20567 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20572 generate_exception_end(ctx
, EXCP_RI
);
20577 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20585 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20586 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20588 rt
= extract32(ctx
->opcode
, 21, 5);
20589 rs
= extract32(ctx
->opcode
, 16, 5);
20590 rd
= extract32(ctx
->opcode
, 11, 5);
20592 op
= extract32(ctx
->opcode
, 26, 6);
20597 switch (extract32(ctx
->opcode
, 19, 2)) {
20600 generate_exception_end(ctx
, EXCP_RI
);
20603 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20604 generate_exception_end(ctx
, EXCP_SYSCALL
);
20606 generate_exception_end(ctx
, EXCP_RI
);
20610 generate_exception_end(ctx
, EXCP_BREAK
);
20613 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20614 gen_helper_do_semihosting(cpu_env
);
20616 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20617 generate_exception_end(ctx
, EXCP_RI
);
20619 generate_exception_end(ctx
, EXCP_DBp
);
20626 imm
= extract32(ctx
->opcode
, 0, 16);
20628 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20630 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20632 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20637 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20638 extract32(ctx
->opcode
, 1, 20) << 1;
20639 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20640 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20644 switch (ctx
->opcode
& 0x07) {
20646 gen_pool32a0_nanomips_insn(env
, ctx
);
20650 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20651 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20655 switch (extract32(ctx
->opcode
, 3, 3)) {
20657 gen_p_lsx(ctx
, rd
, rs
, rt
);
20660 /* In nanoMIPS, the shift field directly encodes the shift
20661 * amount, meaning that the supported shift values are in
20662 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20663 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20664 extract32(ctx
->opcode
, 9, 2) - 1);
20667 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20670 gen_pool32axf_nanomips_insn(env
, ctx
);
20673 generate_exception_end(ctx
, EXCP_RI
);
20678 generate_exception_end(ctx
, EXCP_RI
);
20683 switch (ctx
->opcode
& 0x03) {
20686 offset
= extract32(ctx
->opcode
, 0, 21);
20687 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20691 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20694 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20697 generate_exception_end(ctx
, EXCP_RI
);
20703 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20704 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20705 switch (extract32(ctx
->opcode
, 16, 5)) {
20709 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20715 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20716 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20722 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20728 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20731 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20738 t0
= tcg_temp_new();
20740 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20743 tcg_gen_movi_tl(t0
, addr
);
20744 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20752 t0
= tcg_temp_new();
20753 t1
= tcg_temp_new();
20755 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20758 tcg_gen_movi_tl(t0
, addr
);
20759 gen_load_gpr(t1
, rt
);
20761 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20768 generate_exception_end(ctx
, EXCP_RI
);
20774 switch (extract32(ctx
->opcode
, 12, 4)) {
20776 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20779 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20782 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20785 switch (extract32(ctx
->opcode
, 20, 1)) {
20787 switch (ctx
->opcode
& 3) {
20789 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20790 extract32(ctx
->opcode
, 2, 1),
20791 extract32(ctx
->opcode
, 3, 9) << 3);
20794 case NM_RESTORE_JRC
:
20795 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20796 extract32(ctx
->opcode
, 2, 1),
20797 extract32(ctx
->opcode
, 3, 9) << 3);
20798 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20799 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20803 generate_exception_end(ctx
, EXCP_RI
);
20808 generate_exception_end(ctx
, EXCP_RI
);
20813 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20816 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20820 TCGv t0
= tcg_temp_new();
20822 imm
= extract32(ctx
->opcode
, 0, 12);
20823 gen_load_gpr(t0
, rs
);
20824 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20825 gen_store_gpr(t0
, rt
);
20831 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20832 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20836 int shift
= extract32(ctx
->opcode
, 0, 5);
20837 switch (extract32(ctx
->opcode
, 5, 4)) {
20839 if (rt
== 0 && shift
== 0) {
20841 } else if (rt
== 0 && shift
== 3) {
20842 /* EHB - treat as NOP */
20843 } else if (rt
== 0 && shift
== 5) {
20844 /* PAUSE - treat as NOP */
20845 } else if (rt
== 0 && shift
== 6) {
20847 gen_sync(extract32(ctx
->opcode
, 16, 5));
20850 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20851 extract32(ctx
->opcode
, 0, 5));
20855 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20856 extract32(ctx
->opcode
, 0, 5));
20859 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20860 extract32(ctx
->opcode
, 0, 5));
20863 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20864 extract32(ctx
->opcode
, 0, 5));
20872 TCGv t0
= tcg_temp_new();
20873 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20874 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20876 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20878 gen_load_gpr(t0
, rs
);
20879 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20882 tcg_temp_free_i32(shift
);
20883 tcg_temp_free_i32(shiftx
);
20884 tcg_temp_free_i32(stripe
);
20888 switch (((ctx
->opcode
>> 10) & 2) |
20889 (extract32(ctx
->opcode
, 5, 1))) {
20892 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20893 extract32(ctx
->opcode
, 6, 5));
20896 generate_exception_end(ctx
, EXCP_RI
);
20901 switch (((ctx
->opcode
>> 10) & 2) |
20902 (extract32(ctx
->opcode
, 5, 1))) {
20905 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20906 extract32(ctx
->opcode
, 6, 5));
20909 generate_exception_end(ctx
, EXCP_RI
);
20914 generate_exception_end(ctx
, EXCP_RI
);
20919 gen_pool32f_nanomips_insn(ctx
);
20924 switch (extract32(ctx
->opcode
, 1, 1)) {
20927 tcg_gen_movi_tl(cpu_gpr
[rt
],
20928 sextract32(ctx
->opcode
, 0, 1) << 31 |
20929 extract32(ctx
->opcode
, 2, 10) << 21 |
20930 extract32(ctx
->opcode
, 12, 9) << 12);
20935 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
20936 extract32(ctx
->opcode
, 2, 10) << 21 |
20937 extract32(ctx
->opcode
, 12, 9) << 12;
20939 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20940 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20947 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
20949 switch (extract32(ctx
->opcode
, 18, 3)) {
20951 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
20954 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
20957 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
20961 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
20966 switch (ctx
->opcode
& 1) {
20968 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
20971 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
20977 switch (ctx
->opcode
& 1) {
20979 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
20982 generate_exception_end(ctx
, EXCP_RI
);
20988 switch (ctx
->opcode
& 0x3) {
20990 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
20993 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
20996 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
20999 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21004 generate_exception_end(ctx
, EXCP_RI
);
21011 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21013 switch (extract32(ctx
->opcode
, 12, 4)) {
21017 /* Break the TB to be able to sync copied instructions
21019 ctx
->base
.is_jmp
= DISAS_STOP
;
21022 /* Treat as NOP. */
21026 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21029 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21032 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21035 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21038 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21041 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21044 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21047 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21050 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21053 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21056 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21059 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21062 generate_exception_end(ctx
, EXCP_RI
);
21069 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21070 extract32(ctx
->opcode
, 0, 8);
21072 switch (extract32(ctx
->opcode
, 8, 3)) {
21074 switch (extract32(ctx
->opcode
, 11, 4)) {
21076 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21079 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21082 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21085 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21088 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21091 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21094 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21097 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21100 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21103 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21106 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21109 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21114 /* Break the TB to be able to sync copied instructions
21116 ctx
->base
.is_jmp
= DISAS_STOP
;
21119 /* Treat as NOP. */
21123 generate_exception_end(ctx
, EXCP_RI
);
21128 switch (extract32(ctx
->opcode
, 11, 4)) {
21133 TCGv t0
= tcg_temp_new();
21134 TCGv t1
= tcg_temp_new();
21136 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21138 switch (extract32(ctx
->opcode
, 11, 4)) {
21140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21142 gen_store_gpr(t0
, rt
);
21145 gen_load_gpr(t1
, rt
);
21146 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21155 switch (ctx
->opcode
& 0x03) {
21157 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21161 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21166 switch (ctx
->opcode
& 0x03) {
21168 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21172 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21177 check_cp0_enabled(ctx
);
21178 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21179 gen_cache_operation(ctx
, rt
, rs
, s
);
21188 int count
= extract32(ctx
->opcode
, 12, 3);
21191 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21192 extract32(ctx
->opcode
, 0, 8);
21193 TCGv va
= tcg_temp_new();
21194 TCGv t1
= tcg_temp_new();
21195 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21196 NM_P_LS_UAWM
? MO_UNALN
: 0;
21198 count
= (count
== 0) ? 8 : count
;
21199 while (counter
!= count
) {
21200 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21201 int this_offset
= offset
+ (counter
<< 2);
21203 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21205 switch (extract32(ctx
->opcode
, 11, 1)) {
21207 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21209 gen_store_gpr(t1
, this_rt
);
21210 if ((this_rt
== rs
) &&
21211 (counter
!= (count
- 1))) {
21212 /* UNPREDICTABLE */
21216 this_rt
= (rt
== 0) ? 0 : this_rt
;
21217 gen_load_gpr(t1
, this_rt
);
21218 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21229 generate_exception_end(ctx
, EXCP_RI
);
21237 TCGv t0
= tcg_temp_new();
21238 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21239 extract32(ctx
->opcode
, 1, 20) << 1;
21240 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21241 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21242 extract32(ctx
->opcode
, 21, 3));
21243 gen_load_gpr(t0
, rt
);
21244 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21245 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21251 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21252 extract32(ctx
->opcode
, 1, 24) << 1;
21254 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21256 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21259 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21264 switch (extract32(ctx
->opcode
, 12, 4)) {
21267 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21270 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21273 generate_exception_end(ctx
, EXCP_RI
);
21279 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21280 extract32(ctx
->opcode
, 1, 13) << 1;
21281 switch (extract32(ctx
->opcode
, 14, 2)) {
21284 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21287 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21288 extract32(ctx
->opcode
, 1, 13) << 1;
21289 check_cp1_enabled(ctx
);
21290 switch (extract32(ctx
->opcode
, 16, 5)) {
21292 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21295 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21300 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21301 extract32(ctx
->opcode
, 0, 1) << 13;
21303 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21308 generate_exception_end(ctx
, EXCP_RI
);
21314 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21316 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21320 if (rs
== rt
|| rt
== 0) {
21321 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21322 } else if (rs
== 0) {
21323 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21325 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21333 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21334 extract32(ctx
->opcode
, 1, 13) << 1;
21335 switch (extract32(ctx
->opcode
, 14, 2)) {
21338 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21341 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21343 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21345 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21349 if (rs
== 0 || rs
== rt
) {
21351 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21353 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21357 generate_exception_end(ctx
, EXCP_RI
);
21364 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21365 extract32(ctx
->opcode
, 1, 10) << 1;
21366 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21368 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21373 generate_exception_end(ctx
, EXCP_RI
);
21379 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21382 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21383 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21384 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21388 /* make sure instructions are on a halfword boundary */
21389 if (ctx
->base
.pc_next
& 0x1) {
21390 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21391 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21392 tcg_temp_free(tmp
);
21393 generate_exception_end(ctx
, EXCP_AdEL
);
21397 op
= extract32(ctx
->opcode
, 10, 6);
21400 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21403 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21404 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21407 switch (extract32(ctx
->opcode
, 3, 2)) {
21408 case NM_P16_SYSCALL
:
21409 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21410 generate_exception_end(ctx
, EXCP_SYSCALL
);
21412 generate_exception_end(ctx
, EXCP_RI
);
21416 generate_exception_end(ctx
, EXCP_BREAK
);
21419 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21420 gen_helper_do_semihosting(cpu_env
);
21422 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21423 generate_exception_end(ctx
, EXCP_RI
);
21425 generate_exception_end(ctx
, EXCP_DBp
);
21430 generate_exception_end(ctx
, EXCP_RI
);
21437 int shift
= extract32(ctx
->opcode
, 0, 3);
21439 shift
= (shift
== 0) ? 8 : shift
;
21441 switch (extract32(ctx
->opcode
, 3, 1)) {
21449 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21453 switch (ctx
->opcode
& 1) {
21455 gen_pool16c_nanomips_insn(ctx
);
21458 gen_ldxs(ctx
, rt
, rs
, rd
);
21463 switch (extract32(ctx
->opcode
, 6, 1)) {
21465 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21466 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21469 generate_exception_end(ctx
, EXCP_RI
);
21474 switch (extract32(ctx
->opcode
, 3, 1)) {
21476 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21477 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21479 case NM_P_ADDIURS5
:
21480 rt
= extract32(ctx
->opcode
, 5, 5);
21482 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21483 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21484 (extract32(ctx
->opcode
, 0, 3));
21485 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21491 switch (ctx
->opcode
& 0x1) {
21493 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21496 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21501 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21502 extract32(ctx
->opcode
, 5, 3);
21503 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21504 extract32(ctx
->opcode
, 0, 3);
21505 rt
= decode_gpr_gpr4(rt
);
21506 rs
= decode_gpr_gpr4(rs
);
21507 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21508 (extract32(ctx
->opcode
, 3, 1))) {
21511 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21515 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21518 generate_exception_end(ctx
, EXCP_RI
);
21524 int imm
= extract32(ctx
->opcode
, 0, 7);
21525 imm
= (imm
== 0x7f ? -1 : imm
);
21527 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21533 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21534 u
= (u
== 12) ? 0xff :
21535 (u
== 13) ? 0xffff : u
;
21536 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21540 offset
= extract32(ctx
->opcode
, 0, 2);
21541 switch (extract32(ctx
->opcode
, 2, 2)) {
21543 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21546 rt
= decode_gpr_gpr3_src_store(
21547 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21548 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21551 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21554 generate_exception_end(ctx
, EXCP_RI
);
21559 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21560 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21562 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21565 rt
= decode_gpr_gpr3_src_store(
21566 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21567 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21570 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21573 generate_exception_end(ctx
, EXCP_RI
);
21578 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21579 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21582 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21583 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21584 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21588 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21589 extract32(ctx
->opcode
, 5, 3);
21590 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21591 extract32(ctx
->opcode
, 0, 3);
21592 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21593 (extract32(ctx
->opcode
, 8, 1) << 2);
21594 rt
= decode_gpr_gpr4(rt
);
21595 rs
= decode_gpr_gpr4(rs
);
21596 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21600 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21601 extract32(ctx
->opcode
, 5, 3);
21602 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21603 extract32(ctx
->opcode
, 0, 3);
21604 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21605 (extract32(ctx
->opcode
, 8, 1) << 2);
21606 rt
= decode_gpr_gpr4_zero(rt
);
21607 rs
= decode_gpr_gpr4(rs
);
21608 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21611 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21612 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21615 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21616 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21617 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21620 rt
= decode_gpr_gpr3_src_store(
21621 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21622 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21623 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21624 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21627 rt
= decode_gpr_gpr3_src_store(
21628 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21629 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21630 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21633 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21634 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21635 (extract32(ctx
->opcode
, 1, 9) << 1));
21638 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21639 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21640 (extract32(ctx
->opcode
, 1, 9) << 1));
21643 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21644 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21645 (extract32(ctx
->opcode
, 1, 6) << 1));
21648 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21649 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21650 (extract32(ctx
->opcode
, 1, 6) << 1));
21653 switch (ctx
->opcode
& 0xf) {
21656 switch (extract32(ctx
->opcode
, 4, 1)) {
21658 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21659 extract32(ctx
->opcode
, 5, 5), 0, 0);
21662 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21663 extract32(ctx
->opcode
, 5, 5), 31, 0);
21670 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21671 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21672 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21673 extract32(ctx
->opcode
, 0, 4) << 1);
21680 int count
= extract32(ctx
->opcode
, 0, 4);
21681 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21683 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21684 switch (extract32(ctx
->opcode
, 8, 1)) {
21686 gen_save(ctx
, rt
, count
, 0, u
);
21688 case NM_RESTORE_JRC16
:
21689 gen_restore(ctx
, rt
, count
, 0, u
);
21690 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21699 static const int gpr2reg1
[] = {4, 5, 6, 7};
21700 static const int gpr2reg2
[] = {5, 6, 7, 8};
21702 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21703 extract32(ctx
->opcode
, 8, 1);
21704 int r1
= gpr2reg1
[rd2
];
21705 int r2
= gpr2reg2
[rd2
];
21706 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21707 extract32(ctx
->opcode
, 0, 3);
21708 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21709 extract32(ctx
->opcode
, 5, 3);
21710 TCGv t0
= tcg_temp_new();
21711 TCGv t1
= tcg_temp_new();
21712 if (op
== NM_MOVEP
) {
21715 rs
= decode_gpr_gpr4_zero(r3
);
21716 rt
= decode_gpr_gpr4_zero(r4
);
21718 rd
= decode_gpr_gpr4(r3
);
21719 re
= decode_gpr_gpr4(r4
);
21723 gen_load_gpr(t0
, rs
);
21724 gen_load_gpr(t1
, rt
);
21725 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21726 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21732 return decode_nanomips_32_48_opc(env
, ctx
);
21739 /* SmartMIPS extension to MIPS32 */
21741 #if defined(TARGET_MIPS64)
21743 /* MDMX extension to MIPS64 */
21747 /* MIPSDSP functions. */
21748 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21749 int rd
, int base
, int offset
)
21754 t0
= tcg_temp_new();
21757 gen_load_gpr(t0
, offset
);
21758 } else if (offset
== 0) {
21759 gen_load_gpr(t0
, base
);
21761 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21766 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21767 gen_store_gpr(t0
, rd
);
21770 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21771 gen_store_gpr(t0
, rd
);
21774 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21775 gen_store_gpr(t0
, rd
);
21777 #if defined(TARGET_MIPS64)
21779 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21780 gen_store_gpr(t0
, rd
);
21787 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21788 int ret
, int v1
, int v2
)
21794 /* Treat as NOP. */
21798 v1_t
= tcg_temp_new();
21799 v2_t
= tcg_temp_new();
21801 gen_load_gpr(v1_t
, v1
);
21802 gen_load_gpr(v2_t
, v2
);
21805 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21806 case OPC_MULT_G_2E
:
21810 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21812 case OPC_ADDUH_R_QB
:
21813 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21816 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21818 case OPC_ADDQH_R_PH
:
21819 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21822 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21824 case OPC_ADDQH_R_W
:
21825 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21828 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21830 case OPC_SUBUH_R_QB
:
21831 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21834 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21836 case OPC_SUBQH_R_PH
:
21837 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21840 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21842 case OPC_SUBQH_R_W
:
21843 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21847 case OPC_ABSQ_S_PH_DSP
:
21849 case OPC_ABSQ_S_QB
:
21851 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
21853 case OPC_ABSQ_S_PH
:
21855 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
21859 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
21861 case OPC_PRECEQ_W_PHL
:
21863 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
21864 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21866 case OPC_PRECEQ_W_PHR
:
21868 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
21869 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
21870 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21872 case OPC_PRECEQU_PH_QBL
:
21874 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
21876 case OPC_PRECEQU_PH_QBR
:
21878 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
21880 case OPC_PRECEQU_PH_QBLA
:
21882 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
21884 case OPC_PRECEQU_PH_QBRA
:
21886 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
21888 case OPC_PRECEU_PH_QBL
:
21890 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
21892 case OPC_PRECEU_PH_QBR
:
21894 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
21896 case OPC_PRECEU_PH_QBLA
:
21898 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
21900 case OPC_PRECEU_PH_QBRA
:
21902 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
21906 case OPC_ADDU_QB_DSP
:
21910 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21912 case OPC_ADDQ_S_PH
:
21914 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21918 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21922 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21924 case OPC_ADDU_S_QB
:
21926 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21930 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21932 case OPC_ADDU_S_PH
:
21934 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21938 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21940 case OPC_SUBQ_S_PH
:
21942 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21946 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21950 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21952 case OPC_SUBU_S_QB
:
21954 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21958 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21960 case OPC_SUBU_S_PH
:
21962 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21966 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21970 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21974 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
21976 case OPC_RADDU_W_QB
:
21978 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
21982 case OPC_CMPU_EQ_QB_DSP
:
21984 case OPC_PRECR_QB_PH
:
21986 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21988 case OPC_PRECRQ_QB_PH
:
21990 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21992 case OPC_PRECR_SRA_PH_W
:
21995 TCGv_i32 sa_t
= tcg_const_i32(v2
);
21996 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
21998 tcg_temp_free_i32(sa_t
);
22001 case OPC_PRECR_SRA_R_PH_W
:
22004 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22005 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22007 tcg_temp_free_i32(sa_t
);
22010 case OPC_PRECRQ_PH_W
:
22012 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22014 case OPC_PRECRQ_RS_PH_W
:
22016 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22018 case OPC_PRECRQU_S_QB_PH
:
22020 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22024 #ifdef TARGET_MIPS64
22025 case OPC_ABSQ_S_QH_DSP
:
22027 case OPC_PRECEQ_L_PWL
:
22029 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22031 case OPC_PRECEQ_L_PWR
:
22033 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22035 case OPC_PRECEQ_PW_QHL
:
22037 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22039 case OPC_PRECEQ_PW_QHR
:
22041 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22043 case OPC_PRECEQ_PW_QHLA
:
22045 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22047 case OPC_PRECEQ_PW_QHRA
:
22049 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22051 case OPC_PRECEQU_QH_OBL
:
22053 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22055 case OPC_PRECEQU_QH_OBR
:
22057 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22059 case OPC_PRECEQU_QH_OBLA
:
22061 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22063 case OPC_PRECEQU_QH_OBRA
:
22065 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22067 case OPC_PRECEU_QH_OBL
:
22069 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22071 case OPC_PRECEU_QH_OBR
:
22073 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22075 case OPC_PRECEU_QH_OBLA
:
22077 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22079 case OPC_PRECEU_QH_OBRA
:
22081 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22083 case OPC_ABSQ_S_OB
:
22085 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22087 case OPC_ABSQ_S_PW
:
22089 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22091 case OPC_ABSQ_S_QH
:
22093 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22097 case OPC_ADDU_OB_DSP
:
22099 case OPC_RADDU_L_OB
:
22101 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22105 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22107 case OPC_SUBQ_S_PW
:
22109 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22113 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22115 case OPC_SUBQ_S_QH
:
22117 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22121 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22123 case OPC_SUBU_S_OB
:
22125 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22129 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22131 case OPC_SUBU_S_QH
:
22133 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22137 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22139 case OPC_SUBUH_R_OB
:
22141 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22145 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22147 case OPC_ADDQ_S_PW
:
22149 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22153 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22155 case OPC_ADDQ_S_QH
:
22157 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22161 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22163 case OPC_ADDU_S_OB
:
22165 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22169 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22171 case OPC_ADDU_S_QH
:
22173 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22177 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22179 case OPC_ADDUH_R_OB
:
22181 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22185 case OPC_CMPU_EQ_OB_DSP
:
22187 case OPC_PRECR_OB_QH
:
22189 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22191 case OPC_PRECR_SRA_QH_PW
:
22194 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22195 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22196 tcg_temp_free_i32(ret_t
);
22199 case OPC_PRECR_SRA_R_QH_PW
:
22202 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22203 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22204 tcg_temp_free_i32(sa_v
);
22207 case OPC_PRECRQ_OB_QH
:
22209 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22211 case OPC_PRECRQ_PW_L
:
22213 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22215 case OPC_PRECRQ_QH_PW
:
22217 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22219 case OPC_PRECRQ_RS_QH_PW
:
22221 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22223 case OPC_PRECRQU_S_OB_QH
:
22225 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22232 tcg_temp_free(v1_t
);
22233 tcg_temp_free(v2_t
);
22236 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22237 int ret
, int v1
, int v2
)
22245 /* Treat as NOP. */
22249 t0
= tcg_temp_new();
22250 v1_t
= tcg_temp_new();
22251 v2_t
= tcg_temp_new();
22253 tcg_gen_movi_tl(t0
, v1
);
22254 gen_load_gpr(v1_t
, v1
);
22255 gen_load_gpr(v2_t
, v2
);
22258 case OPC_SHLL_QB_DSP
:
22260 op2
= MASK_SHLL_QB(ctx
->opcode
);
22264 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22268 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22272 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22276 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22278 case OPC_SHLL_S_PH
:
22280 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22282 case OPC_SHLLV_S_PH
:
22284 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22288 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22290 case OPC_SHLLV_S_W
:
22292 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22296 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22300 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22304 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22308 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22312 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22314 case OPC_SHRA_R_QB
:
22316 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22320 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22322 case OPC_SHRAV_R_QB
:
22324 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22328 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22330 case OPC_SHRA_R_PH
:
22332 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22336 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22338 case OPC_SHRAV_R_PH
:
22340 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22344 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22346 case OPC_SHRAV_R_W
:
22348 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22350 default: /* Invalid */
22351 MIPS_INVAL("MASK SHLL.QB");
22352 generate_exception_end(ctx
, EXCP_RI
);
22357 #ifdef TARGET_MIPS64
22358 case OPC_SHLL_OB_DSP
:
22359 op2
= MASK_SHLL_OB(ctx
->opcode
);
22363 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22367 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22369 case OPC_SHLL_S_PW
:
22371 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22373 case OPC_SHLLV_S_PW
:
22375 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22379 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22383 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22387 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22391 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22393 case OPC_SHLL_S_QH
:
22395 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22397 case OPC_SHLLV_S_QH
:
22399 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22403 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22407 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22409 case OPC_SHRA_R_OB
:
22411 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22413 case OPC_SHRAV_R_OB
:
22415 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22419 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22423 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22425 case OPC_SHRA_R_PW
:
22427 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22429 case OPC_SHRAV_R_PW
:
22431 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22435 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22439 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22441 case OPC_SHRA_R_QH
:
22443 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22445 case OPC_SHRAV_R_QH
:
22447 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22451 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22455 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22459 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22463 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22465 default: /* Invalid */
22466 MIPS_INVAL("MASK SHLL.OB");
22467 generate_exception_end(ctx
, EXCP_RI
);
22475 tcg_temp_free(v1_t
);
22476 tcg_temp_free(v2_t
);
22479 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22480 int ret
, int v1
, int v2
, int check_ret
)
22486 if ((ret
== 0) && (check_ret
== 1)) {
22487 /* Treat as NOP. */
22491 t0
= tcg_temp_new_i32();
22492 v1_t
= tcg_temp_new();
22493 v2_t
= tcg_temp_new();
22495 tcg_gen_movi_i32(t0
, ret
);
22496 gen_load_gpr(v1_t
, v1
);
22497 gen_load_gpr(v2_t
, v2
);
22500 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22501 * the same mask and op1. */
22502 case OPC_MULT_G_2E
:
22506 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22509 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22512 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22514 case OPC_MULQ_RS_W
:
22515 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22519 case OPC_DPA_W_PH_DSP
:
22521 case OPC_DPAU_H_QBL
:
22523 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22525 case OPC_DPAU_H_QBR
:
22527 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22529 case OPC_DPSU_H_QBL
:
22531 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22533 case OPC_DPSU_H_QBR
:
22535 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22539 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22541 case OPC_DPAX_W_PH
:
22543 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22545 case OPC_DPAQ_S_W_PH
:
22547 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22549 case OPC_DPAQX_S_W_PH
:
22551 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22553 case OPC_DPAQX_SA_W_PH
:
22555 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22559 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22561 case OPC_DPSX_W_PH
:
22563 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22565 case OPC_DPSQ_S_W_PH
:
22567 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22569 case OPC_DPSQX_S_W_PH
:
22571 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22573 case OPC_DPSQX_SA_W_PH
:
22575 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22577 case OPC_MULSAQ_S_W_PH
:
22579 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22581 case OPC_DPAQ_SA_L_W
:
22583 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22585 case OPC_DPSQ_SA_L_W
:
22587 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22589 case OPC_MAQ_S_W_PHL
:
22591 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22593 case OPC_MAQ_S_W_PHR
:
22595 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22597 case OPC_MAQ_SA_W_PHL
:
22599 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22601 case OPC_MAQ_SA_W_PHR
:
22603 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22605 case OPC_MULSA_W_PH
:
22607 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22611 #ifdef TARGET_MIPS64
22612 case OPC_DPAQ_W_QH_DSP
:
22614 int ac
= ret
& 0x03;
22615 tcg_gen_movi_i32(t0
, ac
);
22620 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22624 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22628 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22632 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22636 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22638 case OPC_DPAQ_S_W_QH
:
22640 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22642 case OPC_DPAQ_SA_L_PW
:
22644 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22646 case OPC_DPAU_H_OBL
:
22648 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22650 case OPC_DPAU_H_OBR
:
22652 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22656 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22658 case OPC_DPSQ_S_W_QH
:
22660 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22662 case OPC_DPSQ_SA_L_PW
:
22664 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22666 case OPC_DPSU_H_OBL
:
22668 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22670 case OPC_DPSU_H_OBR
:
22672 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22674 case OPC_MAQ_S_L_PWL
:
22676 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22678 case OPC_MAQ_S_L_PWR
:
22680 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22682 case OPC_MAQ_S_W_QHLL
:
22684 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22686 case OPC_MAQ_SA_W_QHLL
:
22688 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22690 case OPC_MAQ_S_W_QHLR
:
22692 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22694 case OPC_MAQ_SA_W_QHLR
:
22696 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22698 case OPC_MAQ_S_W_QHRL
:
22700 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22702 case OPC_MAQ_SA_W_QHRL
:
22704 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22706 case OPC_MAQ_S_W_QHRR
:
22708 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22710 case OPC_MAQ_SA_W_QHRR
:
22712 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22714 case OPC_MULSAQ_S_L_PW
:
22716 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22718 case OPC_MULSAQ_S_W_QH
:
22720 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22726 case OPC_ADDU_QB_DSP
:
22728 case OPC_MULEU_S_PH_QBL
:
22730 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22732 case OPC_MULEU_S_PH_QBR
:
22734 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22736 case OPC_MULQ_RS_PH
:
22738 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22740 case OPC_MULEQ_S_W_PHL
:
22742 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22744 case OPC_MULEQ_S_W_PHR
:
22746 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22748 case OPC_MULQ_S_PH
:
22750 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22754 #ifdef TARGET_MIPS64
22755 case OPC_ADDU_OB_DSP
:
22757 case OPC_MULEQ_S_PW_QHL
:
22759 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22761 case OPC_MULEQ_S_PW_QHR
:
22763 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22765 case OPC_MULEU_S_QH_OBL
:
22767 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22769 case OPC_MULEU_S_QH_OBR
:
22771 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22773 case OPC_MULQ_RS_QH
:
22775 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22782 tcg_temp_free_i32(t0
);
22783 tcg_temp_free(v1_t
);
22784 tcg_temp_free(v2_t
);
22787 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22795 /* Treat as NOP. */
22799 t0
= tcg_temp_new();
22800 val_t
= tcg_temp_new();
22801 gen_load_gpr(val_t
, val
);
22804 case OPC_ABSQ_S_PH_DSP
:
22808 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22813 target_long result
;
22814 imm
= (ctx
->opcode
>> 16) & 0xFF;
22815 result
= (uint32_t)imm
<< 24 |
22816 (uint32_t)imm
<< 16 |
22817 (uint32_t)imm
<< 8 |
22819 result
= (int32_t)result
;
22820 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
22825 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22826 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22827 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22828 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22829 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22830 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22835 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22836 imm
= (int16_t)(imm
<< 6) >> 6;
22837 tcg_gen_movi_tl(cpu_gpr
[ret
], \
22838 (target_long
)((int32_t)imm
<< 16 | \
22844 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22845 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22846 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22847 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22851 #ifdef TARGET_MIPS64
22852 case OPC_ABSQ_S_QH_DSP
:
22859 imm
= (ctx
->opcode
>> 16) & 0xFF;
22860 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
22861 temp
= (temp
<< 16) | temp
;
22862 temp
= (temp
<< 32) | temp
;
22863 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22871 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22872 imm
= (int16_t)(imm
<< 6) >> 6;
22873 temp
= ((target_long
)imm
<< 32) \
22874 | ((target_long
)imm
& 0xFFFFFFFF);
22875 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22883 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22884 imm
= (int16_t)(imm
<< 6) >> 6;
22886 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
22887 ((uint64_t)(uint16_t)imm
<< 32) |
22888 ((uint64_t)(uint16_t)imm
<< 16) |
22889 (uint64_t)(uint16_t)imm
;
22890 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22895 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22896 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22897 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22898 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22899 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22900 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22901 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22905 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
22906 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22907 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22911 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22912 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22913 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22914 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22915 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22922 tcg_temp_free(val_t
);
22925 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
22926 uint32_t op1
, uint32_t op2
,
22927 int ret
, int v1
, int v2
, int check_ret
)
22933 if ((ret
== 0) && (check_ret
== 1)) {
22934 /* Treat as NOP. */
22938 t1
= tcg_temp_new();
22939 v1_t
= tcg_temp_new();
22940 v2_t
= tcg_temp_new();
22942 gen_load_gpr(v1_t
, v1
);
22943 gen_load_gpr(v2_t
, v2
);
22946 case OPC_CMPU_EQ_QB_DSP
:
22948 case OPC_CMPU_EQ_QB
:
22950 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
22952 case OPC_CMPU_LT_QB
:
22954 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
22956 case OPC_CMPU_LE_QB
:
22958 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
22960 case OPC_CMPGU_EQ_QB
:
22962 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22964 case OPC_CMPGU_LT_QB
:
22966 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22968 case OPC_CMPGU_LE_QB
:
22970 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22972 case OPC_CMPGDU_EQ_QB
:
22974 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
22975 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22976 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22977 tcg_gen_shli_tl(t1
, t1
, 24);
22978 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22980 case OPC_CMPGDU_LT_QB
:
22982 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
22983 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22984 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22985 tcg_gen_shli_tl(t1
, t1
, 24);
22986 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22988 case OPC_CMPGDU_LE_QB
:
22990 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
22991 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22992 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22993 tcg_gen_shli_tl(t1
, t1
, 24);
22994 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22996 case OPC_CMP_EQ_PH
:
22998 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23000 case OPC_CMP_LT_PH
:
23002 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23004 case OPC_CMP_LE_PH
:
23006 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23010 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23014 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23016 case OPC_PACKRL_PH
:
23018 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23022 #ifdef TARGET_MIPS64
23023 case OPC_CMPU_EQ_OB_DSP
:
23025 case OPC_CMP_EQ_PW
:
23027 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23029 case OPC_CMP_LT_PW
:
23031 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23033 case OPC_CMP_LE_PW
:
23035 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23037 case OPC_CMP_EQ_QH
:
23039 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23041 case OPC_CMP_LT_QH
:
23043 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23045 case OPC_CMP_LE_QH
:
23047 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23049 case OPC_CMPGDU_EQ_OB
:
23051 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23053 case OPC_CMPGDU_LT_OB
:
23055 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23057 case OPC_CMPGDU_LE_OB
:
23059 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23061 case OPC_CMPGU_EQ_OB
:
23063 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23065 case OPC_CMPGU_LT_OB
:
23067 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23069 case OPC_CMPGU_LE_OB
:
23071 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23073 case OPC_CMPU_EQ_OB
:
23075 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23077 case OPC_CMPU_LT_OB
:
23079 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23081 case OPC_CMPU_LE_OB
:
23083 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23085 case OPC_PACKRL_PW
:
23087 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23091 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23095 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23099 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23107 tcg_temp_free(v1_t
);
23108 tcg_temp_free(v2_t
);
23111 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23112 uint32_t op1
, int rt
, int rs
, int sa
)
23119 /* Treat as NOP. */
23123 t0
= tcg_temp_new();
23124 gen_load_gpr(t0
, rs
);
23127 case OPC_APPEND_DSP
:
23128 switch (MASK_APPEND(ctx
->opcode
)) {
23131 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23133 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23137 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23138 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23139 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23140 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23142 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23146 if (sa
!= 0 && sa
!= 2) {
23147 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23148 tcg_gen_ext32u_tl(t0
, t0
);
23149 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23150 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23152 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23154 default: /* Invalid */
23155 MIPS_INVAL("MASK APPEND");
23156 generate_exception_end(ctx
, EXCP_RI
);
23160 #ifdef TARGET_MIPS64
23161 case OPC_DAPPEND_DSP
:
23162 switch (MASK_DAPPEND(ctx
->opcode
)) {
23165 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23169 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23170 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23171 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23175 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23176 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23177 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23182 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23183 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23184 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23185 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23188 default: /* Invalid */
23189 MIPS_INVAL("MASK DAPPEND");
23190 generate_exception_end(ctx
, EXCP_RI
);
23199 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23200 int ret
, int v1
, int v2
, int check_ret
)
23209 if ((ret
== 0) && (check_ret
== 1)) {
23210 /* Treat as NOP. */
23214 t0
= tcg_temp_new();
23215 t1
= tcg_temp_new();
23216 v1_t
= tcg_temp_new();
23217 v2_t
= tcg_temp_new();
23219 gen_load_gpr(v1_t
, v1
);
23220 gen_load_gpr(v2_t
, v2
);
23223 case OPC_EXTR_W_DSP
:
23227 tcg_gen_movi_tl(t0
, v2
);
23228 tcg_gen_movi_tl(t1
, v1
);
23229 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23232 tcg_gen_movi_tl(t0
, v2
);
23233 tcg_gen_movi_tl(t1
, v1
);
23234 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23236 case OPC_EXTR_RS_W
:
23237 tcg_gen_movi_tl(t0
, v2
);
23238 tcg_gen_movi_tl(t1
, v1
);
23239 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23242 tcg_gen_movi_tl(t0
, v2
);
23243 tcg_gen_movi_tl(t1
, v1
);
23244 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23246 case OPC_EXTRV_S_H
:
23247 tcg_gen_movi_tl(t0
, v2
);
23248 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23251 tcg_gen_movi_tl(t0
, v2
);
23252 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23254 case OPC_EXTRV_R_W
:
23255 tcg_gen_movi_tl(t0
, v2
);
23256 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23258 case OPC_EXTRV_RS_W
:
23259 tcg_gen_movi_tl(t0
, v2
);
23260 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23263 tcg_gen_movi_tl(t0
, v2
);
23264 tcg_gen_movi_tl(t1
, v1
);
23265 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23268 tcg_gen_movi_tl(t0
, v2
);
23269 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23272 tcg_gen_movi_tl(t0
, v2
);
23273 tcg_gen_movi_tl(t1
, v1
);
23274 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23277 tcg_gen_movi_tl(t0
, v2
);
23278 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23281 imm
= (ctx
->opcode
>> 20) & 0x3F;
23282 tcg_gen_movi_tl(t0
, ret
);
23283 tcg_gen_movi_tl(t1
, imm
);
23284 gen_helper_shilo(t0
, t1
, cpu_env
);
23287 tcg_gen_movi_tl(t0
, ret
);
23288 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23291 tcg_gen_movi_tl(t0
, ret
);
23292 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23295 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23296 tcg_gen_movi_tl(t0
, imm
);
23297 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23300 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23301 tcg_gen_movi_tl(t0
, imm
);
23302 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23306 #ifdef TARGET_MIPS64
23307 case OPC_DEXTR_W_DSP
:
23311 tcg_gen_movi_tl(t0
, ret
);
23312 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23316 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23317 int ac
= (ctx
->opcode
>> 11) & 0x03;
23318 tcg_gen_movi_tl(t0
, shift
);
23319 tcg_gen_movi_tl(t1
, ac
);
23320 gen_helper_dshilo(t0
, t1
, cpu_env
);
23325 int ac
= (ctx
->opcode
>> 11) & 0x03;
23326 tcg_gen_movi_tl(t0
, ac
);
23327 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23331 tcg_gen_movi_tl(t0
, v2
);
23332 tcg_gen_movi_tl(t1
, v1
);
23334 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23337 tcg_gen_movi_tl(t0
, v2
);
23338 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23341 tcg_gen_movi_tl(t0
, v2
);
23342 tcg_gen_movi_tl(t1
, v1
);
23343 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23346 tcg_gen_movi_tl(t0
, v2
);
23347 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23350 tcg_gen_movi_tl(t0
, v2
);
23351 tcg_gen_movi_tl(t1
, v1
);
23352 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23354 case OPC_DEXTR_R_L
:
23355 tcg_gen_movi_tl(t0
, v2
);
23356 tcg_gen_movi_tl(t1
, v1
);
23357 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23359 case OPC_DEXTR_RS_L
:
23360 tcg_gen_movi_tl(t0
, v2
);
23361 tcg_gen_movi_tl(t1
, v1
);
23362 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23365 tcg_gen_movi_tl(t0
, v2
);
23366 tcg_gen_movi_tl(t1
, v1
);
23367 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23369 case OPC_DEXTR_R_W
:
23370 tcg_gen_movi_tl(t0
, v2
);
23371 tcg_gen_movi_tl(t1
, v1
);
23372 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23374 case OPC_DEXTR_RS_W
:
23375 tcg_gen_movi_tl(t0
, v2
);
23376 tcg_gen_movi_tl(t1
, v1
);
23377 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23379 case OPC_DEXTR_S_H
:
23380 tcg_gen_movi_tl(t0
, v2
);
23381 tcg_gen_movi_tl(t1
, v1
);
23382 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23384 case OPC_DEXTRV_S_H
:
23385 tcg_gen_movi_tl(t0
, v2
);
23386 tcg_gen_movi_tl(t1
, v1
);
23387 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23390 tcg_gen_movi_tl(t0
, v2
);
23391 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23393 case OPC_DEXTRV_R_L
:
23394 tcg_gen_movi_tl(t0
, v2
);
23395 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23397 case OPC_DEXTRV_RS_L
:
23398 tcg_gen_movi_tl(t0
, v2
);
23399 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23402 tcg_gen_movi_tl(t0
, v2
);
23403 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23405 case OPC_DEXTRV_R_W
:
23406 tcg_gen_movi_tl(t0
, v2
);
23407 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23409 case OPC_DEXTRV_RS_W
:
23410 tcg_gen_movi_tl(t0
, v2
);
23411 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23420 tcg_temp_free(v1_t
);
23421 tcg_temp_free(v2_t
);
23424 /* End MIPSDSP functions. */
23426 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23428 int rs
, rt
, rd
, sa
;
23431 rs
= (ctx
->opcode
>> 21) & 0x1f;
23432 rt
= (ctx
->opcode
>> 16) & 0x1f;
23433 rd
= (ctx
->opcode
>> 11) & 0x1f;
23434 sa
= (ctx
->opcode
>> 6) & 0x1f;
23436 op1
= MASK_SPECIAL(ctx
->opcode
);
23439 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23445 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23455 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23458 MIPS_INVAL("special_r6 muldiv");
23459 generate_exception_end(ctx
, EXCP_RI
);
23465 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23469 if (rt
== 0 && sa
== 1) {
23470 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23471 We need additionally to check other fields */
23472 gen_cl(ctx
, op1
, rd
, rs
);
23474 generate_exception_end(ctx
, EXCP_RI
);
23478 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23479 gen_helper_do_semihosting(cpu_env
);
23481 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23482 generate_exception_end(ctx
, EXCP_RI
);
23484 generate_exception_end(ctx
, EXCP_DBp
);
23488 #if defined(TARGET_MIPS64)
23490 check_mips_64(ctx
);
23491 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23495 if (rt
== 0 && sa
== 1) {
23496 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23497 We need additionally to check other fields */
23498 check_mips_64(ctx
);
23499 gen_cl(ctx
, op1
, rd
, rs
);
23501 generate_exception_end(ctx
, EXCP_RI
);
23509 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23519 check_mips_64(ctx
);
23520 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23523 MIPS_INVAL("special_r6 muldiv");
23524 generate_exception_end(ctx
, EXCP_RI
);
23529 default: /* Invalid */
23530 MIPS_INVAL("special_r6");
23531 generate_exception_end(ctx
, EXCP_RI
);
23536 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23538 int rs
, rt
, rd
, sa
;
23541 rs
= (ctx
->opcode
>> 21) & 0x1f;
23542 rt
= (ctx
->opcode
>> 16) & 0x1f;
23543 rd
= (ctx
->opcode
>> 11) & 0x1f;
23544 sa
= (ctx
->opcode
>> 6) & 0x1f;
23546 op1
= MASK_SPECIAL(ctx
->opcode
);
23548 case OPC_MOVN
: /* Conditional move */
23550 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23551 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
23552 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23554 case OPC_MFHI
: /* Move from HI/LO */
23556 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23559 case OPC_MTLO
: /* Move to HI/LO */
23560 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23563 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23564 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23565 check_cp1_enabled(ctx
);
23566 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23567 (ctx
->opcode
>> 16) & 1);
23569 generate_exception_err(ctx
, EXCP_CpU
, 1);
23575 check_insn(ctx
, INSN_VR54XX
);
23576 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23577 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23578 } else if (ctx
->insn_flags
& INSN_R5900
) {
23579 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23581 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23586 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23588 #if defined(TARGET_MIPS64)
23593 check_insn(ctx
, ISA_MIPS3
);
23594 check_mips_64(ctx
);
23595 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23599 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23602 #ifdef MIPS_STRICT_STANDARD
23603 MIPS_INVAL("SPIM");
23604 generate_exception_end(ctx
, EXCP_RI
);
23606 /* Implemented as RI exception for now. */
23607 MIPS_INVAL("spim (unofficial)");
23608 generate_exception_end(ctx
, EXCP_RI
);
23611 default: /* Invalid */
23612 MIPS_INVAL("special_legacy");
23613 generate_exception_end(ctx
, EXCP_RI
);
23618 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23620 int rs
, rt
, rd
, sa
;
23623 rs
= (ctx
->opcode
>> 21) & 0x1f;
23624 rt
= (ctx
->opcode
>> 16) & 0x1f;
23625 rd
= (ctx
->opcode
>> 11) & 0x1f;
23626 sa
= (ctx
->opcode
>> 6) & 0x1f;
23628 op1
= MASK_SPECIAL(ctx
->opcode
);
23630 case OPC_SLL
: /* Shift with immediate */
23631 if (sa
== 5 && rd
== 0 &&
23632 rs
== 0 && rt
== 0) { /* PAUSE */
23633 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23634 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23635 generate_exception_end(ctx
, EXCP_RI
);
23641 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23644 switch ((ctx
->opcode
>> 21) & 0x1f) {
23646 /* rotr is decoded as srl on non-R2 CPUs */
23647 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23652 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23655 generate_exception_end(ctx
, EXCP_RI
);
23663 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23665 case OPC_SLLV
: /* Shifts */
23667 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23670 switch ((ctx
->opcode
>> 6) & 0x1f) {
23672 /* rotrv is decoded as srlv on non-R2 CPUs */
23673 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23678 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23681 generate_exception_end(ctx
, EXCP_RI
);
23685 case OPC_SLT
: /* Set on less than */
23687 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23689 case OPC_AND
: /* Logic*/
23693 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23696 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23698 case OPC_TGE
: /* Traps */
23704 check_insn(ctx
, ISA_MIPS2
);
23705 gen_trap(ctx
, op1
, rs
, rt
, -1);
23707 case OPC_LSA
: /* OPC_PMON */
23708 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23709 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23710 decode_opc_special_r6(env
, ctx
);
23712 /* Pmon entry point, also R4010 selsl */
23713 #ifdef MIPS_STRICT_STANDARD
23714 MIPS_INVAL("PMON / selsl");
23715 generate_exception_end(ctx
, EXCP_RI
);
23717 gen_helper_0e0i(pmon
, sa
);
23722 generate_exception_end(ctx
, EXCP_SYSCALL
);
23725 generate_exception_end(ctx
, EXCP_BREAK
);
23728 check_insn(ctx
, ISA_MIPS2
);
23729 gen_sync(extract32(ctx
->opcode
, 6, 5));
23732 #if defined(TARGET_MIPS64)
23733 /* MIPS64 specific opcodes */
23738 check_insn(ctx
, ISA_MIPS3
);
23739 check_mips_64(ctx
);
23740 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23743 switch ((ctx
->opcode
>> 21) & 0x1f) {
23745 /* drotr is decoded as dsrl on non-R2 CPUs */
23746 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23751 check_insn(ctx
, ISA_MIPS3
);
23752 check_mips_64(ctx
);
23753 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23756 generate_exception_end(ctx
, EXCP_RI
);
23761 switch ((ctx
->opcode
>> 21) & 0x1f) {
23763 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23764 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23769 check_insn(ctx
, ISA_MIPS3
);
23770 check_mips_64(ctx
);
23771 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23774 generate_exception_end(ctx
, EXCP_RI
);
23782 check_insn(ctx
, ISA_MIPS3
);
23783 check_mips_64(ctx
);
23784 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23788 check_insn(ctx
, ISA_MIPS3
);
23789 check_mips_64(ctx
);
23790 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23793 switch ((ctx
->opcode
>> 6) & 0x1f) {
23795 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23796 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23801 check_insn(ctx
, ISA_MIPS3
);
23802 check_mips_64(ctx
);
23803 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23806 generate_exception_end(ctx
, EXCP_RI
);
23811 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23812 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23813 decode_opc_special_r6(env
, ctx
);
23818 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23819 decode_opc_special_r6(env
, ctx
);
23821 decode_opc_special_legacy(env
, ctx
);
23826 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23833 rs
= (ctx
->opcode
>> 21) & 0x1f;
23834 rt
= (ctx
->opcode
>> 16) & 0x1f;
23835 rd
= (ctx
->opcode
>> 11) & 0x1f;
23837 op1
= MASK_SPECIAL2(ctx
->opcode
);
23839 case OPC_MADD
: /* Multiply and add/sub */
23843 check_insn(ctx
, ISA_MIPS32
);
23844 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23847 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23850 case OPC_DIVU_G_2F
:
23851 case OPC_MULT_G_2F
:
23852 case OPC_MULTU_G_2F
:
23854 case OPC_MODU_G_2F
:
23855 check_insn(ctx
, INSN_LOONGSON2F
);
23856 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23860 check_insn(ctx
, ISA_MIPS32
);
23861 gen_cl(ctx
, op1
, rd
, rs
);
23864 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23865 gen_helper_do_semihosting(cpu_env
);
23867 /* XXX: not clear which exception should be raised
23868 * when in debug mode...
23870 check_insn(ctx
, ISA_MIPS32
);
23871 generate_exception_end(ctx
, EXCP_DBp
);
23874 #if defined(TARGET_MIPS64)
23877 check_insn(ctx
, ISA_MIPS64
);
23878 check_mips_64(ctx
);
23879 gen_cl(ctx
, op1
, rd
, rs
);
23881 case OPC_DMULT_G_2F
:
23882 case OPC_DMULTU_G_2F
:
23883 case OPC_DDIV_G_2F
:
23884 case OPC_DDIVU_G_2F
:
23885 case OPC_DMOD_G_2F
:
23886 case OPC_DMODU_G_2F
:
23887 check_insn(ctx
, INSN_LOONGSON2F
);
23888 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23891 default: /* Invalid */
23892 MIPS_INVAL("special2_legacy");
23893 generate_exception_end(ctx
, EXCP_RI
);
23898 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23900 int rs
, rt
, rd
, sa
;
23904 rs
= (ctx
->opcode
>> 21) & 0x1f;
23905 rt
= (ctx
->opcode
>> 16) & 0x1f;
23906 rd
= (ctx
->opcode
>> 11) & 0x1f;
23907 sa
= (ctx
->opcode
>> 6) & 0x1f;
23908 imm
= (int16_t)ctx
->opcode
>> 7;
23910 op1
= MASK_SPECIAL3(ctx
->opcode
);
23914 /* hint codes 24-31 are reserved and signal RI */
23915 generate_exception_end(ctx
, EXCP_RI
);
23917 /* Treat as NOP. */
23920 check_cp0_enabled(ctx
);
23921 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23922 gen_cache_operation(ctx
, rt
, rs
, imm
);
23926 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23929 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23934 /* Treat as NOP. */
23937 op2
= MASK_BSHFL(ctx
->opcode
);
23940 case OPC_ALIGN_END
:
23941 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
23944 gen_bitswap(ctx
, op2
, rd
, rt
);
23949 #if defined(TARGET_MIPS64)
23951 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23954 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23957 check_mips_64(ctx
);
23960 /* Treat as NOP. */
23963 op2
= MASK_DBSHFL(ctx
->opcode
);
23966 case OPC_DALIGN_END
:
23967 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
23970 gen_bitswap(ctx
, op2
, rd
, rt
);
23977 default: /* Invalid */
23978 MIPS_INVAL("special3_r6");
23979 generate_exception_end(ctx
, EXCP_RI
);
23984 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23989 rs
= (ctx
->opcode
>> 21) & 0x1f;
23990 rt
= (ctx
->opcode
>> 16) & 0x1f;
23991 rd
= (ctx
->opcode
>> 11) & 0x1f;
23993 op1
= MASK_SPECIAL3(ctx
->opcode
);
23996 case OPC_DIVU_G_2E
:
23998 case OPC_MODU_G_2E
:
23999 case OPC_MULT_G_2E
:
24000 case OPC_MULTU_G_2E
:
24001 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
24002 * the same mask and op1. */
24003 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
24004 op2
= MASK_ADDUH_QB(ctx
->opcode
);
24007 case OPC_ADDUH_R_QB
:
24009 case OPC_ADDQH_R_PH
:
24011 case OPC_ADDQH_R_W
:
24013 case OPC_SUBUH_R_QB
:
24015 case OPC_SUBQH_R_PH
:
24017 case OPC_SUBQH_R_W
:
24018 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24023 case OPC_MULQ_RS_W
:
24024 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24027 MIPS_INVAL("MASK ADDUH.QB");
24028 generate_exception_end(ctx
, EXCP_RI
);
24031 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
24032 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24034 generate_exception_end(ctx
, EXCP_RI
);
24038 op2
= MASK_LX(ctx
->opcode
);
24040 #if defined(TARGET_MIPS64)
24046 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
24048 default: /* Invalid */
24049 MIPS_INVAL("MASK LX");
24050 generate_exception_end(ctx
, EXCP_RI
);
24054 case OPC_ABSQ_S_PH_DSP
:
24055 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
24057 case OPC_ABSQ_S_QB
:
24058 case OPC_ABSQ_S_PH
:
24060 case OPC_PRECEQ_W_PHL
:
24061 case OPC_PRECEQ_W_PHR
:
24062 case OPC_PRECEQU_PH_QBL
:
24063 case OPC_PRECEQU_PH_QBR
:
24064 case OPC_PRECEQU_PH_QBLA
:
24065 case OPC_PRECEQU_PH_QBRA
:
24066 case OPC_PRECEU_PH_QBL
:
24067 case OPC_PRECEU_PH_QBR
:
24068 case OPC_PRECEU_PH_QBLA
:
24069 case OPC_PRECEU_PH_QBRA
:
24070 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24077 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24080 MIPS_INVAL("MASK ABSQ_S.PH");
24081 generate_exception_end(ctx
, EXCP_RI
);
24085 case OPC_ADDU_QB_DSP
:
24086 op2
= MASK_ADDU_QB(ctx
->opcode
);
24089 case OPC_ADDQ_S_PH
:
24092 case OPC_ADDU_S_QB
:
24094 case OPC_ADDU_S_PH
:
24096 case OPC_SUBQ_S_PH
:
24099 case OPC_SUBU_S_QB
:
24101 case OPC_SUBU_S_PH
:
24105 case OPC_RADDU_W_QB
:
24106 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24108 case OPC_MULEU_S_PH_QBL
:
24109 case OPC_MULEU_S_PH_QBR
:
24110 case OPC_MULQ_RS_PH
:
24111 case OPC_MULEQ_S_W_PHL
:
24112 case OPC_MULEQ_S_W_PHR
:
24113 case OPC_MULQ_S_PH
:
24114 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24116 default: /* Invalid */
24117 MIPS_INVAL("MASK ADDU.QB");
24118 generate_exception_end(ctx
, EXCP_RI
);
24123 case OPC_CMPU_EQ_QB_DSP
:
24124 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
24126 case OPC_PRECR_SRA_PH_W
:
24127 case OPC_PRECR_SRA_R_PH_W
:
24128 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24130 case OPC_PRECR_QB_PH
:
24131 case OPC_PRECRQ_QB_PH
:
24132 case OPC_PRECRQ_PH_W
:
24133 case OPC_PRECRQ_RS_PH_W
:
24134 case OPC_PRECRQU_S_QB_PH
:
24135 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24137 case OPC_CMPU_EQ_QB
:
24138 case OPC_CMPU_LT_QB
:
24139 case OPC_CMPU_LE_QB
:
24140 case OPC_CMP_EQ_PH
:
24141 case OPC_CMP_LT_PH
:
24142 case OPC_CMP_LE_PH
:
24143 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24145 case OPC_CMPGU_EQ_QB
:
24146 case OPC_CMPGU_LT_QB
:
24147 case OPC_CMPGU_LE_QB
:
24148 case OPC_CMPGDU_EQ_QB
:
24149 case OPC_CMPGDU_LT_QB
:
24150 case OPC_CMPGDU_LE_QB
:
24153 case OPC_PACKRL_PH
:
24154 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24156 default: /* Invalid */
24157 MIPS_INVAL("MASK CMPU.EQ.QB");
24158 generate_exception_end(ctx
, EXCP_RI
);
24162 case OPC_SHLL_QB_DSP
:
24163 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24165 case OPC_DPA_W_PH_DSP
:
24166 op2
= MASK_DPA_W_PH(ctx
->opcode
);
24168 case OPC_DPAU_H_QBL
:
24169 case OPC_DPAU_H_QBR
:
24170 case OPC_DPSU_H_QBL
:
24171 case OPC_DPSU_H_QBR
:
24173 case OPC_DPAX_W_PH
:
24174 case OPC_DPAQ_S_W_PH
:
24175 case OPC_DPAQX_S_W_PH
:
24176 case OPC_DPAQX_SA_W_PH
:
24178 case OPC_DPSX_W_PH
:
24179 case OPC_DPSQ_S_W_PH
:
24180 case OPC_DPSQX_S_W_PH
:
24181 case OPC_DPSQX_SA_W_PH
:
24182 case OPC_MULSAQ_S_W_PH
:
24183 case OPC_DPAQ_SA_L_W
:
24184 case OPC_DPSQ_SA_L_W
:
24185 case OPC_MAQ_S_W_PHL
:
24186 case OPC_MAQ_S_W_PHR
:
24187 case OPC_MAQ_SA_W_PHL
:
24188 case OPC_MAQ_SA_W_PHR
:
24189 case OPC_MULSA_W_PH
:
24190 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24192 default: /* Invalid */
24193 MIPS_INVAL("MASK DPAW.PH");
24194 generate_exception_end(ctx
, EXCP_RI
);
24199 op2
= MASK_INSV(ctx
->opcode
);
24210 t0
= tcg_temp_new();
24211 t1
= tcg_temp_new();
24213 gen_load_gpr(t0
, rt
);
24214 gen_load_gpr(t1
, rs
);
24216 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24222 default: /* Invalid */
24223 MIPS_INVAL("MASK INSV");
24224 generate_exception_end(ctx
, EXCP_RI
);
24228 case OPC_APPEND_DSP
:
24229 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24231 case OPC_EXTR_W_DSP
:
24232 op2
= MASK_EXTR_W(ctx
->opcode
);
24236 case OPC_EXTR_RS_W
:
24238 case OPC_EXTRV_S_H
:
24240 case OPC_EXTRV_R_W
:
24241 case OPC_EXTRV_RS_W
:
24246 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24249 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24255 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24257 default: /* Invalid */
24258 MIPS_INVAL("MASK EXTR.W");
24259 generate_exception_end(ctx
, EXCP_RI
);
24263 #if defined(TARGET_MIPS64)
24264 case OPC_DDIV_G_2E
:
24265 case OPC_DDIVU_G_2E
:
24266 case OPC_DMULT_G_2E
:
24267 case OPC_DMULTU_G_2E
:
24268 case OPC_DMOD_G_2E
:
24269 case OPC_DMODU_G_2E
:
24270 check_insn(ctx
, INSN_LOONGSON2E
);
24271 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24273 case OPC_ABSQ_S_QH_DSP
:
24274 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
24276 case OPC_PRECEQ_L_PWL
:
24277 case OPC_PRECEQ_L_PWR
:
24278 case OPC_PRECEQ_PW_QHL
:
24279 case OPC_PRECEQ_PW_QHR
:
24280 case OPC_PRECEQ_PW_QHLA
:
24281 case OPC_PRECEQ_PW_QHRA
:
24282 case OPC_PRECEQU_QH_OBL
:
24283 case OPC_PRECEQU_QH_OBR
:
24284 case OPC_PRECEQU_QH_OBLA
:
24285 case OPC_PRECEQU_QH_OBRA
:
24286 case OPC_PRECEU_QH_OBL
:
24287 case OPC_PRECEU_QH_OBR
:
24288 case OPC_PRECEU_QH_OBLA
:
24289 case OPC_PRECEU_QH_OBRA
:
24290 case OPC_ABSQ_S_OB
:
24291 case OPC_ABSQ_S_PW
:
24292 case OPC_ABSQ_S_QH
:
24293 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24301 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24303 default: /* Invalid */
24304 MIPS_INVAL("MASK ABSQ_S.QH");
24305 generate_exception_end(ctx
, EXCP_RI
);
24309 case OPC_ADDU_OB_DSP
:
24310 op2
= MASK_ADDU_OB(ctx
->opcode
);
24312 case OPC_RADDU_L_OB
:
24314 case OPC_SUBQ_S_PW
:
24316 case OPC_SUBQ_S_QH
:
24318 case OPC_SUBU_S_OB
:
24320 case OPC_SUBU_S_QH
:
24322 case OPC_SUBUH_R_OB
:
24324 case OPC_ADDQ_S_PW
:
24326 case OPC_ADDQ_S_QH
:
24328 case OPC_ADDU_S_OB
:
24330 case OPC_ADDU_S_QH
:
24332 case OPC_ADDUH_R_OB
:
24333 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24335 case OPC_MULEQ_S_PW_QHL
:
24336 case OPC_MULEQ_S_PW_QHR
:
24337 case OPC_MULEU_S_QH_OBL
:
24338 case OPC_MULEU_S_QH_OBR
:
24339 case OPC_MULQ_RS_QH
:
24340 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24342 default: /* Invalid */
24343 MIPS_INVAL("MASK ADDU.OB");
24344 generate_exception_end(ctx
, EXCP_RI
);
24348 case OPC_CMPU_EQ_OB_DSP
:
24349 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
24351 case OPC_PRECR_SRA_QH_PW
:
24352 case OPC_PRECR_SRA_R_QH_PW
:
24353 /* Return value is rt. */
24354 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24356 case OPC_PRECR_OB_QH
:
24357 case OPC_PRECRQ_OB_QH
:
24358 case OPC_PRECRQ_PW_L
:
24359 case OPC_PRECRQ_QH_PW
:
24360 case OPC_PRECRQ_RS_QH_PW
:
24361 case OPC_PRECRQU_S_OB_QH
:
24362 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24364 case OPC_CMPU_EQ_OB
:
24365 case OPC_CMPU_LT_OB
:
24366 case OPC_CMPU_LE_OB
:
24367 case OPC_CMP_EQ_QH
:
24368 case OPC_CMP_LT_QH
:
24369 case OPC_CMP_LE_QH
:
24370 case OPC_CMP_EQ_PW
:
24371 case OPC_CMP_LT_PW
:
24372 case OPC_CMP_LE_PW
:
24373 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24375 case OPC_CMPGDU_EQ_OB
:
24376 case OPC_CMPGDU_LT_OB
:
24377 case OPC_CMPGDU_LE_OB
:
24378 case OPC_CMPGU_EQ_OB
:
24379 case OPC_CMPGU_LT_OB
:
24380 case OPC_CMPGU_LE_OB
:
24381 case OPC_PACKRL_PW
:
24385 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24387 default: /* Invalid */
24388 MIPS_INVAL("MASK CMPU_EQ.OB");
24389 generate_exception_end(ctx
, EXCP_RI
);
24393 case OPC_DAPPEND_DSP
:
24394 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24396 case OPC_DEXTR_W_DSP
:
24397 op2
= MASK_DEXTR_W(ctx
->opcode
);
24404 case OPC_DEXTR_R_L
:
24405 case OPC_DEXTR_RS_L
:
24407 case OPC_DEXTR_R_W
:
24408 case OPC_DEXTR_RS_W
:
24409 case OPC_DEXTR_S_H
:
24411 case OPC_DEXTRV_R_L
:
24412 case OPC_DEXTRV_RS_L
:
24413 case OPC_DEXTRV_S_H
:
24415 case OPC_DEXTRV_R_W
:
24416 case OPC_DEXTRV_RS_W
:
24417 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24422 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24424 default: /* Invalid */
24425 MIPS_INVAL("MASK EXTR.W");
24426 generate_exception_end(ctx
, EXCP_RI
);
24430 case OPC_DPAQ_W_QH_DSP
:
24431 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
24433 case OPC_DPAU_H_OBL
:
24434 case OPC_DPAU_H_OBR
:
24435 case OPC_DPSU_H_OBL
:
24436 case OPC_DPSU_H_OBR
:
24438 case OPC_DPAQ_S_W_QH
:
24440 case OPC_DPSQ_S_W_QH
:
24441 case OPC_MULSAQ_S_W_QH
:
24442 case OPC_DPAQ_SA_L_PW
:
24443 case OPC_DPSQ_SA_L_PW
:
24444 case OPC_MULSAQ_S_L_PW
:
24445 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24447 case OPC_MAQ_S_W_QHLL
:
24448 case OPC_MAQ_S_W_QHLR
:
24449 case OPC_MAQ_S_W_QHRL
:
24450 case OPC_MAQ_S_W_QHRR
:
24451 case OPC_MAQ_SA_W_QHLL
:
24452 case OPC_MAQ_SA_W_QHLR
:
24453 case OPC_MAQ_SA_W_QHRL
:
24454 case OPC_MAQ_SA_W_QHRR
:
24455 case OPC_MAQ_S_L_PWL
:
24456 case OPC_MAQ_S_L_PWR
:
24461 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24463 default: /* Invalid */
24464 MIPS_INVAL("MASK DPAQ.W.QH");
24465 generate_exception_end(ctx
, EXCP_RI
);
24469 case OPC_DINSV_DSP
:
24470 op2
= MASK_INSV(ctx
->opcode
);
24481 t0
= tcg_temp_new();
24482 t1
= tcg_temp_new();
24484 gen_load_gpr(t0
, rt
);
24485 gen_load_gpr(t1
, rs
);
24487 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24493 default: /* Invalid */
24494 MIPS_INVAL("MASK DINSV");
24495 generate_exception_end(ctx
, EXCP_RI
);
24499 case OPC_SHLL_OB_DSP
:
24500 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24503 default: /* Invalid */
24504 MIPS_INVAL("special3_legacy");
24505 generate_exception_end(ctx
, EXCP_RI
);
24510 static void decode_tx79_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
24512 uint32_t opc
= MASK_TX79_MMI0(ctx
->opcode
);
24515 case TX79_MMI0_PADDW
: /* TODO: TX79_MMI0_PADDW */
24516 case TX79_MMI0_PSUBW
: /* TODO: TX79_MMI0_PSUBW */
24517 case TX79_MMI0_PCGTW
: /* TODO: TX79_MMI0_PCGTW */
24518 case TX79_MMI0_PMAXW
: /* TODO: TX79_MMI0_PMAXW */
24519 case TX79_MMI0_PADDH
: /* TODO: TX79_MMI0_PADDH */
24520 case TX79_MMI0_PSUBH
: /* TODO: TX79_MMI0_PSUBH */
24521 case TX79_MMI0_PCGTH
: /* TODO: TX79_MMI0_PCGTH */
24522 case TX79_MMI0_PMAXH
: /* TODO: TX79_MMI0_PMAXH */
24523 case TX79_MMI0_PADDB
: /* TODO: TX79_MMI0_PADDB */
24524 case TX79_MMI0_PSUBB
: /* TODO: TX79_MMI0_PSUBB */
24525 case TX79_MMI0_PCGTB
: /* TODO: TX79_MMI0_PCGTB */
24526 case TX79_MMI0_PADDSW
: /* TODO: TX79_MMI0_PADDSW */
24527 case TX79_MMI0_PSUBSW
: /* TODO: TX79_MMI0_PSUBSW */
24528 case TX79_MMI0_PEXTLW
: /* TODO: TX79_MMI0_PEXTLW */
24529 case TX79_MMI0_PPACW
: /* TODO: TX79_MMI0_PPACW */
24530 case TX79_MMI0_PADDSH
: /* TODO: TX79_MMI0_PADDSH */
24531 case TX79_MMI0_PSUBSH
: /* TODO: TX79_MMI0_PSUBSH */
24532 case TX79_MMI0_PEXTLH
: /* TODO: TX79_MMI0_PEXTLH */
24533 case TX79_MMI0_PPACH
: /* TODO: TX79_MMI0_PPACH */
24534 case TX79_MMI0_PADDSB
: /* TODO: TX79_MMI0_PADDSB */
24535 case TX79_MMI0_PSUBSB
: /* TODO: TX79_MMI0_PSUBSB */
24536 case TX79_MMI0_PEXTLB
: /* TODO: TX79_MMI0_PEXTLB */
24537 case TX79_MMI0_PPACB
: /* TODO: TX79_MMI0_PPACB */
24538 case TX79_MMI0_PEXT5
: /* TODO: TX79_MMI0_PEXT5 */
24539 case TX79_MMI0_PPAC5
: /* TODO: TX79_MMI0_PPAC5 */
24540 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI0 */
24543 MIPS_INVAL("TX79 MMI class MMI0");
24544 generate_exception_end(ctx
, EXCP_RI
);
24549 static void decode_tx79_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
24551 uint32_t opc
= MASK_TX79_MMI1(ctx
->opcode
);
24554 case TX79_MMI1_PABSW
: /* TODO: TX79_MMI1_PABSW */
24555 case TX79_MMI1_PCEQW
: /* TODO: TX79_MMI1_PCEQW */
24556 case TX79_MMI1_PMINW
: /* TODO: TX79_MMI1_PMINW */
24557 case TX79_MMI1_PADSBH
: /* TODO: TX79_MMI1_PADSBH */
24558 case TX79_MMI1_PABSH
: /* TODO: TX79_MMI1_PABSH */
24559 case TX79_MMI1_PCEQH
: /* TODO: TX79_MMI1_PCEQH */
24560 case TX79_MMI1_PMINH
: /* TODO: TX79_MMI1_PMINH */
24561 case TX79_MMI1_PCEQB
: /* TODO: TX79_MMI1_PCEQB */
24562 case TX79_MMI1_PADDUW
: /* TODO: TX79_MMI1_PADDUW */
24563 case TX79_MMI1_PSUBUW
: /* TODO: TX79_MMI1_PSUBUW */
24564 case TX79_MMI1_PEXTUW
: /* TODO: TX79_MMI1_PEXTUW */
24565 case TX79_MMI1_PADDUH
: /* TODO: TX79_MMI1_PADDUH */
24566 case TX79_MMI1_PSUBUH
: /* TODO: TX79_MMI1_PSUBUH */
24567 case TX79_MMI1_PEXTUH
: /* TODO: TX79_MMI1_PEXTUH */
24568 case TX79_MMI1_PADDUB
: /* TODO: TX79_MMI1_PADDUB */
24569 case TX79_MMI1_PSUBUB
: /* TODO: TX79_MMI1_PSUBUB */
24570 case TX79_MMI1_PEXTUB
: /* TODO: TX79_MMI1_PEXTUB */
24571 case TX79_MMI1_QFSRV
: /* TODO: TX79_MMI1_QFSRV */
24572 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI1 */
24575 MIPS_INVAL("TX79 MMI class MMI1");
24576 generate_exception_end(ctx
, EXCP_RI
);
24581 static void decode_tx79_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
24583 uint32_t opc
= MASK_TX79_MMI2(ctx
->opcode
);
24586 case TX79_MMI2_PMADDW
: /* TODO: TX79_MMI2_PMADDW */
24587 case TX79_MMI2_PSLLVW
: /* TODO: TX79_MMI2_PSLLVW */
24588 case TX79_MMI2_PSRLVW
: /* TODO: TX79_MMI2_PSRLVW */
24589 case TX79_MMI2_PMSUBW
: /* TODO: TX79_MMI2_PMSUBW */
24590 case TX79_MMI2_PMFHI
: /* TODO: TX79_MMI2_PMFHI */
24591 case TX79_MMI2_PMFLO
: /* TODO: TX79_MMI2_PMFLO */
24592 case TX79_MMI2_PINTH
: /* TODO: TX79_MMI2_PINTH */
24593 case TX79_MMI2_PMULTW
: /* TODO: TX79_MMI2_PMULTW */
24594 case TX79_MMI2_PDIVW
: /* TODO: TX79_MMI2_PDIVW */
24595 case TX79_MMI2_PCPYLD
: /* TODO: TX79_MMI2_PCPYLD */
24596 case TX79_MMI2_PMADDH
: /* TODO: TX79_MMI2_PMADDH */
24597 case TX79_MMI2_PHMADH
: /* TODO: TX79_MMI2_PHMADH */
24598 case TX79_MMI2_PAND
: /* TODO: TX79_MMI2_PAND */
24599 case TX79_MMI2_PXOR
: /* TODO: TX79_MMI2_PXOR */
24600 case TX79_MMI2_PMSUBH
: /* TODO: TX79_MMI2_PMSUBH */
24601 case TX79_MMI2_PHMSBH
: /* TODO: TX79_MMI2_PHMSBH */
24602 case TX79_MMI2_PEXEH
: /* TODO: TX79_MMI2_PEXEH */
24603 case TX79_MMI2_PREVH
: /* TODO: TX79_MMI2_PREVH */
24604 case TX79_MMI2_PMULTH
: /* TODO: TX79_MMI2_PMULTH */
24605 case TX79_MMI2_PDIVBW
: /* TODO: TX79_MMI2_PDIVBW */
24606 case TX79_MMI2_PEXEW
: /* TODO: TX79_MMI2_PEXEW */
24607 case TX79_MMI2_PROT3W
: /* TODO: TX79_MMI2_PROT3W */
24608 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI2 */
24611 MIPS_INVAL("TX79 MMI class MMI2");
24612 generate_exception_end(ctx
, EXCP_RI
);
24617 static void decode_tx79_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
24619 uint32_t opc
= MASK_TX79_MMI3(ctx
->opcode
);
24622 case TX79_MMI3_PMADDUW
: /* TODO: TX79_MMI3_PMADDUW */
24623 case TX79_MMI3_PSRAVW
: /* TODO: TX79_MMI3_PSRAVW */
24624 case TX79_MMI3_PMTHI
: /* TODO: TX79_MMI3_PMTHI */
24625 case TX79_MMI3_PMTLO
: /* TODO: TX79_MMI3_PMTLO */
24626 case TX79_MMI3_PINTEH
: /* TODO: TX79_MMI3_PINTEH */
24627 case TX79_MMI3_PMULTUW
: /* TODO: TX79_MMI3_PMULTUW */
24628 case TX79_MMI3_PDIVUW
: /* TODO: TX79_MMI3_PDIVUW */
24629 case TX79_MMI3_PCPYUD
: /* TODO: TX79_MMI3_PCPYUD */
24630 case TX79_MMI3_POR
: /* TODO: TX79_MMI3_POR */
24631 case TX79_MMI3_PNOR
: /* TODO: TX79_MMI3_PNOR */
24632 case TX79_MMI3_PEXCH
: /* TODO: TX79_MMI3_PEXCH */
24633 case TX79_MMI3_PCPYH
: /* TODO: TX79_MMI3_PCPYH */
24634 case TX79_MMI3_PEXCW
: /* TODO: TX79_MMI3_PEXCW */
24635 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI3 */
24638 MIPS_INVAL("TX79 MMI class MMI3");
24639 generate_exception_end(ctx
, EXCP_RI
);
24644 static void decode_tx79_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
24646 uint32_t opc
= MASK_TX79_MMI(ctx
->opcode
);
24647 int rs
= extract32(ctx
->opcode
, 21, 5);
24648 int rt
= extract32(ctx
->opcode
, 16, 5);
24649 int rd
= extract32(ctx
->opcode
, 11, 5);
24652 case TX79_MMI_CLASS_MMI0
:
24653 decode_tx79_mmi0(env
, ctx
);
24655 case TX79_MMI_CLASS_MMI1
:
24656 decode_tx79_mmi1(env
, ctx
);
24658 case TX79_MMI_CLASS_MMI2
:
24659 decode_tx79_mmi2(env
, ctx
);
24661 case TX79_MMI_CLASS_MMI3
:
24662 decode_tx79_mmi3(env
, ctx
);
24664 case TX79_MMI_MULT1
:
24665 case TX79_MMI_MULTU1
:
24666 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
24668 case TX79_MMI_MTLO1
:
24669 case TX79_MMI_MTHI1
:
24670 gen_HILO(ctx
, opc
, 1, rs
);
24672 case TX79_MMI_MFLO1
:
24673 case TX79_MMI_MFHI1
:
24674 gen_HILO(ctx
, opc
, 1, rd
);
24676 case TX79_MMI_MADD
: /* TODO: TX79_MMI_MADD */
24677 case TX79_MMI_MADDU
: /* TODO: TX79_MMI_MADDU */
24678 case TX79_MMI_PLZCW
: /* TODO: TX79_MMI_PLZCW */
24679 case TX79_MMI_DIV1
: /* TODO: TX79_MMI_DIV1 */
24680 case TX79_MMI_DIVU1
: /* TODO: TX79_MMI_DIVU1 */
24681 case TX79_MMI_MADD1
: /* TODO: TX79_MMI_MADD1 */
24682 case TX79_MMI_MADDU1
: /* TODO: TX79_MMI_MADDU1 */
24683 case TX79_MMI_PMFHL
: /* TODO: TX79_MMI_PMFHL */
24684 case TX79_MMI_PMTHL
: /* TODO: TX79_MMI_PMTHL */
24685 case TX79_MMI_PSLLH
: /* TODO: TX79_MMI_PSLLH */
24686 case TX79_MMI_PSRLH
: /* TODO: TX79_MMI_PSRLH */
24687 case TX79_MMI_PSRAH
: /* TODO: TX79_MMI_PSRAH */
24688 case TX79_MMI_PSLLW
: /* TODO: TX79_MMI_PSLLW */
24689 case TX79_MMI_PSRLW
: /* TODO: TX79_MMI_PSRLW */
24690 case TX79_MMI_PSRAW
: /* TODO: TX79_MMI_PSRAW */
24691 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_CLASS_MMI */
24694 MIPS_INVAL("TX79 MMI class");
24695 generate_exception_end(ctx
, EXCP_RI
);
24700 static void decode_tx79_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
24702 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_LQ */
24705 static void gen_tx79_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
24707 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_SQ */
/*
 * The TX79-specific instruction Store Quadword
 *
 *     31    26 25  21 20  16 15                    0
 *    +--------+-------+-------+------------------------+
 *    | 011111 | base  | rt    | offset                 | SQ
 *    +--------+-------+-------+------------------------+
 *        6        5       5             16
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 *     31    26 25  21 20  16 15  11 10   6 5        0
 *    +--------+-------+-------+-------+-------+--------+
 *    | 011111 | 00000 | rt    | rd    | 00000 | 111011 | RDHWR
 *    +--------+-------+-------+-------+-------+--------+
 *        6        5       5       5       5        6
 *
 * that is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
24731 static void decode_tx79_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
24733 int base
= extract32(ctx
->opcode
, 21, 5);
24734 int rt
= extract32(ctx
->opcode
, 16, 5);
24735 int offset
= extract32(ctx
->opcode
, 0, 16);
24737 #ifdef CONFIG_USER_ONLY
24738 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
24739 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
24741 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
24742 int rd
= extract32(ctx
->opcode
, 11, 5);
24744 gen_rdhwr(ctx
, rt
, rd
, 0);
24749 gen_tx79_sq(ctx
, base
, rt
, offset
);
24752 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
24754 int rs
, rt
, rd
, sa
;
24758 rs
= (ctx
->opcode
>> 21) & 0x1f;
24759 rt
= (ctx
->opcode
>> 16) & 0x1f;
24760 rd
= (ctx
->opcode
>> 11) & 0x1f;
24761 sa
= (ctx
->opcode
>> 6) & 0x1f;
24762 imm
= sextract32(ctx
->opcode
, 7, 9);
24764 op1
= MASK_SPECIAL3(ctx
->opcode
);
24767 * EVA loads and stores overlap Loongson 2E instructions decoded by
24768 * decode_opc_special3_legacy(), so be careful to allow their decoding when
24775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24783 check_cp0_enabled(ctx
);
24784 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24788 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24793 check_cp0_enabled(ctx
);
24794 gen_st(ctx
, op1
, rt
, rs
, imm
);
24797 check_cp0_enabled(ctx
);
24798 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24801 check_cp0_enabled(ctx
);
24802 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24803 gen_cache_operation(ctx
, rt
, rs
, imm
);
24805 /* Treat as NOP. */
24808 check_cp0_enabled(ctx
);
24809 /* Treat as NOP. */
24817 check_insn(ctx
, ISA_MIPS32R2
);
24818 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24821 op2
= MASK_BSHFL(ctx
->opcode
);
24824 case OPC_ALIGN_END
:
24826 check_insn(ctx
, ISA_MIPS32R6
);
24827 decode_opc_special3_r6(env
, ctx
);
24830 check_insn(ctx
, ISA_MIPS32R2
);
24831 gen_bshfl(ctx
, op2
, rt
, rd
);
24835 #if defined(TARGET_MIPS64)
24842 check_insn(ctx
, ISA_MIPS64R2
);
24843 check_mips_64(ctx
);
24844 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24847 op2
= MASK_DBSHFL(ctx
->opcode
);
24850 case OPC_DALIGN_END
:
24852 check_insn(ctx
, ISA_MIPS32R6
);
24853 decode_opc_special3_r6(env
, ctx
);
24856 check_insn(ctx
, ISA_MIPS64R2
);
24857 check_mips_64(ctx
);
24858 op2
= MASK_DBSHFL(ctx
->opcode
);
24859 gen_bshfl(ctx
, op2
, rt
, rd
);
24865 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
24870 TCGv t0
= tcg_temp_new();
24871 TCGv t1
= tcg_temp_new();
24873 gen_load_gpr(t0
, rt
);
24874 gen_load_gpr(t1
, rs
);
24875 gen_helper_fork(t0
, t1
);
24883 TCGv t0
= tcg_temp_new();
24885 gen_load_gpr(t0
, rs
);
24886 gen_helper_yield(t0
, cpu_env
, t0
);
24887 gen_store_gpr(t0
, rd
);
24892 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24893 decode_opc_special3_r6(env
, ctx
);
24895 decode_opc_special3_legacy(env
, ctx
);
24900 /* MIPS SIMD Architecture (MSA) */
24901 static inline int check_msa_access(DisasContext
*ctx
)
24903 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
24904 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
24905 generate_exception_end(ctx
, EXCP_RI
);
24909 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
24910 if (ctx
->insn_flags
& ASE_MSA
) {
24911 generate_exception_end(ctx
, EXCP_MSADIS
);
24914 generate_exception_end(ctx
, EXCP_RI
);
24921 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
24923 /* generates tcg ops to check if any element is 0 */
24924 /* Note this function only works with MSA_WRLEN = 128 */
24925 uint64_t eval_zero_or_big
= 0;
24926 uint64_t eval_big
= 0;
24927 TCGv_i64 t0
= tcg_temp_new_i64();
24928 TCGv_i64 t1
= tcg_temp_new_i64();
24931 eval_zero_or_big
= 0x0101010101010101ULL
;
24932 eval_big
= 0x8080808080808080ULL
;
24935 eval_zero_or_big
= 0x0001000100010001ULL
;
24936 eval_big
= 0x8000800080008000ULL
;
24939 eval_zero_or_big
= 0x0000000100000001ULL
;
24940 eval_big
= 0x8000000080000000ULL
;
24943 eval_zero_or_big
= 0x0000000000000001ULL
;
24944 eval_big
= 0x8000000000000000ULL
;
24947 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
24948 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
24949 tcg_gen_andi_i64(t0
, t0
, eval_big
);
24950 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
24951 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
24952 tcg_gen_andi_i64(t1
, t1
, eval_big
);
24953 tcg_gen_or_i64(t0
, t0
, t1
);
24954 /* if all bits are zero then all elements are not zero */
24955 /* if some bit is non-zero then some element is zero */
24956 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
24957 tcg_gen_trunc_i64_tl(tresult
, t0
);
24958 tcg_temp_free_i64(t0
);
24959 tcg_temp_free_i64(t1
);
24962 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
24964 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
24965 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24966 int64_t s16
= (int16_t)ctx
->opcode
;
24968 check_msa_access(ctx
);
24970 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
24971 generate_exception_end(ctx
, EXCP_RI
);
24978 TCGv_i64 t0
= tcg_temp_new_i64();
24979 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
24980 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
24981 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
24982 tcg_gen_trunc_i64_tl(bcond
, t0
);
24983 tcg_temp_free_i64(t0
);
24990 gen_check_zero_element(bcond
, df
, wt
);
24996 gen_check_zero_element(bcond
, df
, wt
);
24997 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
25001 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
25003 ctx
->hflags
|= MIPS_HFLAG_BC
;
25004 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
25007 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
25009 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
25010 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
25011 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25012 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25014 TCGv_i32 twd
= tcg_const_i32(wd
);
25015 TCGv_i32 tws
= tcg_const_i32(ws
);
25016 TCGv_i32 ti8
= tcg_const_i32(i8
);
25018 switch (MASK_MSA_I8(ctx
->opcode
)) {
25020 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
25023 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
25026 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
25029 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
25032 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
25035 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
25038 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
25044 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
25045 if (df
== DF_DOUBLE
) {
25046 generate_exception_end(ctx
, EXCP_RI
);
25048 TCGv_i32 tdf
= tcg_const_i32(df
);
25049 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
25050 tcg_temp_free_i32(tdf
);
25055 MIPS_INVAL("MSA instruction");
25056 generate_exception_end(ctx
, EXCP_RI
);
25060 tcg_temp_free_i32(twd
);
25061 tcg_temp_free_i32(tws
);
25062 tcg_temp_free_i32(ti8
);
25065 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
25067 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25068 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25069 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
25070 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
25071 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25072 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25074 TCGv_i32 tdf
= tcg_const_i32(df
);
25075 TCGv_i32 twd
= tcg_const_i32(wd
);
25076 TCGv_i32 tws
= tcg_const_i32(ws
);
25077 TCGv_i32 timm
= tcg_temp_new_i32();
25078 tcg_gen_movi_i32(timm
, u5
);
25080 switch (MASK_MSA_I5(ctx
->opcode
)) {
25082 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25085 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25087 case OPC_MAXI_S_df
:
25088 tcg_gen_movi_i32(timm
, s5
);
25089 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25091 case OPC_MAXI_U_df
:
25092 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25094 case OPC_MINI_S_df
:
25095 tcg_gen_movi_i32(timm
, s5
);
25096 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25098 case OPC_MINI_U_df
:
25099 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25102 tcg_gen_movi_i32(timm
, s5
);
25103 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25105 case OPC_CLTI_S_df
:
25106 tcg_gen_movi_i32(timm
, s5
);
25107 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25109 case OPC_CLTI_U_df
:
25110 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25112 case OPC_CLEI_S_df
:
25113 tcg_gen_movi_i32(timm
, s5
);
25114 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25116 case OPC_CLEI_U_df
:
25117 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25121 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
25122 tcg_gen_movi_i32(timm
, s10
);
25123 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
25127 MIPS_INVAL("MSA instruction");
25128 generate_exception_end(ctx
, EXCP_RI
);
25132 tcg_temp_free_i32(tdf
);
25133 tcg_temp_free_i32(twd
);
25134 tcg_temp_free_i32(tws
);
25135 tcg_temp_free_i32(timm
);
25138 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
25140 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25141 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
25142 uint32_t df
= 0, m
= 0;
25143 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25144 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25151 if ((dfm
& 0x40) == 0x00) {
25154 } else if ((dfm
& 0x60) == 0x40) {
25157 } else if ((dfm
& 0x70) == 0x60) {
25160 } else if ((dfm
& 0x78) == 0x70) {
25164 generate_exception_end(ctx
, EXCP_RI
);
25168 tdf
= tcg_const_i32(df
);
25169 tm
= tcg_const_i32(m
);
25170 twd
= tcg_const_i32(wd
);
25171 tws
= tcg_const_i32(ws
);
25173 switch (MASK_MSA_BIT(ctx
->opcode
)) {
25175 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25178 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
25181 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25184 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25187 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
25190 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
25192 case OPC_BINSLI_df
:
25193 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25195 case OPC_BINSRI_df
:
25196 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25199 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
25202 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
25205 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
25208 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25211 MIPS_INVAL("MSA instruction");
25212 generate_exception_end(ctx
, EXCP_RI
);
25216 tcg_temp_free_i32(tdf
);
25217 tcg_temp_free_i32(tm
);
25218 tcg_temp_free_i32(twd
);
25219 tcg_temp_free_i32(tws
);
25222 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
25224 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25225 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25226 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25227 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25228 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25230 TCGv_i32 tdf
= tcg_const_i32(df
);
25231 TCGv_i32 twd
= tcg_const_i32(wd
);
25232 TCGv_i32 tws
= tcg_const_i32(ws
);
25233 TCGv_i32 twt
= tcg_const_i32(wt
);
25235 switch (MASK_MSA_3R(ctx
->opcode
)) {
25237 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
25240 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25243 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25246 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25248 case OPC_SUBS_S_df
:
25249 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25252 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25255 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
25258 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25261 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
25264 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25266 case OPC_ADDS_A_df
:
25267 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25269 case OPC_SUBS_U_df
:
25270 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25273 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25276 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
25279 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
25282 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25285 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25288 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25290 case OPC_ADDS_S_df
:
25291 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25293 case OPC_SUBSUS_U_df
:
25294 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25297 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25300 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25303 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25306 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25309 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25312 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25314 case OPC_ADDS_U_df
:
25315 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25317 case OPC_SUBSUU_S_df
:
25318 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25321 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25324 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
25327 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25330 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25333 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25335 case OPC_ASUB_S_df
:
25336 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25339 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25342 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25345 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
25348 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25351 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25354 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25356 case OPC_ASUB_U_df
:
25357 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25360 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25363 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25366 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25369 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25371 case OPC_AVER_S_df
:
25372 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25375 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25378 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25381 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25384 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25386 case OPC_AVER_U_df
:
25387 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25390 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25393 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25396 case OPC_DOTP_S_df
:
25397 case OPC_DOTP_U_df
:
25398 case OPC_DPADD_S_df
:
25399 case OPC_DPADD_U_df
:
25400 case OPC_DPSUB_S_df
:
25401 case OPC_HADD_S_df
:
25402 case OPC_DPSUB_U_df
:
25403 case OPC_HADD_U_df
:
25404 case OPC_HSUB_S_df
:
25405 case OPC_HSUB_U_df
:
25406 if (df
== DF_BYTE
) {
25407 generate_exception_end(ctx
, EXCP_RI
);
25410 switch (MASK_MSA_3R(ctx
->opcode
)) {
25411 case OPC_DOTP_S_df
:
25412 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25414 case OPC_DOTP_U_df
:
25415 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25417 case OPC_DPADD_S_df
:
25418 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25420 case OPC_DPADD_U_df
:
25421 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25423 case OPC_DPSUB_S_df
:
25424 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25426 case OPC_HADD_S_df
:
25427 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25429 case OPC_DPSUB_U_df
:
25430 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25432 case OPC_HADD_U_df
:
25433 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25435 case OPC_HSUB_S_df
:
25436 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25438 case OPC_HSUB_U_df
:
25439 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25444 MIPS_INVAL("MSA instruction");
25445 generate_exception_end(ctx
, EXCP_RI
);
25448 tcg_temp_free_i32(twd
);
25449 tcg_temp_free_i32(tws
);
25450 tcg_temp_free_i32(twt
);
25451 tcg_temp_free_i32(tdf
);
25454 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
25456 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
25457 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
25458 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
25459 TCGv telm
= tcg_temp_new();
25460 TCGv_i32 tsr
= tcg_const_i32(source
);
25461 TCGv_i32 tdt
= tcg_const_i32(dest
);
25463 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
25465 gen_load_gpr(telm
, source
);
25466 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
25469 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
25470 gen_store_gpr(telm
, dest
);
25473 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
25476 MIPS_INVAL("MSA instruction");
25477 generate_exception_end(ctx
, EXCP_RI
);
25481 tcg_temp_free(telm
);
25482 tcg_temp_free_i32(tdt
);
25483 tcg_temp_free_i32(tsr
);
25486 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
25489 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25490 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25491 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25493 TCGv_i32 tws
= tcg_const_i32(ws
);
25494 TCGv_i32 twd
= tcg_const_i32(wd
);
25495 TCGv_i32 tn
= tcg_const_i32(n
);
25496 TCGv_i32 tdf
= tcg_const_i32(df
);
25498 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25500 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
25502 case OPC_SPLATI_df
:
25503 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
25506 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
25508 case OPC_COPY_S_df
:
25509 case OPC_COPY_U_df
:
25510 case OPC_INSERT_df
:
25511 #if !defined(TARGET_MIPS64)
25512 /* Double format valid only for MIPS64 */
25513 if (df
== DF_DOUBLE
) {
25514 generate_exception_end(ctx
, EXCP_RI
);
25518 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25519 case OPC_COPY_S_df
:
25520 if (likely(wd
!= 0)) {
25521 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
25524 case OPC_COPY_U_df
:
25525 if (likely(wd
!= 0)) {
25526 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
25529 case OPC_INSERT_df
:
25530 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
25535 MIPS_INVAL("MSA instruction");
25536 generate_exception_end(ctx
, EXCP_RI
);
25538 tcg_temp_free_i32(twd
);
25539 tcg_temp_free_i32(tws
);
25540 tcg_temp_free_i32(tn
);
25541 tcg_temp_free_i32(tdf
);
25544 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
25546 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
25547 uint32_t df
= 0, n
= 0;
25549 if ((dfn
& 0x30) == 0x00) {
25552 } else if ((dfn
& 0x38) == 0x20) {
25555 } else if ((dfn
& 0x3c) == 0x30) {
25558 } else if ((dfn
& 0x3e) == 0x38) {
25561 } else if (dfn
== 0x3E) {
25562 /* CTCMSA, CFCMSA, MOVE.V */
25563 gen_msa_elm_3e(env
, ctx
);
25566 generate_exception_end(ctx
, EXCP_RI
);
25570 gen_msa_elm_df(env
, ctx
, df
, n
);
25573 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25575 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25576 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
25577 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25578 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25579 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25581 TCGv_i32 twd
= tcg_const_i32(wd
);
25582 TCGv_i32 tws
= tcg_const_i32(ws
);
25583 TCGv_i32 twt
= tcg_const_i32(wt
);
25584 TCGv_i32 tdf
= tcg_temp_new_i32();
25586 /* adjust df value for floating-point instruction */
25587 tcg_gen_movi_i32(tdf
, df
+ 2);
25589 switch (MASK_MSA_3RF(ctx
->opcode
)) {
25591 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25594 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25597 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25600 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25603 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25606 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25609 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
25612 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25615 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25618 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25621 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25624 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25627 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25630 tcg_gen_movi_i32(tdf
, df
+ 1);
25631 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25634 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25637 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25639 case OPC_MADD_Q_df
:
25640 tcg_gen_movi_i32(tdf
, df
+ 1);
25641 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25644 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25646 case OPC_MSUB_Q_df
:
25647 tcg_gen_movi_i32(tdf
, df
+ 1);
25648 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25651 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25654 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
25657 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25660 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
25663 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25666 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25669 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25672 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25675 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25678 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25681 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25684 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25687 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
25689 case OPC_MULR_Q_df
:
25690 tcg_gen_movi_i32(tdf
, df
+ 1);
25691 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25694 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25696 case OPC_FMIN_A_df
:
25697 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25699 case OPC_MADDR_Q_df
:
25700 tcg_gen_movi_i32(tdf
, df
+ 1);
25701 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25704 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25707 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
25709 case OPC_MSUBR_Q_df
:
25710 tcg_gen_movi_i32(tdf
, df
+ 1);
25711 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25714 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25716 case OPC_FMAX_A_df
:
25717 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25720 MIPS_INVAL("MSA instruction");
25721 generate_exception_end(ctx
, EXCP_RI
);
25725 tcg_temp_free_i32(twd
);
25726 tcg_temp_free_i32(tws
);
25727 tcg_temp_free_i32(twt
);
25728 tcg_temp_free_i32(tdf
);
25731 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
25733 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25734 (op & (0x7 << 18)))
25735 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25736 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25737 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25738 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
25739 TCGv_i32 twd
= tcg_const_i32(wd
);
25740 TCGv_i32 tws
= tcg_const_i32(ws
);
25741 TCGv_i32 twt
= tcg_const_i32(wt
);
25742 TCGv_i32 tdf
= tcg_const_i32(df
);
25744 switch (MASK_MSA_2R(ctx
->opcode
)) {
25746 #if !defined(TARGET_MIPS64)
25747 /* Double format valid only for MIPS64 */
25748 if (df
== DF_DOUBLE
) {
25749 generate_exception_end(ctx
, EXCP_RI
);
25753 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
25756 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
25759 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
25762 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
25765 MIPS_INVAL("MSA instruction");
25766 generate_exception_end(ctx
, EXCP_RI
);
25770 tcg_temp_free_i32(twd
);
25771 tcg_temp_free_i32(tws
);
25772 tcg_temp_free_i32(twt
);
25773 tcg_temp_free_i32(tdf
);
25776 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25778 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25779 (op & (0xf << 17)))
25780 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25781 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25782 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25783 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
25784 TCGv_i32 twd
= tcg_const_i32(wd
);
25785 TCGv_i32 tws
= tcg_const_i32(ws
);
25786 TCGv_i32 twt
= tcg_const_i32(wt
);
25787 /* adjust df value for floating-point instruction */
25788 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
25790 switch (MASK_MSA_2RF(ctx
->opcode
)) {
25791 case OPC_FCLASS_df
:
25792 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
25794 case OPC_FTRUNC_S_df
:
25795 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
25797 case OPC_FTRUNC_U_df
:
25798 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
25801 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
25803 case OPC_FRSQRT_df
:
25804 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
25807 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
25810 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
25813 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
25815 case OPC_FEXUPL_df
:
25816 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
25818 case OPC_FEXUPR_df
:
25819 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
25822 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
25825 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
25827 case OPC_FTINT_S_df
:
25828 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
25830 case OPC_FTINT_U_df
:
25831 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
25833 case OPC_FFINT_S_df
:
25834 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
25836 case OPC_FFINT_U_df
:
25837 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
25841 tcg_temp_free_i32(twd
);
25842 tcg_temp_free_i32(tws
);
25843 tcg_temp_free_i32(twt
);
25844 tcg_temp_free_i32(tdf
);
25847 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
25849 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
25850 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25851 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25852 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25853 TCGv_i32 twd
= tcg_const_i32(wd
);
25854 TCGv_i32 tws
= tcg_const_i32(ws
);
25855 TCGv_i32 twt
= tcg_const_i32(wt
);
25857 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25859 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
25862 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
25865 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
25868 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
25871 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
25874 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
25877 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
25880 MIPS_INVAL("MSA instruction");
25881 generate_exception_end(ctx
, EXCP_RI
);
25885 tcg_temp_free_i32(twd
);
25886 tcg_temp_free_i32(tws
);
25887 tcg_temp_free_i32(twt
);
25890 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
25892 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25900 gen_msa_vec_v(env
, ctx
);
25903 gen_msa_2r(env
, ctx
);
25906 gen_msa_2rf(env
, ctx
);
25909 MIPS_INVAL("MSA instruction");
25910 generate_exception_end(ctx
, EXCP_RI
);
25915 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
25917 uint32_t opcode
= ctx
->opcode
;
25918 check_insn(ctx
, ASE_MSA
);
25919 check_msa_access(ctx
);
25921 switch (MASK_MSA_MINOR(opcode
)) {
25922 case OPC_MSA_I8_00
:
25923 case OPC_MSA_I8_01
:
25924 case OPC_MSA_I8_02
:
25925 gen_msa_i8(env
, ctx
);
25927 case OPC_MSA_I5_06
:
25928 case OPC_MSA_I5_07
:
25929 gen_msa_i5(env
, ctx
);
25931 case OPC_MSA_BIT_09
:
25932 case OPC_MSA_BIT_0A
:
25933 gen_msa_bit(env
, ctx
);
25935 case OPC_MSA_3R_0D
:
25936 case OPC_MSA_3R_0E
:
25937 case OPC_MSA_3R_0F
:
25938 case OPC_MSA_3R_10
:
25939 case OPC_MSA_3R_11
:
25940 case OPC_MSA_3R_12
:
25941 case OPC_MSA_3R_13
:
25942 case OPC_MSA_3R_14
:
25943 case OPC_MSA_3R_15
:
25944 gen_msa_3r(env
, ctx
);
25947 gen_msa_elm(env
, ctx
);
25949 case OPC_MSA_3RF_1A
:
25950 case OPC_MSA_3RF_1B
:
25951 case OPC_MSA_3RF_1C
:
25952 gen_msa_3rf(env
, ctx
);
25955 gen_msa_vec(env
, ctx
);
25966 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
25967 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
25968 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25969 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
25971 TCGv_i32 twd
= tcg_const_i32(wd
);
25972 TCGv taddr
= tcg_temp_new();
25973 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
25975 switch (MASK_MSA_MINOR(opcode
)) {
25977 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
25980 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
25983 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
25986 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
25989 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
25992 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
25995 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
25998 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
26002 tcg_temp_free_i32(twd
);
26003 tcg_temp_free(taddr
);
26007 MIPS_INVAL("MSA instruction");
26008 generate_exception_end(ctx
, EXCP_RI
);
26014 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
26017 int rs
, rt
, rd
, sa
;
26021 /* make sure instructions are on a word boundary */
26022 if (ctx
->base
.pc_next
& 0x3) {
26023 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
26024 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
26028 /* Handle blikely not taken case */
26029 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
26030 TCGLabel
*l1
= gen_new_label();
26032 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
26033 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
26034 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
26038 op
= MASK_OP_MAJOR(ctx
->opcode
);
26039 rs
= (ctx
->opcode
>> 21) & 0x1f;
26040 rt
= (ctx
->opcode
>> 16) & 0x1f;
26041 rd
= (ctx
->opcode
>> 11) & 0x1f;
26042 sa
= (ctx
->opcode
>> 6) & 0x1f;
26043 imm
= (int16_t)ctx
->opcode
;
26046 decode_opc_special(env
, ctx
);
26049 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
26050 decode_tx79_mmi(env
, ctx
);
26052 decode_opc_special2_legacy(env
, ctx
);
26056 if (ctx
->insn_flags
& INSN_R5900
) {
26057 decode_tx79_sq(env
, ctx
); /* TX79_SQ */
26059 decode_opc_special3(env
, ctx
);
26063 op1
= MASK_REGIMM(ctx
->opcode
);
26065 case OPC_BLTZL
: /* REGIMM branches */
26069 check_insn(ctx
, ISA_MIPS2
);
26070 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26074 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26078 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26080 /* OPC_NAL, OPC_BAL */
26081 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
26083 generate_exception_end(ctx
, EXCP_RI
);
26086 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26089 case OPC_TGEI
: /* REGIMM traps */
26096 check_insn(ctx
, ISA_MIPS2
);
26097 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26098 gen_trap(ctx
, op1
, rs
, -1, imm
);
26101 check_insn(ctx
, ISA_MIPS32R6
);
26102 generate_exception_end(ctx
, EXCP_RI
);
26105 check_insn(ctx
, ISA_MIPS32R2
);
26106 /* Break the TB to be able to sync copied instructions
26108 ctx
->base
.is_jmp
= DISAS_STOP
;
26110 case OPC_BPOSGE32
: /* MIPS DSP branch */
26111 #if defined(TARGET_MIPS64)
26115 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
26117 #if defined(TARGET_MIPS64)
26119 check_insn(ctx
, ISA_MIPS32R6
);
26120 check_mips_64(ctx
);
26122 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
26126 check_insn(ctx
, ISA_MIPS32R6
);
26127 check_mips_64(ctx
);
26129 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
26133 default: /* Invalid */
26134 MIPS_INVAL("regimm");
26135 generate_exception_end(ctx
, EXCP_RI
);
26140 check_cp0_enabled(ctx
);
26141 op1
= MASK_CP0(ctx
->opcode
);
26149 #if defined(TARGET_MIPS64)
26153 #ifndef CONFIG_USER_ONLY
26154 gen_cp0(env
, ctx
, op1
, rt
, rd
);
26155 #endif /* !CONFIG_USER_ONLY */
26173 #ifndef CONFIG_USER_ONLY
26174 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
26175 #endif /* !CONFIG_USER_ONLY */
26178 #ifndef CONFIG_USER_ONLY
26181 TCGv t0
= tcg_temp_new();
26183 op2
= MASK_MFMC0(ctx
->opcode
);
26187 gen_helper_dmt(t0
);
26188 gen_store_gpr(t0
, rt
);
26192 gen_helper_emt(t0
);
26193 gen_store_gpr(t0
, rt
);
26197 gen_helper_dvpe(t0
, cpu_env
);
26198 gen_store_gpr(t0
, rt
);
26202 gen_helper_evpe(t0
, cpu_env
);
26203 gen_store_gpr(t0
, rt
);
26206 check_insn(ctx
, ISA_MIPS32R6
);
26208 gen_helper_dvp(t0
, cpu_env
);
26209 gen_store_gpr(t0
, rt
);
26213 check_insn(ctx
, ISA_MIPS32R6
);
26215 gen_helper_evp(t0
, cpu_env
);
26216 gen_store_gpr(t0
, rt
);
26220 check_insn(ctx
, ISA_MIPS32R2
);
26221 save_cpu_state(ctx
, 1);
26222 gen_helper_di(t0
, cpu_env
);
26223 gen_store_gpr(t0
, rt
);
26224 /* Stop translation as we may have switched
26225 the execution mode. */
26226 ctx
->base
.is_jmp
= DISAS_STOP
;
26229 check_insn(ctx
, ISA_MIPS32R2
);
26230 save_cpu_state(ctx
, 1);
26231 gen_helper_ei(t0
, cpu_env
);
26232 gen_store_gpr(t0
, rt
);
26233 /* DISAS_STOP isn't sufficient, we need to ensure we break
26234 out of translated code to check for pending interrupts */
26235 gen_save_pc(ctx
->base
.pc_next
+ 4);
26236 ctx
->base
.is_jmp
= DISAS_EXIT
;
26238 default: /* Invalid */
26239 MIPS_INVAL("mfmc0");
26240 generate_exception_end(ctx
, EXCP_RI
);
26245 #endif /* !CONFIG_USER_ONLY */
26248 check_insn(ctx
, ISA_MIPS32R2
);
26249 gen_load_srsgpr(rt
, rd
);
26252 check_insn(ctx
, ISA_MIPS32R2
);
26253 gen_store_srsgpr(rt
, rd
);
26257 generate_exception_end(ctx
, EXCP_RI
);
26261 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
26262 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26263 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
26264 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26267 /* Arithmetic with immediate opcode */
26268 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26272 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26274 case OPC_SLTI
: /* Set on less than with immediate opcode */
26276 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
26278 case OPC_ANDI
: /* Arithmetic with immediate opcode */
26279 case OPC_LUI
: /* OPC_AUI */
26282 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
26284 case OPC_J
: /* Jump */
26286 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26287 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26290 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
26291 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26293 generate_exception_end(ctx
, EXCP_RI
);
26296 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
26297 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26300 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26303 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
26304 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26306 generate_exception_end(ctx
, EXCP_RI
);
26309 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
26310 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26313 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26316 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
26319 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26321 check_insn(ctx
, ISA_MIPS32R6
);
26322 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
26323 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26326 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
26329 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26331 check_insn(ctx
, ISA_MIPS32R6
);
26332 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
26333 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26338 check_insn(ctx
, ISA_MIPS2
);
26339 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26343 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26345 case OPC_LL
: /* Load and stores */
26346 check_insn(ctx
, ISA_MIPS2
);
26350 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26358 gen_ld(ctx
, op
, rt
, rs
, imm
);
26362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26367 gen_st(ctx
, op
, rt
, rs
, imm
);
26370 check_insn(ctx
, ISA_MIPS2
);
26371 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26372 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26375 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26376 check_cp0_enabled(ctx
);
26377 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
26378 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26379 gen_cache_operation(ctx
, rt
, rs
, imm
);
26381 /* Treat as NOP. */
26384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26385 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
26386 /* Treat as NOP. */
26389 /* Floating point (COP1). */
26394 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
26398 op1
= MASK_CP1(ctx
->opcode
);
26403 check_cp1_enabled(ctx
);
26404 check_insn(ctx
, ISA_MIPS32R2
);
26410 check_cp1_enabled(ctx
);
26411 gen_cp1(ctx
, op1
, rt
, rd
);
26413 #if defined(TARGET_MIPS64)
26416 check_cp1_enabled(ctx
);
26417 check_insn(ctx
, ISA_MIPS3
);
26418 check_mips_64(ctx
);
26419 gen_cp1(ctx
, op1
, rt
, rd
);
26422 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
26423 check_cp1_enabled(ctx
);
26424 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26426 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26431 check_insn(ctx
, ASE_MIPS3D
);
26432 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26433 (rt
>> 2) & 0x7, imm
<< 2);
26437 check_cp1_enabled(ctx
);
26438 check_insn(ctx
, ISA_MIPS32R6
);
26439 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26443 check_cp1_enabled(ctx
);
26444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26446 check_insn(ctx
, ASE_MIPS3D
);
26449 check_cp1_enabled(ctx
);
26450 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26451 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26452 (rt
>> 2) & 0x7, imm
<< 2);
26459 check_cp1_enabled(ctx
);
26460 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26466 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
26467 check_cp1_enabled(ctx
);
26468 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26470 case R6_OPC_CMP_AF_S
:
26471 case R6_OPC_CMP_UN_S
:
26472 case R6_OPC_CMP_EQ_S
:
26473 case R6_OPC_CMP_UEQ_S
:
26474 case R6_OPC_CMP_LT_S
:
26475 case R6_OPC_CMP_ULT_S
:
26476 case R6_OPC_CMP_LE_S
:
26477 case R6_OPC_CMP_ULE_S
:
26478 case R6_OPC_CMP_SAF_S
:
26479 case R6_OPC_CMP_SUN_S
:
26480 case R6_OPC_CMP_SEQ_S
:
26481 case R6_OPC_CMP_SEUQ_S
:
26482 case R6_OPC_CMP_SLT_S
:
26483 case R6_OPC_CMP_SULT_S
:
26484 case R6_OPC_CMP_SLE_S
:
26485 case R6_OPC_CMP_SULE_S
:
26486 case R6_OPC_CMP_OR_S
:
26487 case R6_OPC_CMP_UNE_S
:
26488 case R6_OPC_CMP_NE_S
:
26489 case R6_OPC_CMP_SOR_S
:
26490 case R6_OPC_CMP_SUNE_S
:
26491 case R6_OPC_CMP_SNE_S
:
26492 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26494 case R6_OPC_CMP_AF_D
:
26495 case R6_OPC_CMP_UN_D
:
26496 case R6_OPC_CMP_EQ_D
:
26497 case R6_OPC_CMP_UEQ_D
:
26498 case R6_OPC_CMP_LT_D
:
26499 case R6_OPC_CMP_ULT_D
:
26500 case R6_OPC_CMP_LE_D
:
26501 case R6_OPC_CMP_ULE_D
:
26502 case R6_OPC_CMP_SAF_D
:
26503 case R6_OPC_CMP_SUN_D
:
26504 case R6_OPC_CMP_SEQ_D
:
26505 case R6_OPC_CMP_SEUQ_D
:
26506 case R6_OPC_CMP_SLT_D
:
26507 case R6_OPC_CMP_SULT_D
:
26508 case R6_OPC_CMP_SLE_D
:
26509 case R6_OPC_CMP_SULE_D
:
26510 case R6_OPC_CMP_OR_D
:
26511 case R6_OPC_CMP_UNE_D
:
26512 case R6_OPC_CMP_NE_D
:
26513 case R6_OPC_CMP_SOR_D
:
26514 case R6_OPC_CMP_SUNE_D
:
26515 case R6_OPC_CMP_SNE_D
:
26516 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26519 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
26520 rt
, rd
, sa
, (imm
>> 8) & 0x7);
26525 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26540 check_insn(ctx
, ASE_MSA
);
26541 gen_msa_branch(env
, ctx
, op1
);
26545 generate_exception_end(ctx
, EXCP_RI
);
26550 /* Compact branches [R6] and COP2 [non-R6] */
26551 case OPC_BC
: /* OPC_LWC2 */
26552 case OPC_BALC
: /* OPC_SWC2 */
26553 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26554 /* OPC_BC, OPC_BALC */
26555 gen_compute_compact_branch(ctx
, op
, 0, 0,
26556 sextract32(ctx
->opcode
<< 2, 0, 28));
26558 /* OPC_LWC2, OPC_SWC2 */
26559 /* COP2: Not implemented. */
26560 generate_exception_err(ctx
, EXCP_CpU
, 2);
26563 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
26564 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
26565 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26567 /* OPC_BEQZC, OPC_BNEZC */
26568 gen_compute_compact_branch(ctx
, op
, rs
, 0,
26569 sextract32(ctx
->opcode
<< 2, 0, 23));
26571 /* OPC_JIC, OPC_JIALC */
26572 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
26575 /* OPC_LWC2, OPC_SWC2 */
26576 /* COP2: Not implemented. */
26577 generate_exception_err(ctx
, EXCP_CpU
, 2);
26581 check_insn(ctx
, INSN_LOONGSON2F
);
26582 /* Note that these instructions use different fields. */
26583 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
26587 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26588 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
26589 check_cp1_enabled(ctx
);
26590 op1
= MASK_CP3(ctx
->opcode
);
26594 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26600 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26601 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
26604 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26605 /* Treat as NOP. */
26608 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26622 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26623 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
26627 generate_exception_end(ctx
, EXCP_RI
);
26631 generate_exception_err(ctx
, EXCP_CpU
, 1);
26635 #if defined(TARGET_MIPS64)
26636 /* MIPS64 opcodes */
26640 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26644 check_insn(ctx
, ISA_MIPS3
);
26645 check_mips_64(ctx
);
26646 gen_ld(ctx
, op
, rt
, rs
, imm
);
26650 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26653 check_insn(ctx
, ISA_MIPS3
);
26654 check_mips_64(ctx
);
26655 gen_st(ctx
, op
, rt
, rs
, imm
);
26658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26659 check_insn(ctx
, ISA_MIPS3
);
26660 check_mips_64(ctx
);
26661 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26663 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
26664 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26665 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
26666 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26669 check_insn(ctx
, ISA_MIPS3
);
26670 check_mips_64(ctx
);
26671 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26675 check_insn(ctx
, ISA_MIPS3
);
26676 check_mips_64(ctx
);
26677 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26680 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
26681 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26682 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26684 MIPS_INVAL("major opcode");
26685 generate_exception_end(ctx
, EXCP_RI
);
26689 case OPC_DAUI
: /* OPC_JALX */
26690 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26691 #if defined(TARGET_MIPS64)
26693 check_mips_64(ctx
);
26695 generate_exception(ctx
, EXCP_RI
);
26696 } else if (rt
!= 0) {
26697 TCGv t0
= tcg_temp_new();
26698 gen_load_gpr(t0
, rs
);
26699 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
26703 generate_exception_end(ctx
, EXCP_RI
);
26704 MIPS_INVAL("major opcode");
26708 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
26709 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26710 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26713 case OPC_MSA
: /* OPC_MDMX */
26714 if (ctx
->insn_flags
& INSN_R5900
) {
26715 decode_tx79_lq(env
, ctx
); /* TX79_LQ */
26717 /* MDMX: Not implemented. */
26722 check_insn(ctx
, ISA_MIPS32R6
);
26723 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
26725 default: /* Invalid */
26726 MIPS_INVAL("major opcode");
26727 generate_exception_end(ctx
, EXCP_RI
);
26732 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
26734 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26735 CPUMIPSState
*env
= cs
->env_ptr
;
26737 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
26738 ctx
->saved_pc
= -1;
26739 ctx
->insn_flags
= env
->insn_flags
;
26740 ctx
->CP0_Config1
= env
->CP0_Config1
;
26741 ctx
->CP0_Config2
= env
->CP0_Config2
;
26742 ctx
->CP0_Config3
= env
->CP0_Config3
;
26743 ctx
->CP0_Config5
= env
->CP0_Config5
;
26745 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
26746 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
26747 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
26748 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
26749 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
26750 ctx
->PAMask
= env
->PAMask
;
26751 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
26752 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
26753 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
26754 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
26755 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
26756 /* Restore delay slot state from the tb context. */
26757 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
26758 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
26759 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
26760 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
26761 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
26762 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
26763 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
26764 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
26765 restore_cpu_state(env
, ctx
);
26766 #ifdef CONFIG_USER_ONLY
26767 ctx
->mem_idx
= MIPS_HFLAG_UM
;
26769 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
26771 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
26772 MO_UNALN
: MO_ALIGN
;
26774 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
26778 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26782 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26784 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26786 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
26790 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
26791 const CPUBreakpoint
*bp
)
26793 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26795 save_cpu_state(ctx
, 1);
26796 ctx
->base
.is_jmp
= DISAS_NORETURN
;
26797 gen_helper_raise_exception_debug(cpu_env
);
26798 /* The address covered by the breakpoint must be included in
26799 [tb->pc, tb->pc + tb->size) in order to for it to be
26800 properly cleared -- thus we increment the PC here so that
26801 the logic setting tb->size below does the right thing. */
26802 ctx
->base
.pc_next
+= 4;
26806 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
26808 CPUMIPSState
*env
= cs
->env_ptr
;
26809 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26813 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
26814 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
26815 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26816 insn_bytes
= decode_nanomips_opc(env
, ctx
);
26817 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
26818 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
26820 decode_opc(env
, ctx
);
26821 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
26822 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26823 insn_bytes
= decode_micromips_opc(env
, ctx
);
26824 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
26825 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26826 insn_bytes
= decode_mips16_opc(env
, ctx
);
26828 generate_exception_end(ctx
, EXCP_RI
);
26829 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
26833 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
26834 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
26835 MIPS_HFLAG_FBNSLOT
))) {
26836 /* force to generate branch as there is neither delay nor
26840 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
26841 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
26842 /* Force to generate branch as microMIPS R6 doesn't restrict
26843 branches in the forbidden slot. */
26848 gen_branch(ctx
, insn_bytes
);
26850 ctx
->base
.pc_next
+= insn_bytes
;
26852 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
26855 /* Execute a branch and its delay slot as a single instruction.
26856 This is what GDB expects and is consistent with what the
26857 hardware does (e.g. if a delay slot instruction faults, the
26858 reported PC is the PC of the branch). */
26859 if (ctx
->base
.singlestep_enabled
&&
26860 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
26861 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26863 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
26864 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26868 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
26870 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26872 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
26873 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
26874 gen_helper_raise_exception_debug(cpu_env
);
26876 switch (ctx
->base
.is_jmp
) {
26878 gen_save_pc(ctx
->base
.pc_next
);
26879 tcg_gen_lookup_and_goto_ptr();
26882 case DISAS_TOO_MANY
:
26883 save_cpu_state(ctx
, 0);
26884 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
26887 tcg_gen_exit_tb(NULL
, 0);
26889 case DISAS_NORETURN
:
26892 g_assert_not_reached();
26897 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
26899 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
26900 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
26903 static const TranslatorOps mips_tr_ops
= {
26904 .init_disas_context
= mips_tr_init_disas_context
,
26905 .tb_start
= mips_tr_tb_start
,
26906 .insn_start
= mips_tr_insn_start
,
26907 .breakpoint_check
= mips_tr_breakpoint_check
,
26908 .translate_insn
= mips_tr_translate_insn
,
26909 .tb_stop
= mips_tr_tb_stop
,
26910 .disas_log
= mips_tr_disas_log
,
26913 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
26917 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
26920 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
26924 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
26926 #define printfpr(fp) \
26929 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26930 " fd:%13g fs:%13g psu: %13g\n", \
26931 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
26932 (double)(fp)->fd, \
26933 (double)(fp)->fs[FP_ENDIAN_IDX], \
26934 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
26937 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
26938 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
26939 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26940 " fd:%13g fs:%13g psu:%13g\n", \
26941 tmp.w[FP_ENDIAN_IDX], tmp.d, \
26943 (double)tmp.fs[FP_ENDIAN_IDX], \
26944 (double)tmp.fs[!FP_ENDIAN_IDX]); \
26949 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
26950 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
26951 get_float_exception_flags(&env
->active_fpu
.fp_status
));
26952 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
26953 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
26954 printfpr(&env
->active_fpu
.fpr
[i
]);
26960 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
26963 MIPSCPU
*cpu
= MIPS_CPU(cs
);
26964 CPUMIPSState
*env
= &cpu
->env
;
26967 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
26968 " LO=0x" TARGET_FMT_lx
" ds %04x "
26969 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
26970 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
26971 env
->hflags
, env
->btarget
, env
->bcond
);
26972 for (i
= 0; i
< 32; i
++) {
26974 cpu_fprintf(f
, "GPR%02d:", i
);
26975 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
26977 cpu_fprintf(f
, "\n");
26980 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
26981 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
26982 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
26984 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
26985 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
26986 env
->CP0_Config2
, env
->CP0_Config3
);
26987 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
26988 env
->CP0_Config4
, env
->CP0_Config5
);
26989 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
26990 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
26994 void mips_tcg_init(void)
26999 for (i
= 1; i
< 32; i
++)
27000 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27001 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
27004 for (i
= 0; i
< 32; i
++) {
27005 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
27007 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
27008 /* The scalar floating-point unit (FPU) registers are mapped on
27009 * the MSA vector registers. */
27010 fpu_f64
[i
] = msa_wr_d
[i
* 2];
27011 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
27012 msa_wr_d
[i
* 2 + 1] =
27013 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
27016 cpu_PC
= tcg_global_mem_new(cpu_env
,
27017 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
27018 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
27019 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
27020 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
27022 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
27023 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
27026 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
27027 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
27029 bcond
= tcg_global_mem_new(cpu_env
,
27030 offsetof(CPUMIPSState
, bcond
), "bcond");
27031 btarget
= tcg_global_mem_new(cpu_env
,
27032 offsetof(CPUMIPSState
, btarget
), "btarget");
27033 hflags
= tcg_global_mem_new_i32(cpu_env
,
27034 offsetof(CPUMIPSState
, hflags
), "hflags");
27036 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
27037 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
27039 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
27040 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
27044 #include "translate_init.inc.c"
27046 void cpu_mips_realize_env(CPUMIPSState
*env
)
27048 env
->exception_base
= (int32_t)0xBFC00000;
27050 #ifndef CONFIG_USER_ONLY
27051 mmu_init(env
, env
->cpu_model
);
27053 fpu_init(env
, env
->cpu_model
);
27054 mvp_init(env
, env
->cpu_model
);
27057 bool cpu_supports_cps_smp(const char *cpu_type
)
27059 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27060 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
27063 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
27065 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27066 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
27069 void cpu_set_exception_base(int vp_index
, target_ulong address
)
27071 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
27072 vp
->env
.exception_base
= address
;
27075 void cpu_state_reset(CPUMIPSState
*env
)
27077 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
27078 CPUState
*cs
= CPU(cpu
);
27080 /* Reset registers to their default values */
27081 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
27082 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
27083 #ifdef TARGET_WORDS_BIGENDIAN
27084 env
->CP0_Config0
|= (1 << CP0C0_BE
);
27086 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
27087 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
27088 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
27089 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
27090 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
27091 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
27092 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
27093 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
27094 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
27095 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
27096 << env
->cpu_model
->CP0_LLAddr_shift
;
27097 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
27098 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
27099 env
->CCRes
= env
->cpu_model
->CCRes
;
27100 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
27101 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
27102 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
27103 env
->current_tc
= 0;
27104 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
27105 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
27106 #if defined(TARGET_MIPS64)
27107 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
27108 env
->SEGMask
|= 3ULL << 62;
27111 env
->PABITS
= env
->cpu_model
->PABITS
;
27112 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
27113 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
27114 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
27115 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
27116 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
27117 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
27118 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
27119 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
27120 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
27121 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
27122 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
27123 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
27124 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
27125 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
27126 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
27127 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
27128 env
->msair
= env
->cpu_model
->MSAIR
;
27129 env
->insn_flags
= env
->cpu_model
->insn_flags
;
27131 #if defined(CONFIG_USER_ONLY)
27132 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
27133 # ifdef TARGET_MIPS64
27134 /* Enable 64-bit register mode. */
27135 env
->CP0_Status
|= (1 << CP0St_PX
);
27137 # ifdef TARGET_ABI_MIPSN64
27138 /* Enable 64-bit address mode. */
27139 env
->CP0_Status
|= (1 << CP0St_UX
);
27141 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
27142 hardware registers. */
27143 env
->CP0_HWREna
|= 0x0000000F;
27144 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
27145 env
->CP0_Status
|= (1 << CP0St_CU1
);
27147 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
27148 env
->CP0_Status
|= (1 << CP0St_MX
);
27150 # if defined(TARGET_MIPS64)
27151 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
27152 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
27153 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
27154 env
->CP0_Status
|= (1 << CP0St_FR
);
27158 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
27159 /* If the exception was raised from a delay slot,
27160 come back to the jump. */
27161 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
27162 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
27164 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
27166 env
->active_tc
.PC
= env
->exception_base
;
27167 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
27168 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
27169 env
->CP0_Wired
= 0;
27170 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
27171 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
27172 if (mips_um_ksegs_enabled()) {
27173 env
->CP0_EBase
|= 0x40000000;
27175 env
->CP0_EBase
|= (int32_t)0x80000000;
27177 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
27178 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
27180 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
27182 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
27183 /* vectored interrupts not implemented, timer on int 7,
27184 no performance counters. */
27185 env
->CP0_IntCtl
= 0xe0000000;
27189 for (i
= 0; i
< 7; i
++) {
27190 env
->CP0_WatchLo
[i
] = 0;
27191 env
->CP0_WatchHi
[i
] = 0x80000000;
27193 env
->CP0_WatchLo
[7] = 0;
27194 env
->CP0_WatchHi
[7] = 0;
27196 /* Count register increments in debug mode, EJTAG version 1 */
27197 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
27199 cpu_mips_store_count(env
, 1);
27201 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
27204 /* Only TC0 on VPE 0 starts as active. */
27205 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
27206 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
27207 env
->tcs
[i
].CP0_TCHalt
= 1;
27209 env
->active_tc
.CP0_TCHalt
= 1;
27212 if (cs
->cpu_index
== 0) {
27213 /* VPE0 starts up enabled. */
27214 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
27215 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
27217 /* TC0 starts up unhalted. */
27219 env
->active_tc
.CP0_TCHalt
= 0;
27220 env
->tcs
[0].CP0_TCHalt
= 0;
27221 /* With thread 0 active. */
27222 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
27223 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
27228 * Configure default legacy segmentation control. We use this regardless of
27229 * whether segmentation control is presented to the guest.
27231 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
27232 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
27233 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
27234 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
27235 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
27236 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27238 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
27239 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27240 (3 << CP0SC_C
)) << 16;
27241 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
27242 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27243 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
27244 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
27245 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27246 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
27247 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
27248 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
27250 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
27251 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
27252 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
27253 env
->CP0_Status
|= (1 << CP0St_FR
);
27256 if (env
->insn_flags
& ISA_MIPS32R6
) {
27258 env
->CP0_PWSize
= 0x40;
27264 env
->CP0_PWField
= 0x0C30C302;
27271 env
->CP0_PWField
= 0x02;
27274 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
27275 /* microMIPS on reset when Config3.ISA is 3 */
27276 env
->hflags
|= MIPS_HFLAG_M16
;
27280 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
27284 compute_hflags(env
);
27285 restore_fp_status(env
);
27286 restore_pamask(env
);
27287 cs
->exception_index
= EXCP_NONE
;
27289 if (semihosting_get_argc()) {
27290 /* UHI interface can be used to obtain argc and argv */
27291 env
->active_tc
.gpr
[4] = -1;
27295 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
27296 target_ulong
*data
)
27298 env
->active_tc
.PC
= data
[0];
27299 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
27300 env
->hflags
|= data
[1];
27301 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
27302 case MIPS_HFLAG_BR
:
27304 case MIPS_HFLAG_BC
:
27305 case MIPS_HFLAG_BL
:
27307 env
->btarget
= data
[2];