/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
474 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
496 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
505 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
535 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
536 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
558 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
582 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
610 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
637 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
664 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
670 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
678 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
700 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
729 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
761 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
794 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
803 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
829 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
835 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* SHLL.OB sub-opcode mask: SPECIAL3 function field plus bits 10..6. */
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
/*
 * CP0 sub-opcode mask: major opcode plus the rs field (bits 25..21).
 * The whole expansion is parenthesized so the macro can be embedded safely
 * in larger expressions, matching MASK_OP_MAJOR and the other MASK_* macros.
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/* MFMC0 mask: CP0 mask plus the low 16 bits; parenthesized for safe embedding. */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
/* rs == C0 sub-opcode mask: CP0 mask plus function field bits 5..0; parenthesized. */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
960 /* Coprocessor 1 (rs field) */
/* CP1 sub-opcode mask: major opcode plus the rs field (bits 25..21); parenthesized. */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
963 /* Values for the fmt field in FP instructions */
965 /* 0 - 15 are reserved */
966 FMT_S
= 16, /* single fp */
967 FMT_D
= 17, /* double fp */
968 FMT_E
= 18, /* extended fp */
969 FMT_Q
= 19, /* quad fp */
970 FMT_W
= 20, /* 32-bit fixed */
971 FMT_L
= 21, /* 64-bit fixed */
972 FMT_PS
= 22, /* paired single fp */
973 /* 23 - 31 are reserved */
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/* CP1 function mask: CP1 mask plus function field bits 5..0; parenthesized. */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * BC1 mask: CP1 mask plus bits 17..16 (the field that selects BC1F/BC1T/
 * BC1FL/BC1TL below); parenthesized for safe embedding.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/* CP2 sub-opcode mask: major opcode plus the rs field (bits 25..21); parenthesized. */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* LMI mask: major opcode plus rs field (bits 25..21) and function bits 4..0. */
#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/* CP3 sub-opcode mask: major opcode plus function field bits 5..0; parenthesized. */
#define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* MSA minor-opcode mask: major opcode plus function field bits 5..0. */
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1402 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1403 * ============================================
 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of the
 * MIPS32 instruction set. It is designed to fit the needs of signal, graphical and
1407 * video processing applications. MXU instruction set is used in Xburst family
1408 * of microprocessors by Ingenic.
1410 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1411 * the control register.
1413 * The notation used in MXU assembler mnemonics:
1415 * XRa, XRb, XRc, XRd - MXU registers
1416 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1417 * s12 - a subfield of an instruction code
1418 * strd2 - a subfield of an instruction code
1419 * eptn2 - a subfield of an instruction code
1420 * eptn3 - a subfield of an instruction code
1421 * optn2 - a subfield of an instruction code
1422 * optn3 - a subfield of an instruction code
1423 * sft4 - a subfield of an instruction code
1425 * Load/Store instructions Multiplication instructions
1426 * ----------------------- ---------------------------
1428 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1429 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1430 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1431 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1432 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1433 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1434 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1435 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1436 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1437 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1438 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1439 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1440 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1441 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1442 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1443 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1444 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1445 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1446 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1447 * S16SDI XRa, Rb, s10, eptn2
1448 * S8LDD XRa, Rb, s8, eptn3
1449 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1450 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1451 * S8SDI XRa, Rb, s8, eptn3
1452 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1453 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1454 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1455 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1456 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1457 * S32CPS XRa, XRb, XRc
1458 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1459 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1460 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1461 * D16ASUM XRa, XRb, XRc, XRd, eptn2
 * S32MAX XRa, XRb, XRc                 D16CPS XRa, XRb
1463 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1464 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1465 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1466 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1467 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1468 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1469 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1470 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1471 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1472 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1473 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1474 * Q8SLT XRa, XRb, XRc
1475 * Q8SLTU XRa, XRb, XRc
1476 * Q8MOVZ XRa, XRb, XRc Shift instructions
1477 * Q8MOVN XRa, XRb, XRc ------------------
1479 * D32SLL XRa, XRb, XRc, XRd, sft4
1480 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1481 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1482 * D32SARL XRa, XRb, XRc, sft4
1483 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1484 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1485 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1486 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1487 * Q16SLL XRa, XRb, XRc, XRd, sft4
1488 * Q16SLR XRa, XRb, XRc, XRd, sft4
 * Miscellaneous instructions           Q16SAR XRa, XRb, XRc, XRd, sft4
1490 * ------------------------- Q16SLLV XRa, XRb, Rb
1491 * Q16SLRV XRa, XRb, Rb
1492 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1493 * S32ALN XRa, XRb, XRc, Rb
1494 * S32ALNI XRa, XRb, XRc, s3
1495 * S32LUI XRa, s8, optn3 Move instructions
1496 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1497 * S32EXTRV XRa, XRb, Rs, Rt
1498 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1499 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1505 * ┌─ 000000 ─ OPC_MXU_S32MADD
1506 * ├─ 000001 ─ OPC_MXU_S32MADDU
1507 * ├─ 000010 ─ <not assigned>
1509 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1510 * │ ├─ 001 ─ OPC_MXU_S32MIN
1511 * │ ├─ 010 ─ OPC_MXU_D16MAX
1512 * │ ├─ 011 ─ OPC_MXU_D16MIN
1513 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1514 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1515 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1516 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1517 * ├─ 000100 ─ OPC_MXU_S32MSUB
1518 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1519 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1520 * │ ├─ 001 ─ OPC_MXU_D16SLT
1521 * │ ├─ 010 ─ OPC_MXU_D16AVG
1522 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1523 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1524 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1525 * │ └─ 111 ─ OPC_MXU_Q8ADD
1528 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1529 * │ ├─ 010 ─ OPC_MXU_D16CPS
1530 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1531 * │ └─ 110 ─ OPC_MXU_Q16SAT
1532 * ├─ 001000 ─ OPC_MXU_D16MUL
1534 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1535 * │ └─ 01 ─ OPC_MXU_D16MULE
1536 * ├─ 001010 ─ OPC_MXU_D16MAC
1537 * ├─ 001011 ─ OPC_MXU_D16MACF
1538 * ├─ 001100 ─ OPC_MXU_D16MADL
1540 * ├─ 001101 ─ OPC_MXU__POOL04 ─┬─ 00 ─ OPC_MXU_S16MAD
1541 * │ └─ 01 ─ OPC_MXU_S16MAD_1
1542 * ├─ 001110 ─ OPC_MXU_Q16ADD
1543 * ├─ 001111 ─ OPC_MXU_D16MACE
1545 * ├─ 010000 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32LDD
1546 * │ └─ 1 ─ OPC_MXU_S32LDDR
1549 * ├─ 010001 ─ OPC_MXU__POOL06 ─┬─ 0 ─ OPC_MXU_S32STD
1550 * │ └─ 1 ─ OPC_MXU_S32STDR
1553 * ├─ 010010 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1554 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1557 * ├─ 010011 ─ OPC_MXU__POOL08 ─┬─ 0000 ─ OPC_MXU_S32STDV
1558 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1561 * ├─ 010100 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32LDI
1562 * │ └─ 1 ─ OPC_MXU_S32LDIR
1565 * ├─ 010101 ─ OPC_MXU__POOL10 ─┬─ 0 ─ OPC_MXU_S32SDI
1566 * │ └─ 1 ─ OPC_MXU_S32SDIR
1569 * ├─ 010110 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1570 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1573 * ├─ 010111 ─ OPC_MXU__POOL12 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1574 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1575 * ├─ 011000 ─ OPC_MXU_D32ADD
1577 * MXU ├─ 011001 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_D32ACC
1578 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1579 * │ └─ 10 ─ OPC_MXU_D32ASUM
1580 * ├─ 011010 ─ <not assigned>
1582 * ├─ 011011 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q16ACC
1583 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
 *                                     │                        └─ 10 ─ OPC_MXU_D16ASUM
1587 * ├─ 011100 ─ OPC_MXU__POOL15 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1588 * │ ├─ 01 ─ OPC_MXU_D8SUM
1589 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1590 * ├─ 011110 ─ <not assigned>
1591 * ├─ 011111 ─ <not assigned>
1592 * ├─ 100000 ─ <not assigned>
1593 * ├─ 100001 ─ <not assigned>
1594 * ├─ 100010 ─ OPC_MXU_S8LDD
1595 * ├─ 100011 ─ OPC_MXU_S8STD
1596 * ├─ 100100 ─ OPC_MXU_S8LDI
1597 * ├─ 100101 ─ OPC_MXU_S8SDI
1599 * ├─ 100110 ─ OPC_MXU__POOL16 ─┬─ 00 ─ OPC_MXU_S32MUL
 *                                     │                        ├─ 01 ─ OPC_MXU_S32MULU
 *                                     │                        ├─ 10 ─ OPC_MXU_S32EXTR
 *                                     │                        └─ 11 ─ OPC_MXU_S32EXTRV
1605 * ├─ 100111 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_D32SARW
1606 * │ ├─ 001 ─ OPC_MXU_S32ALN
1607 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1608 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1609 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1610 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1611 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1612 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1613 * ├─ 101000 ─ <not assigned>
1614 * ├─ 101001 ─ <not assigned>
1615 * ├─ 101010 ─ <not assigned>
1616 * ├─ 101011 ─ <not assigned>
1617 * ├─ 101100 ─ <not assigned>
1618 * ├─ 101101 ─ <not assigned>
1619 * ├─ 101110 ─ OPC_MXU_S32M2I
1620 * ├─ 101111 ─ OPC_MXU_S32I2M
1621 * ├─ 110000 ─ OPC_MXU_D32SLL
1622 * ├─ 110001 ─ OPC_MXU_D32SLR
1623 * ├─ 110010 ─ OPC_MXU_D32SARL
1624 * ├─ 110011 ─ OPC_MXU_D32SAR
1625 * ├─ 110100 ─ OPC_MXU_Q16SLL
1626 * ├─ 110101 ─ OPC_MXU_Q16SLR 20..18
1627 * ├─ 110110 ─ OPC_MXU__POOL18 ─┬─ 000 ─ OPC_MXU_D32SLLV
1628 * │ ├─ 001 ─ OPC_MXU_D32SLRV
1629 * │ ├─ 010 ─ OPC_MXU_D32SARV
1630 * │ ├─ 011 ─ OPC_MXU_Q16SLLV
1631 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1632 * │ └─ 101 ─ OPC_MXU_Q16SARV
1633 * ├─ 110111 ─ OPC_MXU_Q16SAR
1635 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1636 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1639 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1640 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1641 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1642 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1643 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1644 * │ └─ 101 ─ OPC_MXU_S32MOV
1647 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1648 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1649 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1650 * ├─ 111100 ─ OPC_MXU_Q8MADL
1651 * ├─ 111101 ─ OPC_MXU_S32SFL
1652 * ├─ 111110 ─ OPC_MXU_Q8SAD
1653 * └─ 111111 ─ <not assigned>
1658 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1659 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
1663 OPC_MXU_S32MADD
= 0x00,
1664 OPC_MXU_S32MADDU
= 0x01,
1665 /* not assigned 0x02 */
1666 OPC_MXU__POOL00
= 0x03,
1667 OPC_MXU_S32MSUB
= 0x04,
1668 OPC_MXU_S32MSUBU
= 0x05,
1669 OPC_MXU__POOL01
= 0x06,
1670 OPC_MXU__POOL02
= 0x07,
1671 OPC_MXU_D16MUL
= 0x08,
1672 OPC_MXU__POOL03
= 0x09,
1673 OPC_MXU_D16MAC
= 0x0A,
1674 OPC_MXU_D16MACF
= 0x0B,
1675 OPC_MXU_D16MADL
= 0x0C,
1676 OPC_MXU__POOL04
= 0x0D,
1677 OPC_MXU_Q16ADD
= 0x0E,
1678 OPC_MXU_D16MACE
= 0x0F,
1679 OPC_MXU__POOL05
= 0x10,
1680 OPC_MXU__POOL06
= 0x11,
1681 OPC_MXU__POOL07
= 0x12,
1682 OPC_MXU__POOL08
= 0x13,
1683 OPC_MXU__POOL09
= 0x14,
1684 OPC_MXU__POOL10
= 0x15,
1685 OPC_MXU__POOL11
= 0x16,
1686 OPC_MXU__POOL12
= 0x17,
1687 OPC_MXU_D32ADD
= 0x18,
1688 OPC_MXU__POOL13
= 0x19,
1689 /* not assigned 0x1A */
1690 OPC_MXU__POOL14
= 0x1B,
1691 OPC_MXU__POOL15
= 0x1C,
1692 OPC_MXU_Q8ACCE
= 0x1D,
1693 /* not assigned 0x1E */
1694 /* not assigned 0x1F */
1695 /* not assigned 0x20 */
1696 /* not assigned 0x21 */
1697 OPC_MXU_S8LDD
= 0x22,
1698 OPC_MXU_S8STD
= 0x23,
1699 OPC_MXU_S8LDI
= 0x24,
1700 OPC_MXU_S8SDI
= 0x25,
1701 OPC_MXU__POOL16
= 0x26,
1702 OPC_MXU__POOL17
= 0x27,
1704 /* not assigned 0x29 */
1705 OPC_MXU_S16LDD
= 0x2A,
1706 OPC_MXU_S16STD
= 0x2B,
1707 OPC_MXU_S16LDI
= 0x2C,
1708 OPC_MXU_S16SDI
= 0x2D,
1709 OPC_MXU_S32M2I
= 0x2E,
1710 OPC_MXU_S32I2M
= 0x2F,
1711 OPC_MXU_D32SLL
= 0x30,
1712 OPC_MXU_D32SLR
= 0x31,
1713 OPC_MXU_D32SARL
= 0x32,
1714 OPC_MXU_D32SAR
= 0x33,
1715 OPC_MXU_Q16SLL
= 0x34,
1716 OPC_MXU_Q16SLR
= 0x35,
1717 OPC_MXU__POOL18
= 0x36,
1718 OPC_MXU_Q16SAR
= 0x37,
1719 OPC_MXU__POOL19
= 0x38,
1720 OPC_MXU__POOL20
= 0x39,
1721 OPC_MXU__POOL21
= 0x3A,
1722 OPC_MXU_Q16SCOP
= 0x3B,
1723 OPC_MXU_Q8MADL
= 0x3C,
1724 OPC_MXU_S32SFL
= 0x3D,
1725 OPC_MXU_Q8SAD
= 0x3E,
1726 /* not assigned 0x3F */
1734 OPC_MXU_S32MAX
= 0x00,
1735 OPC_MXU_S32MIN
= 0x01,
1736 OPC_MXU_D16MAX
= 0x02,
1737 OPC_MXU_D16MIN
= 0x03,
1738 OPC_MXU_Q8MAX
= 0x04,
1739 OPC_MXU_Q8MIN
= 0x05,
1740 OPC_MXU_Q8SLT
= 0x06,
1741 OPC_MXU_Q8SLTU
= 0x07,
1748 OPC_MXU_S32SLT
= 0x00,
1749 OPC_MXU_D16SLT
= 0x01,
1750 OPC_MXU_D16AVG
= 0x02,
1751 OPC_MXU_D16AVGR
= 0x03,
1752 OPC_MXU_Q8AVG
= 0x04,
1753 OPC_MXU_Q8AVGR
= 0x05,
1754 OPC_MXU_Q8ADD
= 0x07,
1761 OPC_MXU_S32CPS
= 0x00,
1762 OPC_MXU_D16CPS
= 0x02,
1763 OPC_MXU_Q8ABD
= 0x04,
1764 OPC_MXU_Q16SAT
= 0x06,
1771 OPC_MXU_D16MULF
= 0x00,
1772 OPC_MXU_D16MULE
= 0x01,
1779 OPC_MXU_S16MAD
= 0x00,
1780 OPC_MXU_S16MAD_1
= 0x01,
1787 OPC_MXU_S32LDD
= 0x00,
1788 OPC_MXU_S32LDDR
= 0x01,
1795 OPC_MXU_S32STD
= 0x00,
1796 OPC_MXU_S32STDR
= 0x01,
1803 OPC_MXU_S32LDDV
= 0x00,
1804 OPC_MXU_S32LDDVR
= 0x01,
1811 OPC_MXU_S32STDV
= 0x00,
1812 OPC_MXU_S32STDVR
= 0x01,
1819 OPC_MXU_S32LDI
= 0x00,
1820 OPC_MXU_S32LDIR
= 0x01,
1827 OPC_MXU_S32SDI
= 0x00,
1828 OPC_MXU_S32SDIR
= 0x01,
1835 OPC_MXU_S32LDIV
= 0x00,
1836 OPC_MXU_S32LDIVR
= 0x01,
1843 OPC_MXU_S32SDIV
= 0x00,
1844 OPC_MXU_S32SDIVR
= 0x01,
1851 OPC_MXU_D32ACC
= 0x00,
1852 OPC_MXU_D32ACCM
= 0x01,
1853 OPC_MXU_D32ASUM
= 0x02,
1860 OPC_MXU_Q16ACC
= 0x00,
1861 OPC_MXU_Q16ACCM
= 0x01,
1862 OPC_MXU_Q16ASUM
= 0x02,
1869 OPC_MXU_Q8ADDE
= 0x00,
1870 OPC_MXU_D8SUM
= 0x01,
1871 OPC_MXU_D8SUMC
= 0x02,
1878 OPC_MXU_S32MUL
= 0x00,
1879 OPC_MXU_S32MULU
= 0x01,
1880 OPC_MXU_S32EXTR
= 0x02,
1881 OPC_MXU_S32EXTRV
= 0x03,
1888 OPC_MXU_D32SARW
= 0x00,
1889 OPC_MXU_S32ALN
= 0x01,
1890 OPC_MXU_S32ALNI
= 0x02,
1891 OPC_MXU_S32NOR
= 0x03,
1892 OPC_MXU_S32AND
= 0x04,
1893 OPC_MXU_S32OR
= 0x05,
1894 OPC_MXU_S32XOR
= 0x06,
1895 OPC_MXU_S32LUI
= 0x07,
1902 OPC_MXU_D32SLLV
= 0x00,
1903 OPC_MXU_D32SLRV
= 0x01,
1904 OPC_MXU_D32SARV
= 0x03,
1905 OPC_MXU_Q16SLLV
= 0x04,
1906 OPC_MXU_Q16SLRV
= 0x05,
1907 OPC_MXU_Q16SARV
= 0x07,
1914 OPC_MXU_Q8MUL
= 0x00,
1915 OPC_MXU_Q8MULSU
= 0x01,
1922 OPC_MXU_Q8MOVZ
= 0x00,
1923 OPC_MXU_Q8MOVN
= 0x01,
1924 OPC_MXU_D16MOVZ
= 0x02,
1925 OPC_MXU_D16MOVN
= 0x03,
1926 OPC_MXU_S32MOVZ
= 0x04,
1927 OPC_MXU_S32MOVN
= 0x05,
1934 OPC_MXU_Q8MAC
= 0x00,
1935 OPC_MXU_Q8MACSU
= 0x01,
1939 * Overview of the TX79-specific instruction set
1940 * =============================================
1942 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1943 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1944 * instructions and certain multimedia instructions (MMIs). These MMIs
1945 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1946 * or sixteen 8-bit paths.
1950 * The Toshiba TX System RISC TX79 Core Architecture manual,
1951 * https://wiki.qemu.org/File:C790.pdf
1953 * Three-Operand Multiply and Multiply-Add (4 instructions)
1954 * --------------------------------------------------------
1955 * MADD [rd,] rs, rt Multiply/Add
1956 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1957 * MULT [rd,] rs, rt Multiply (3-operand)
1958 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1960 * Multiply Instructions for Pipeline 1 (10 instructions)
1961 * ------------------------------------------------------
1962 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1963 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1964 * DIV1 rs, rt Divide Pipeline 1
1965 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1966 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1967 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1968 * MFHI1 rd Move From HI1 Register
1969 * MFLO1 rd Move From LO1 Register
1970 * MTHI1 rs Move To HI1 Register
1971 * MTLO1 rs Move To LO1 Register
1973 * Arithmetic (19 instructions)
1974 * ----------------------------
1975 * PADDB rd, rs, rt Parallel Add Byte
1976 * PSUBB rd, rs, rt Parallel Subtract Byte
1977 * PADDH rd, rs, rt Parallel Add Halfword
1978 * PSUBH rd, rs, rt Parallel Subtract Halfword
1979 * PADDW rd, rs, rt Parallel Add Word
1980 * PSUBW rd, rs, rt Parallel Subtract Word
1981 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1982 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1983 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1984 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1985 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1986 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1987 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1988 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1989 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1990 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1991 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1992 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1993 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1995 * Min/Max (4 instructions)
1996 * ------------------------
1997 * PMAXH rd, rs, rt Parallel Maximum Halfword
1998 * PMINH rd, rs, rt Parallel Minimum Halfword
1999 * PMAXW rd, rs, rt Parallel Maximum Word
2000 * PMINW rd, rs, rt Parallel Minimum Word
2002 * Absolute (2 instructions)
2003 * -------------------------
2004 * PABSH rd, rt Parallel Absolute Halfword
2005 * PABSW rd, rt Parallel Absolute Word
2007 * Logical (4 instructions)
2008 * ------------------------
2009 * PAND rd, rs, rt Parallel AND
2010 * POR rd, rs, rt Parallel OR
2011 * PXOR rd, rs, rt Parallel XOR
2012 * PNOR rd, rs, rt Parallel NOR
2014 * Shift (9 instructions)
2015 * ----------------------
2016 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2017 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2018 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2019 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2020 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2021 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2022 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2023 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2024 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2026 * Compare (6 instructions)
2027 * ------------------------
2028 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2029 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2030 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2031 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2032 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2033 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2035 * LZC (1 instruction)
2036 * -------------------
2037 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2039 * Quadword Load and Store (2 instructions)
2040 * ----------------------------------------
2041 * LQ rt, offset(base) Load Quadword
2042 * SQ rt, offset(base) Store Quadword
2044 * Multiply and Divide (19 instructions)
2045 * -------------------------------------
2046 * PMULTW rd, rs, rt Parallel Multiply Word
2047 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2048 * PDIVW rs, rt Parallel Divide Word
2049 * PDIVUW rs, rt Parallel Divide Unsigned Word
2050 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2051 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2052 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2053 * PMULTH rd, rs, rt Parallel Multiply Halfword
2054 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2055 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2056 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2057 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2058 * PDIVBW rs, rt Parallel Divide Broadcast Word
2059 * PMFHI rd Parallel Move From HI Register
2060 * PMFLO rd Parallel Move From LO Register
2061 * PMTHI rs Parallel Move To HI Register
2062 * PMTLO rs Parallel Move To LO Register
2063 * PMFHL rd Parallel Move From HI/LO Register
2064 * PMTHL rs Parallel Move To HI/LO Register
2066 * Pack/Extend (11 instructions)
2067 * -----------------------------
2068 * PPAC5 rd, rt Parallel Pack to 5 bits
2069 * PPACB rd, rs, rt Parallel Pack to Byte
2070 * PPACH rd, rs, rt Parallel Pack to Halfword
2071 * PPACW rd, rs, rt Parallel Pack to Word
2072 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2073 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2074 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2075 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2076 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2077 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2078 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2080 * Others (16 instructions)
2081 * ------------------------
2082 * PCPYH rd, rt Parallel Copy Halfword
2083 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2084 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2085 * PREVH rd, rt Parallel Reverse Halfword
2086 * PINTH rd, rs, rt Parallel Interleave Halfword
2087 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2088 * PEXEH rd, rt Parallel Exchange Even Halfword
2089 * PEXCH rd, rt Parallel Exchange Center Halfword
2090 * PEXEW rd, rt Parallel Exchange Even Word
2091 * PEXCW rd, rt Parallel Exchange Center Word
2092 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2093 * MFSA rd Move from Shift Amount Register
2094 * MTSA rs Move to Shift Amount Register
2095 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2096 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2097 * PROT3W rd, rt Parallel Rotate 3 Words
2099 * The TX79-specific Multimedia Instruction encodings
2100 * ==================================================
2102 * TX79 Multimedia Instruction encoding table keys:
2104 * * This code is reserved for future use. An attempt to execute it
2105 * causes a Reserved Instruction exception.
2106 * % This code indicates an instruction class. The instruction word
2107 * must be further decoded by examining additional tables that show
2108 * the values for other instruction fields.
2109 * # This code is reserved for the unsupported instructions DMULT,
2110 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2111 * to execute it causes a Reserved Instruction exception.
2113 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2116 * +--------+----------------------------------------+
2118 * +--------+----------------------------------------+
2120 * opcode bits 28..26
2121 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2122 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2123 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2124 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2125 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2126 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2127 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2128 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2129 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2130 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2131 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
2135 TX79_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
2136 TX79_LQ
= 0x1E << 26, /* Same as OPC_MSA */
2137 TX79_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
2141 * TX79 Multimedia Instructions with opcode field = MMI:
2144 * +--------+-------------------------------+--------+
2145 * | MMI | |function|
2146 * +--------+-------------------------------+--------+
2148 * function bits 2..0
2149 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2150 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2151 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2152 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2153 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2154 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2155 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2156 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2157 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2158 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2159 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2162 #define MASK_TX79_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2164 TX79_MMI_MADD
= 0x00 | TX79_CLASS_MMI
, /* Same as OPC_MADD */
2165 TX79_MMI_MADDU
= 0x01 | TX79_CLASS_MMI
, /* Same as OPC_MADDU */
2166 TX79_MMI_PLZCW
= 0x04 | TX79_CLASS_MMI
,
2167 TX79_MMI_CLASS_MMI0
= 0x08 | TX79_CLASS_MMI
,
2168 TX79_MMI_CLASS_MMI2
= 0x09 | TX79_CLASS_MMI
,
2169 TX79_MMI_MFHI1
= 0x10 | TX79_CLASS_MMI
, /* Same minor as OPC_MFHI */
2170 TX79_MMI_MTHI1
= 0x11 | TX79_CLASS_MMI
, /* Same minor as OPC_MTHI */
2171 TX79_MMI_MFLO1
= 0x12 | TX79_CLASS_MMI
, /* Same minor as OPC_MFLO */
2172 TX79_MMI_MTLO1
= 0x13 | TX79_CLASS_MMI
, /* Same minor as OPC_MTLO */
2173 TX79_MMI_MULT1
= 0x18 | TX79_CLASS_MMI
, /* Same minor as OPC_MULT */
2174 TX79_MMI_MULTU1
= 0x19 | TX79_CLASS_MMI
, /* Same minor as OPC_MULTU */
2175 TX79_MMI_DIV1
= 0x1A | TX79_CLASS_MMI
, /* Same minor as OPC_DIV */
2176 TX79_MMI_DIVU1
= 0x1B | TX79_CLASS_MMI
, /* Same minor as OPC_DIVU */
2177 TX79_MMI_MADD1
= 0x20 | TX79_CLASS_MMI
,
2178 TX79_MMI_MADDU1
= 0x21 | TX79_CLASS_MMI
,
2179 TX79_MMI_CLASS_MMI1
= 0x28 | TX79_CLASS_MMI
,
2180 TX79_MMI_CLASS_MMI3
= 0x29 | TX79_CLASS_MMI
,
2181 TX79_MMI_PMFHL
= 0x30 | TX79_CLASS_MMI
,
2182 TX79_MMI_PMTHL
= 0x31 | TX79_CLASS_MMI
,
2183 TX79_MMI_PSLLH
= 0x34 | TX79_CLASS_MMI
,
2184 TX79_MMI_PSRLH
= 0x36 | TX79_CLASS_MMI
,
2185 TX79_MMI_PSRAH
= 0x37 | TX79_CLASS_MMI
,
2186 TX79_MMI_PSLLW
= 0x3C | TX79_CLASS_MMI
,
2187 TX79_MMI_PSRLW
= 0x3E | TX79_CLASS_MMI
,
2188 TX79_MMI_PSRAW
= 0x3F | TX79_CLASS_MMI
,
2192 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI0:
2195 * +--------+----------------------+--------+--------+
2196 * | MMI | |function| MMI0 |
2197 * +--------+----------------------+--------+--------+
2199 * function bits 7..6
2200 * bits | 0 | 1 | 2 | 3
2201 * 10..8 | 00 | 01 | 10 | 11
2202 * -------+-------+-------+-------+-------
2203 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2204 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2205 * 2 010 | PADDB | PSUBB | PCGTB | *
2206 * 3 011 | * | * | * | *
2207 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2208 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2209 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2210 * 7 111 | * | * | PEXT5 | PPAC5
2213 #define MASK_TX79_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2215 TX79_MMI0_PADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI0
,
2216 TX79_MMI0_PSUBW
= (0x01 << 6) | TX79_MMI_CLASS_MMI0
,
2217 TX79_MMI0_PCGTW
= (0x02 << 6) | TX79_MMI_CLASS_MMI0
,
2218 TX79_MMI0_PMAXW
= (0x03 << 6) | TX79_MMI_CLASS_MMI0
,
2219 TX79_MMI0_PADDH
= (0x04 << 6) | TX79_MMI_CLASS_MMI0
,
2220 TX79_MMI0_PSUBH
= (0x05 << 6) | TX79_MMI_CLASS_MMI0
,
2221 TX79_MMI0_PCGTH
= (0x06 << 6) | TX79_MMI_CLASS_MMI0
,
2222 TX79_MMI0_PMAXH
= (0x07 << 6) | TX79_MMI_CLASS_MMI0
,
2223 TX79_MMI0_PADDB
= (0x08 << 6) | TX79_MMI_CLASS_MMI0
,
2224 TX79_MMI0_PSUBB
= (0x09 << 6) | TX79_MMI_CLASS_MMI0
,
2225 TX79_MMI0_PCGTB
= (0x0A << 6) | TX79_MMI_CLASS_MMI0
,
2226 TX79_MMI0_PADDSW
= (0x10 << 6) | TX79_MMI_CLASS_MMI0
,
2227 TX79_MMI0_PSUBSW
= (0x11 << 6) | TX79_MMI_CLASS_MMI0
,
2228 TX79_MMI0_PEXTLW
= (0x12 << 6) | TX79_MMI_CLASS_MMI0
,
2229 TX79_MMI0_PPACW
= (0x13 << 6) | TX79_MMI_CLASS_MMI0
,
2230 TX79_MMI0_PADDSH
= (0x14 << 6) | TX79_MMI_CLASS_MMI0
,
2231 TX79_MMI0_PSUBSH
= (0x15 << 6) | TX79_MMI_CLASS_MMI0
,
2232 TX79_MMI0_PEXTLH
= (0x16 << 6) | TX79_MMI_CLASS_MMI0
,
2233 TX79_MMI0_PPACH
= (0x17 << 6) | TX79_MMI_CLASS_MMI0
,
2234 TX79_MMI0_PADDSB
= (0x18 << 6) | TX79_MMI_CLASS_MMI0
,
2235 TX79_MMI0_PSUBSB
= (0x19 << 6) | TX79_MMI_CLASS_MMI0
,
2236 TX79_MMI0_PEXTLB
= (0x1A << 6) | TX79_MMI_CLASS_MMI0
,
2237 TX79_MMI0_PPACB
= (0x1B << 6) | TX79_MMI_CLASS_MMI0
,
2238 TX79_MMI0_PEXT5
= (0x1E << 6) | TX79_MMI_CLASS_MMI0
,
2239 TX79_MMI0_PPAC5
= (0x1F << 6) | TX79_MMI_CLASS_MMI0
,
2243 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI1:
2246 * +--------+----------------------+--------+--------+
2247 * | MMI | |function| MMI1 |
2248 * +--------+----------------------+--------+--------+
2250 * function bits 7..6
2251 * bits | 0 | 1 | 2 | 3
2252 * 10..8 | 00 | 01 | 10 | 11
2253 * -------+-------+-------+-------+-------
2254 * 0 000 | * | PABSW | PCEQW | PMINW
2255 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2256 * 2 010 | * | * | PCEQB | *
2257 * 3 011 | * | * | * | *
2258 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2259 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2260 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2261 * 7 111 | * | * | * | *
2264 #define MASK_TX79_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2266 TX79_MMI1_PABSW
= (0x01 << 6) | TX79_MMI_CLASS_MMI1
,
2267 TX79_MMI1_PCEQW
= (0x02 << 6) | TX79_MMI_CLASS_MMI1
,
2268 TX79_MMI1_PMINW
= (0x03 << 6) | TX79_MMI_CLASS_MMI1
,
2269 TX79_MMI1_PADSBH
= (0x04 << 6) | TX79_MMI_CLASS_MMI1
,
2270 TX79_MMI1_PABSH
= (0x05 << 6) | TX79_MMI_CLASS_MMI1
,
2271 TX79_MMI1_PCEQH
= (0x06 << 6) | TX79_MMI_CLASS_MMI1
,
2272 TX79_MMI1_PMINH
= (0x07 << 6) | TX79_MMI_CLASS_MMI1
,
2273 TX79_MMI1_PCEQB
= (0x0A << 6) | TX79_MMI_CLASS_MMI1
,
2274 TX79_MMI1_PADDUW
= (0x10 << 6) | TX79_MMI_CLASS_MMI1
,
2275 TX79_MMI1_PSUBUW
= (0x11 << 6) | TX79_MMI_CLASS_MMI1
,
2276 TX79_MMI1_PEXTUW
= (0x12 << 6) | TX79_MMI_CLASS_MMI1
,
2277 TX79_MMI1_PADDUH
= (0x14 << 6) | TX79_MMI_CLASS_MMI1
,
2278 TX79_MMI1_PSUBUH
= (0x15 << 6) | TX79_MMI_CLASS_MMI1
,
2279 TX79_MMI1_PEXTUH
= (0x16 << 6) | TX79_MMI_CLASS_MMI1
,
2280 TX79_MMI1_PADDUB
= (0x18 << 6) | TX79_MMI_CLASS_MMI1
,
2281 TX79_MMI1_PSUBUB
= (0x19 << 6) | TX79_MMI_CLASS_MMI1
,
2282 TX79_MMI1_PEXTUB
= (0x1A << 6) | TX79_MMI_CLASS_MMI1
,
2283 TX79_MMI1_QFSRV
= (0x1B << 6) | TX79_MMI_CLASS_MMI1
,
2287 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI2:
2290 * +--------+----------------------+--------+--------+
2291 * | MMI | |function| MMI2 |
2292 * +--------+----------------------+--------+--------+
2294 * function bits 7..6
2295 * bits | 0 | 1 | 2 | 3
2296 * 10..8 | 00 | 01 | 10 | 11
2297 * -------+-------+-------+-------+-------
2298 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2299 * 1 001 | PMSUBW| * | * | *
2300 * 2 010 | PMFHI | PMFLO | PINTH | *
2301 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2302 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2303 * 5 101 | PMSUBH| PHMSBH| * | *
2304 * 6 110 | * | * | PEXEH | PREVH
2305 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2308 #define MASK_TX79_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2310 TX79_MMI2_PMADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI2
,
2311 TX79_MMI2_PSLLVW
= (0x02 << 6) | TX79_MMI_CLASS_MMI2
,
2312 TX79_MMI2_PSRLVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI2
,
2313 TX79_MMI2_PMSUBW
= (0x04 << 6) | TX79_MMI_CLASS_MMI2
,
2314 TX79_MMI2_PMFHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI2
,
2315 TX79_MMI2_PMFLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI2
,
2316 TX79_MMI2_PINTH
= (0x0A << 6) | TX79_MMI_CLASS_MMI2
,
2317 TX79_MMI2_PMULTW
= (0x0C << 6) | TX79_MMI_CLASS_MMI2
,
2318 TX79_MMI2_PDIVW
= (0x0D << 6) | TX79_MMI_CLASS_MMI2
,
2319 TX79_MMI2_PCPYLD
= (0x0E << 6) | TX79_MMI_CLASS_MMI2
,
2320 TX79_MMI2_PMADDH
= (0x10 << 6) | TX79_MMI_CLASS_MMI2
,
2321 TX79_MMI2_PHMADH
= (0x11 << 6) | TX79_MMI_CLASS_MMI2
,
2322 TX79_MMI2_PAND
= (0x12 << 6) | TX79_MMI_CLASS_MMI2
,
2323 TX79_MMI2_PXOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI2
,
2324 TX79_MMI2_PMSUBH
= (0x14 << 6) | TX79_MMI_CLASS_MMI2
,
2325 TX79_MMI2_PHMSBH
= (0x15 << 6) | TX79_MMI_CLASS_MMI2
,
2326 TX79_MMI2_PEXEH
= (0x1A << 6) | TX79_MMI_CLASS_MMI2
,
2327 TX79_MMI2_PREVH
= (0x1B << 6) | TX79_MMI_CLASS_MMI2
,
2328 TX79_MMI2_PMULTH
= (0x1C << 6) | TX79_MMI_CLASS_MMI2
,
2329 TX79_MMI2_PDIVBW
= (0x1D << 6) | TX79_MMI_CLASS_MMI2
,
2330 TX79_MMI2_PEXEW
= (0x1E << 6) | TX79_MMI_CLASS_MMI2
,
2331 TX79_MMI2_PROT3W
= (0x1F << 6) | TX79_MMI_CLASS_MMI2
,
2335 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI3:
2338 * +--------+----------------------+--------+--------+
2339 * | MMI | |function| MMI3 |
2340 * +--------+----------------------+--------+--------+
2342 * function bits 7..6
2343 * bits | 0 | 1 | 2 | 3
2344 * 10..8 | 00 | 01 | 10 | 11
2345 * -------+-------+-------+-------+-------
2346 * 0 000 |PMADDUW| * | * | PSRAVW
2347 * 1 001 | * | * | * | *
2348 * 2 010 | PMTHI | PMTLO | PINTEH| *
2349 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2350 * 4 100 | * | * | POR | PNOR
2351 * 5 101 | * | * | * | *
2352 * 6 110 | * | * | PEXCH | PCPYH
2353 * 7 111 | * | * | PEXCW | *
2356 #define MASK_TX79_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2358 TX79_MMI3_PMADDUW
= (0x00 << 6) | TX79_MMI_CLASS_MMI3
,
2359 TX79_MMI3_PSRAVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI3
,
2360 TX79_MMI3_PMTHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI3
,
2361 TX79_MMI3_PMTLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI3
,
2362 TX79_MMI3_PINTEH
= (0x0A << 6) | TX79_MMI_CLASS_MMI3
,
2363 TX79_MMI3_PMULTUW
= (0x0C << 6) | TX79_MMI_CLASS_MMI3
,
2364 TX79_MMI3_PDIVUW
= (0x0D << 6) | TX79_MMI_CLASS_MMI3
,
2365 TX79_MMI3_PCPYUD
= (0x0E << 6) | TX79_MMI_CLASS_MMI3
,
2366 TX79_MMI3_POR
= (0x12 << 6) | TX79_MMI_CLASS_MMI3
,
2367 TX79_MMI3_PNOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI3
,
2368 TX79_MMI3_PEXCH
= (0x1A << 6) | TX79_MMI_CLASS_MMI3
,
2369 TX79_MMI3_PCPYH
= (0x1B << 6) | TX79_MMI_CLASS_MMI3
,
2370 TX79_MMI3_PEXCW
= (0x1E << 6) | TX79_MMI_CLASS_MMI3
,
2373 /* global register indices */
2374 static TCGv cpu_gpr
[32], cpu_PC
;
2375 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2376 static TCGv cpu_dspctrl
, btarget
, bcond
;
2377 static TCGv_i32 hflags
;
2378 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2379 static TCGv_i64 fpu_f64
[32];
2380 static TCGv_i64 msa_wr_d
[64];
2383 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2386 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers.  Each materializes its trailing immediate into a
 * TCGv_i32 temporary, calls the generated helper (which always receives
 * cpu_env first), then frees the temporary.  The name encodes the shape:
 * <N>e<M>i = N return values, M immediate arguments.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2430 typedef struct DisasContext
{
2431 DisasContextBase base
;
2432 target_ulong saved_pc
;
2433 target_ulong page_start
;
2435 uint64_t insn_flags
;
2436 int32_t CP0_Config1
;
2437 int32_t CP0_Config2
;
2438 int32_t CP0_Config3
;
2439 int32_t CP0_Config5
;
2440 /* Routine used to access memory */
2442 TCGMemOp default_tcg_memop_mask
;
2443 uint32_t hflags
, saved_hflags
;
2444 target_ulong btarget
;
2455 int CP0_LLAddr_shift
;
/* Translator exit reasons beyond the generic DisasJumpType values. */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
/* ABI names of the 32 general-purpose registers, indexed by number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulators (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulators (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/*
 * Names of the MSA vector registers: each 128-bit wN register is backed by
 * two 64-bit TCG globals (wN.d0 = low half, wN.d1 = high half), giving
 * 64 entries for 32 registers.
 */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Names of the MXU registers XR1..XR15 plus the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
/* Emit a disassembly-trace message when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic (PC, raw opcode, decoded fields). */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2531 /* General purpose registers moves. */
2532 static inline void gen_load_gpr (TCGv t
, int reg
)
2535 tcg_gen_movi_tl(t
, 0);
2537 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2540 static inline void gen_store_gpr (TCGv t
, int reg
)
2543 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2546 /* Moves to/from shadow registers. */
2547 static inline void gen_load_srsgpr (int from
, int to
)
2549 TCGv t0
= tcg_temp_new();
2552 tcg_gen_movi_tl(t0
, 0);
2554 TCGv_i32 t2
= tcg_temp_new_i32();
2555 TCGv_ptr addr
= tcg_temp_new_ptr();
2557 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2558 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2559 tcg_gen_andi_i32(t2
, t2
, 0xf);
2560 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2561 tcg_gen_ext_i32_ptr(addr
, t2
);
2562 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2564 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2565 tcg_temp_free_ptr(addr
);
2566 tcg_temp_free_i32(t2
);
2568 gen_store_gpr(t0
, to
);
2572 static inline void gen_store_srsgpr (int from
, int to
)
2575 TCGv t0
= tcg_temp_new();
2576 TCGv_i32 t2
= tcg_temp_new_i32();
2577 TCGv_ptr addr
= tcg_temp_new_ptr();
2579 gen_load_gpr(t0
, from
);
2580 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2581 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2582 tcg_gen_andi_i32(t2
, t2
, 0xf);
2583 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2584 tcg_gen_ext_i32_ptr(addr
, t2
);
2585 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2587 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2588 tcg_temp_free_ptr(addr
);
2589 tcg_temp_free_i32(t2
);
2595 static inline void gen_save_pc(target_ulong pc
)
2597 tcg_gen_movi_tl(cpu_PC
, pc
);
2600 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2602 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2603 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2604 gen_save_pc(ctx
->base
.pc_next
);
2605 ctx
->saved_pc
= ctx
->base
.pc_next
;
2607 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2608 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2609 ctx
->saved_hflags
= ctx
->hflags
;
2610 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2616 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2622 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2624 ctx
->saved_hflags
= ctx
->hflags
;
2625 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2631 ctx
->btarget
= env
->btarget
;
2636 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2638 TCGv_i32 texcp
= tcg_const_i32(excp
);
2639 TCGv_i32 terr
= tcg_const_i32(err
);
2640 save_cpu_state(ctx
, 1);
2641 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2642 tcg_temp_free_i32(terr
);
2643 tcg_temp_free_i32(texcp
);
2644 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2647 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2649 gen_helper_0e0i(raise_exception
, excp
);
2652 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2654 generate_exception_err(ctx
, excp
, 0);
2657 /* Floating point register moves. */
2658 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2660 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2661 generate_exception(ctx
, EXCP_RI
);
2663 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2666 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2669 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2670 generate_exception(ctx
, EXCP_RI
);
2672 t64
= tcg_temp_new_i64();
2673 tcg_gen_extu_i32_i64(t64
, t
);
2674 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2675 tcg_temp_free_i64(t64
);
2678 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2680 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2681 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2683 gen_load_fpr32(ctx
, t
, reg
| 1);
2687 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2689 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2690 TCGv_i64 t64
= tcg_temp_new_i64();
2691 tcg_gen_extu_i32_i64(t64
, t
);
2692 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2693 tcg_temp_free_i64(t64
);
2695 gen_store_fpr32(ctx
, t
, reg
| 1);
2699 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2701 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2702 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2704 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2708 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2710 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2711 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2714 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2715 t0
= tcg_temp_new_i64();
2716 tcg_gen_shri_i64(t0
, t
, 32);
2717 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2718 tcg_temp_free_i64(t0
);
2722 static inline int get_fp_bit (int cc
)
2730 /* Addresses computation */
2731 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2733 tcg_gen_add_tl(ret
, arg0
, arg1
);
2735 #if defined(TARGET_MIPS64)
2736 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2737 tcg_gen_ext32s_i64(ret
, ret
);
2742 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2745 tcg_gen_addi_tl(ret
, base
, ofs
);
2747 #if defined(TARGET_MIPS64)
2748 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2749 tcg_gen_ext32s_i64(ret
, ret
);
2754 /* Addresses computation (translation time) */
2755 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2758 target_long sum
= base
+ offset
;
2760 #if defined(TARGET_MIPS64)
2761 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2768 /* Sign-extract the low 32-bits to a target_long. */
2769 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2771 #if defined(TARGET_MIPS64)
2772 tcg_gen_ext32s_i64(ret
, arg
);
2774 tcg_gen_extrl_i64_i32(ret
, arg
);
2778 /* Sign-extract the high 32-bits to a target_long. */
2779 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2781 #if defined(TARGET_MIPS64)
2782 tcg_gen_sari_i64(ret
, arg
, 32);
2784 tcg_gen_extrh_i64_i32(ret
, arg
);
2788 static inline void check_cp0_enabled(DisasContext
*ctx
)
2790 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2791 generate_exception_err(ctx
, EXCP_CpU
, 0);
2794 static inline void check_cp1_enabled(DisasContext
*ctx
)
2796 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2797 generate_exception_err(ctx
, EXCP_CpU
, 1);
2800 /* Verify that the processor is running with COP1X instructions enabled.
2801 This is associated with the nabla symbol in the MIPS32 and MIPS64
2804 static inline void check_cop1x(DisasContext
*ctx
)
2806 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2807 generate_exception_end(ctx
, EXCP_RI
);
2810 /* Verify that the processor is running with 64-bit floating-point
2811 operations enabled. */
2813 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2815 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2816 generate_exception_end(ctx
, EXCP_RI
);
2820 * Verify if floating point register is valid; an operation is not defined
2821 * if bit 0 of any register specification is set and the FR bit in the
2822 * Status register equals zero, since the register numbers specify an
2823 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2824 * in the Status register equals one, both even and odd register numbers
2825 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2827 * Multiple 64 bit wide registers can be checked by calling
2828 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2830 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2832 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2833 generate_exception_end(ctx
, EXCP_RI
);
2836 /* Verify that the processor is running with DSP instructions enabled.
2837 This is enabled by CP0 Status register MX(24) bit.
2840 static inline void check_dsp(DisasContext
*ctx
)
2842 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2843 if (ctx
->insn_flags
& ASE_DSP
) {
2844 generate_exception_end(ctx
, EXCP_DSPDIS
);
2846 generate_exception_end(ctx
, EXCP_RI
);
2851 static inline void check_dsp_r2(DisasContext
*ctx
)
2853 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2854 if (ctx
->insn_flags
& ASE_DSP
) {
2855 generate_exception_end(ctx
, EXCP_DSPDIS
);
2857 generate_exception_end(ctx
, EXCP_RI
);
2862 static inline void check_dsp_r3(DisasContext
*ctx
)
2864 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2865 if (ctx
->insn_flags
& ASE_DSP
) {
2866 generate_exception_end(ctx
, EXCP_DSPDIS
);
2868 generate_exception_end(ctx
, EXCP_RI
);
2873 /* This code generates a "reserved instruction" exception if the
2874 CPU does not support the instruction set corresponding to flags. */
2875 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2877 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2878 generate_exception_end(ctx
, EXCP_RI
);
2882 /* This code generates a "reserved instruction" exception if the
2883 CPU has corresponding flag set which indicates that the instruction
2884 has been removed. */
2885 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2887 if (unlikely(ctx
->insn_flags
& flags
)) {
2888 generate_exception_end(ctx
, EXCP_RI
);
2893 * The Linux kernel traps certain reserved instruction exceptions to
2894 * emulate the corresponding instructions. QEMU is the kernel in user
2895 * mode, so those traps are emulated by accepting the instructions.
2897 * A reserved instruction exception is generated for flagged CPUs if
2898 * QEMU runs in system mode.
2900 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
2902 #ifndef CONFIG_USER_ONLY
2903 check_insn_opc_removed(ctx
, flags
);
2907 /* This code generates a "reserved instruction" exception if the
2908 CPU does not support 64-bit paired-single (PS) floating point data type */
2909 static inline void check_ps(DisasContext
*ctx
)
2911 if (unlikely(!ctx
->ps
)) {
2912 generate_exception(ctx
, EXCP_RI
);
2914 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    /* Braced per QEMU coding style; behavior unchanged. */
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
2927 #ifndef CONFIG_USER_ONLY
2928 static inline void check_mvh(DisasContext
*ctx
)
2930 if (unlikely(!ctx
->mvh
)) {
2931 generate_exception(ctx
, EXCP_RI
);
2937 * This code generates a "reserved instruction" exception if the
2938 * Config5 XNP bit is set.
2940 static inline void check_xnp(DisasContext
*ctx
)
2942 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2943 generate_exception_end(ctx
, EXCP_RI
);
2947 #ifndef CONFIG_USER_ONLY
2949 * This code generates a "reserved instruction" exception if the
2950 * Config3 PW bit is NOT set.
2952 static inline void check_pw(DisasContext
*ctx
)
2954 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2955 generate_exception_end(ctx
, EXCP_RI
);
2961 * This code generates a "reserved instruction" exception if the
2962 * Config3 MT bit is NOT set.
2964 static inline void check_mt(DisasContext
*ctx
)
2966 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2967 generate_exception_end(ctx
, EXCP_RI
);
2971 #ifndef CONFIG_USER_ONLY
2973 * This code generates a "coprocessor unusable" exception if CP0 is not
2974 * available, and, if that is not the case, generates a "reserved instruction"
2975 * exception if the Config5 MT bit is NOT set. This is needed for availability
2976 * control of some of MT ASE instructions.
2978 static inline void check_cp0_mt(DisasContext
*ctx
)
2980 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2981 generate_exception_err(ctx
, EXCP_CpU
, 0);
2983 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2984 generate_exception_err(ctx
, EXCP_RI
, 0);
2991 * This code generates a "reserved instruction" exception if the
2992 * Config5 NMS bit is set.
2994 static inline void check_nms(DisasContext
*ctx
)
2996 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2997 generate_exception_end(ctx
, EXCP_RI
);
3002 * This code generates a "reserved instruction" exception if the
3003 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3004 * Config2 TL, and Config5 L2C are unset.
3006 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3008 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3009 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3010 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3011 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3012 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3013 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
3015 generate_exception_end(ctx
, EXCP_RI
);
3020 * This code generates a "reserved instruction" exception if the
3021 * Config5 EVA bit is NOT set.
3023 static inline void check_eva(DisasContext
*ctx
)
3025 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3026 generate_exception_end(ctx
, EXCP_RI
);
3031 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3032 calling interface for 32 and 64-bit FPRs. No sense in changing
3033 all callers for gen_load_fpr32 when we need the CTX parameter for
3035 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3036 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3037 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3038 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3039 int ft, int fs, int cc) \
3041 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3042 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3051 check_cp1_registers(ctx, fs | ft); \
3059 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3060 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3062 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3063 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3064 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3065 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3066 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3067 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3068 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3069 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3070 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3071 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3072 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3073 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3074 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3075 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3076 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3077 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3080 tcg_temp_free_i##bits (fp0); \
3081 tcg_temp_free_i##bits (fp1); \
3084 FOP_CONDS(, 0, d
, FMT_D
, 64)
3085 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3086 FOP_CONDS(, 0, s
, FMT_S
, 32)
3087 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3088 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3089 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3092 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3093 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3094 int ft, int fs, int fd) \
3096 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3097 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3098 if (ifmt == FMT_D) { \
3099 check_cp1_registers(ctx, fs | ft | fd); \
3101 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3102 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3105 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3108 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3111 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3114 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3117 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3120 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3123 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3126 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3129 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3132 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3135 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3138 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3141 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3144 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3147 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3150 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3153 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3156 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3159 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3162 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3165 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3168 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3174 tcg_temp_free_i ## bits (fp0); \
3175 tcg_temp_free_i ## bits (fp1); \
3178 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3179 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3181 #undef gen_ldcmp_fpr32
3182 #undef gen_ldcmp_fpr64
3184 /* load/store instructions. */
3185 #ifdef CONFIG_USER_ONLY
3186 #define OP_LD_ATOMIC(insn,fname) \
3187 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3188 DisasContext *ctx) \
3190 TCGv t0 = tcg_temp_new(); \
3191 tcg_gen_mov_tl(t0, arg1); \
3192 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3193 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3194 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3195 tcg_temp_free(t0); \
3198 #define OP_LD_ATOMIC(insn,fname) \
3199 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3200 DisasContext *ctx) \
3202 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3205 OP_LD_ATOMIC(ll
,ld32s
);
3206 #if defined(TARGET_MIPS64)
3207 OP_LD_ATOMIC(lld
,ld64
);
3211 #ifdef CONFIG_USER_ONLY
3212 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3213 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3214 DisasContext *ctx) \
3216 TCGv t0 = tcg_temp_new(); \
3217 TCGLabel *l1 = gen_new_label(); \
3218 TCGLabel *l2 = gen_new_label(); \
3220 tcg_gen_andi_tl(t0, arg2, almask); \
3221 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3222 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3223 generate_exception(ctx, EXCP_AdES); \
3224 gen_set_label(l1); \
3225 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3226 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3227 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3228 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3229 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3230 generate_exception_end(ctx, EXCP_SC); \
3231 gen_set_label(l2); \
3232 tcg_gen_movi_tl(t0, 0); \
3233 gen_store_gpr(t0, rt); \
3234 tcg_temp_free(t0); \
3237 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3238 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3239 DisasContext *ctx) \
3241 TCGv t0 = tcg_temp_new(); \
3242 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3243 gen_store_gpr(t0, rt); \
3244 tcg_temp_free(t0); \
3247 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3248 #if defined(TARGET_MIPS64)
3249 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3253 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3254 int base
, int offset
)
3257 tcg_gen_movi_tl(addr
, offset
);
3258 } else if (offset
== 0) {
3259 gen_load_gpr(addr
, base
);
3261 tcg_gen_movi_tl(addr
, offset
);
3262 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3266 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3268 target_ulong pc
= ctx
->base
.pc_next
;
3270 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3271 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3276 pc
&= ~(target_ulong
)3;
3281 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3282 int rt
, int base
, int offset
)
3285 int mem_idx
= ctx
->mem_idx
;
3287 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3288 /* Loongson CPU uses a load to zero register for prefetch.
3289 We emulate it as a NOP. On other CPU we must perform the
3290 actual memory access. */
3294 t0
= tcg_temp_new();
3295 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3298 #if defined(TARGET_MIPS64)
3300 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3301 ctx
->default_tcg_memop_mask
);
3302 gen_store_gpr(t0
, rt
);
3305 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3306 ctx
->default_tcg_memop_mask
);
3307 gen_store_gpr(t0
, rt
);
3311 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3312 gen_store_gpr(t0
, rt
);
3315 t1
= tcg_temp_new();
3316 /* Do a byte access to possibly trigger a page
3317 fault with the unaligned address. */
3318 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3319 tcg_gen_andi_tl(t1
, t0
, 7);
3320 #ifndef TARGET_WORDS_BIGENDIAN
3321 tcg_gen_xori_tl(t1
, t1
, 7);
3323 tcg_gen_shli_tl(t1
, t1
, 3);
3324 tcg_gen_andi_tl(t0
, t0
, ~7);
3325 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3326 tcg_gen_shl_tl(t0
, t0
, t1
);
3327 t2
= tcg_const_tl(-1);
3328 tcg_gen_shl_tl(t2
, t2
, t1
);
3329 gen_load_gpr(t1
, rt
);
3330 tcg_gen_andc_tl(t1
, t1
, t2
);
3332 tcg_gen_or_tl(t0
, t0
, t1
);
3334 gen_store_gpr(t0
, rt
);
3337 t1
= tcg_temp_new();
3338 /* Do a byte access to possibly trigger a page
3339 fault with the unaligned address. */
3340 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3341 tcg_gen_andi_tl(t1
, t0
, 7);
3342 #ifdef TARGET_WORDS_BIGENDIAN
3343 tcg_gen_xori_tl(t1
, t1
, 7);
3345 tcg_gen_shli_tl(t1
, t1
, 3);
3346 tcg_gen_andi_tl(t0
, t0
, ~7);
3347 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3348 tcg_gen_shr_tl(t0
, t0
, t1
);
3349 tcg_gen_xori_tl(t1
, t1
, 63);
3350 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3351 tcg_gen_shl_tl(t2
, t2
, t1
);
3352 gen_load_gpr(t1
, rt
);
3353 tcg_gen_and_tl(t1
, t1
, t2
);
3355 tcg_gen_or_tl(t0
, t0
, t1
);
3357 gen_store_gpr(t0
, rt
);
3360 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3361 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3363 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3364 gen_store_gpr(t0
, rt
);
3368 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3369 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3371 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3372 gen_store_gpr(t0
, rt
);
3375 mem_idx
= MIPS_HFLAG_UM
;
3378 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3379 ctx
->default_tcg_memop_mask
);
3380 gen_store_gpr(t0
, rt
);
3383 mem_idx
= MIPS_HFLAG_UM
;
3386 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3387 ctx
->default_tcg_memop_mask
);
3388 gen_store_gpr(t0
, rt
);
3391 mem_idx
= MIPS_HFLAG_UM
;
3394 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3395 ctx
->default_tcg_memop_mask
);
3396 gen_store_gpr(t0
, rt
);
3399 mem_idx
= MIPS_HFLAG_UM
;
3402 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3403 gen_store_gpr(t0
, rt
);
3406 mem_idx
= MIPS_HFLAG_UM
;
3409 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3410 gen_store_gpr(t0
, rt
);
3413 mem_idx
= MIPS_HFLAG_UM
;
3416 t1
= tcg_temp_new();
3417 /* Do a byte access to possibly trigger a page
3418 fault with the unaligned address. */
3419 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3420 tcg_gen_andi_tl(t1
, t0
, 3);
3421 #ifndef TARGET_WORDS_BIGENDIAN
3422 tcg_gen_xori_tl(t1
, t1
, 3);
3424 tcg_gen_shli_tl(t1
, t1
, 3);
3425 tcg_gen_andi_tl(t0
, t0
, ~3);
3426 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3427 tcg_gen_shl_tl(t0
, t0
, t1
);
3428 t2
= tcg_const_tl(-1);
3429 tcg_gen_shl_tl(t2
, t2
, t1
);
3430 gen_load_gpr(t1
, rt
);
3431 tcg_gen_andc_tl(t1
, t1
, t2
);
3433 tcg_gen_or_tl(t0
, t0
, t1
);
3435 tcg_gen_ext32s_tl(t0
, t0
);
3436 gen_store_gpr(t0
, rt
);
3439 mem_idx
= MIPS_HFLAG_UM
;
3442 t1
= tcg_temp_new();
3443 /* Do a byte access to possibly trigger a page
3444 fault with the unaligned address. */
3445 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3446 tcg_gen_andi_tl(t1
, t0
, 3);
3447 #ifdef TARGET_WORDS_BIGENDIAN
3448 tcg_gen_xori_tl(t1
, t1
, 3);
3450 tcg_gen_shli_tl(t1
, t1
, 3);
3451 tcg_gen_andi_tl(t0
, t0
, ~3);
3452 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3453 tcg_gen_shr_tl(t0
, t0
, t1
);
3454 tcg_gen_xori_tl(t1
, t1
, 31);
3455 t2
= tcg_const_tl(0xfffffffeull
);
3456 tcg_gen_shl_tl(t2
, t2
, t1
);
3457 gen_load_gpr(t1
, rt
);
3458 tcg_gen_and_tl(t1
, t1
, t2
);
3460 tcg_gen_or_tl(t0
, t0
, t1
);
3462 tcg_gen_ext32s_tl(t0
, t0
);
3463 gen_store_gpr(t0
, rt
);
3466 mem_idx
= MIPS_HFLAG_UM
;
3470 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3471 gen_store_gpr(t0
, rt
);
3477 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3478 uint32_t reg1
, uint32_t reg2
)
3480 TCGv taddr
= tcg_temp_new();
3481 TCGv_i64 tval
= tcg_temp_new_i64();
3482 TCGv tmp1
= tcg_temp_new();
3483 TCGv tmp2
= tcg_temp_new();
3485 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3486 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3487 #ifdef TARGET_WORDS_BIGENDIAN
3488 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3490 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3492 gen_store_gpr(tmp1
, reg1
);
3493 tcg_temp_free(tmp1
);
3494 gen_store_gpr(tmp2
, reg2
);
3495 tcg_temp_free(tmp2
);
3496 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3497 tcg_temp_free_i64(tval
);
3498 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3499 tcg_temp_free(taddr
);
3503 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3504 int base
, int offset
)
3506 TCGv t0
= tcg_temp_new();
3507 TCGv t1
= tcg_temp_new();
3508 int mem_idx
= ctx
->mem_idx
;
3510 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3511 gen_load_gpr(t1
, rt
);
3513 #if defined(TARGET_MIPS64)
3515 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3516 ctx
->default_tcg_memop_mask
);
3519 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3522 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3526 mem_idx
= MIPS_HFLAG_UM
;
3529 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3530 ctx
->default_tcg_memop_mask
);
3533 mem_idx
= MIPS_HFLAG_UM
;
3536 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3537 ctx
->default_tcg_memop_mask
);
3540 mem_idx
= MIPS_HFLAG_UM
;
3543 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3546 mem_idx
= MIPS_HFLAG_UM
;
3549 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3552 mem_idx
= MIPS_HFLAG_UM
;
3555 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3563 /* Store conditional */
3564 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3565 int base
, int16_t offset
)
3568 int mem_idx
= ctx
->mem_idx
;
3570 #ifdef CONFIG_USER_ONLY
3571 t0
= tcg_temp_local_new();
3572 t1
= tcg_temp_local_new();
3574 t0
= tcg_temp_new();
3575 t1
= tcg_temp_new();
3577 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3578 gen_load_gpr(t1
, rt
);
3580 #if defined(TARGET_MIPS64)
3583 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3587 mem_idx
= MIPS_HFLAG_UM
;
3591 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3598 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3599 uint32_t reg1
, uint32_t reg2
)
3601 TCGv taddr
= tcg_temp_local_new();
3602 TCGv lladdr
= tcg_temp_local_new();
3603 TCGv_i64 tval
= tcg_temp_new_i64();
3604 TCGv_i64 llval
= tcg_temp_new_i64();
3605 TCGv_i64 val
= tcg_temp_new_i64();
3606 TCGv tmp1
= tcg_temp_new();
3607 TCGv tmp2
= tcg_temp_new();
3608 TCGLabel
*lab_fail
= gen_new_label();
3609 TCGLabel
*lab_done
= gen_new_label();
3611 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3613 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3614 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3616 gen_load_gpr(tmp1
, reg1
);
3617 gen_load_gpr(tmp2
, reg2
);
3619 #ifdef TARGET_WORDS_BIGENDIAN
3620 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3622 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3625 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3626 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3627 ctx
->mem_idx
, MO_64
);
3629 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3631 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3633 gen_set_label(lab_fail
);
3636 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3638 gen_set_label(lab_done
);
3639 tcg_gen_movi_tl(lladdr
, -1);
3640 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3643 /* Load and store */
3644 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3647 /* Don't do NOP if destination is zero: we must perform the actual
3652 TCGv_i32 fp0
= tcg_temp_new_i32();
3653 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3654 ctx
->default_tcg_memop_mask
);
3655 gen_store_fpr32(ctx
, fp0
, ft
);
3656 tcg_temp_free_i32(fp0
);
3661 TCGv_i32 fp0
= tcg_temp_new_i32();
3662 gen_load_fpr32(ctx
, fp0
, ft
);
3663 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3664 ctx
->default_tcg_memop_mask
);
3665 tcg_temp_free_i32(fp0
);
3670 TCGv_i64 fp0
= tcg_temp_new_i64();
3671 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3672 ctx
->default_tcg_memop_mask
);
3673 gen_store_fpr64(ctx
, fp0
, ft
);
3674 tcg_temp_free_i64(fp0
);
3679 TCGv_i64 fp0
= tcg_temp_new_i64();
3680 gen_load_fpr64(ctx
, fp0
, ft
);
3681 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3682 ctx
->default_tcg_memop_mask
);
3683 tcg_temp_free_i64(fp0
);
3687 MIPS_INVAL("flt_ldst");
3688 generate_exception_end(ctx
, EXCP_RI
);
3693 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3694 int rs
, int16_t imm
)
3696 TCGv t0
= tcg_temp_new();
3698 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3699 check_cp1_enabled(ctx
);
3703 check_insn(ctx
, ISA_MIPS2
);
3706 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3707 gen_flt_ldst(ctx
, op
, rt
, t0
);
3710 generate_exception_err(ctx
, EXCP_CpU
, 1);
3715 /* Arithmetic with immediate operand */
3716 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3717 int rt
, int rs
, int imm
)
3719 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3721 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3722 /* If no destination, treat it as a NOP.
3723 For addi, we must generate the overflow exception when needed. */
3729 TCGv t0
= tcg_temp_local_new();
3730 TCGv t1
= tcg_temp_new();
3731 TCGv t2
= tcg_temp_new();
3732 TCGLabel
*l1
= gen_new_label();
3734 gen_load_gpr(t1
, rs
);
3735 tcg_gen_addi_tl(t0
, t1
, uimm
);
3736 tcg_gen_ext32s_tl(t0
, t0
);
3738 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3739 tcg_gen_xori_tl(t2
, t0
, uimm
);
3740 tcg_gen_and_tl(t1
, t1
, t2
);
3742 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3744 /* operands of same sign, result different sign */
3745 generate_exception(ctx
, EXCP_OVERFLOW
);
3747 tcg_gen_ext32s_tl(t0
, t0
);
3748 gen_store_gpr(t0
, rt
);
3754 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3755 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3757 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3760 #if defined(TARGET_MIPS64)
3763 TCGv t0
= tcg_temp_local_new();
3764 TCGv t1
= tcg_temp_new();
3765 TCGv t2
= tcg_temp_new();
3766 TCGLabel
*l1
= gen_new_label();
3768 gen_load_gpr(t1
, rs
);
3769 tcg_gen_addi_tl(t0
, t1
, uimm
);
3771 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3772 tcg_gen_xori_tl(t2
, t0
, uimm
);
3773 tcg_gen_and_tl(t1
, t1
, t2
);
3775 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3777 /* operands of same sign, result different sign */
3778 generate_exception(ctx
, EXCP_OVERFLOW
);
3780 gen_store_gpr(t0
, rt
);
3786 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3788 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3795 /* Logic with immediate operand */
3796 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3797 int rt
, int rs
, int16_t imm
)
3802 /* If no destination, treat it as a NOP. */
3805 uimm
= (uint16_t)imm
;
3808 if (likely(rs
!= 0))
3809 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3811 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3815 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3817 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3820 if (likely(rs
!= 0))
3821 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3823 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3826 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3828 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3829 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3831 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3840 /* Set on less than with immediate operand */
3841 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3842 int rt
, int rs
, int16_t imm
)
3844 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3848 /* If no destination, treat it as a NOP. */
3851 t0
= tcg_temp_new();
3852 gen_load_gpr(t0
, rs
);
3855 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3858 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3864 /* Shifts with immediate operand */
3865 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3866 int rt
, int rs
, int16_t imm
)
3868 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3872 /* If no destination, treat it as a NOP. */
3876 t0
= tcg_temp_new();
3877 gen_load_gpr(t0
, rs
);
3880 tcg_gen_shli_tl(t0
, t0
, uimm
);
3881 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3884 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3888 tcg_gen_ext32u_tl(t0
, t0
);
3889 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3891 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3896 TCGv_i32 t1
= tcg_temp_new_i32();
3898 tcg_gen_trunc_tl_i32(t1
, t0
);
3899 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3900 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3901 tcg_temp_free_i32(t1
);
3903 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3906 #if defined(TARGET_MIPS64)
3908 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3911 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3914 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3918 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3920 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3924 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3927 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3930 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3933 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3941 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3942 int rd
, int rs
, int rt
)
3944 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3945 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3946 /* If no destination, treat it as a NOP.
3947 For add & sub, we must generate the overflow exception when needed. */
3954 TCGv t0
= tcg_temp_local_new();
3955 TCGv t1
= tcg_temp_new();
3956 TCGv t2
= tcg_temp_new();
3957 TCGLabel
*l1
= gen_new_label();
3959 gen_load_gpr(t1
, rs
);
3960 gen_load_gpr(t2
, rt
);
3961 tcg_gen_add_tl(t0
, t1
, t2
);
3962 tcg_gen_ext32s_tl(t0
, t0
);
3963 tcg_gen_xor_tl(t1
, t1
, t2
);
3964 tcg_gen_xor_tl(t2
, t0
, t2
);
3965 tcg_gen_andc_tl(t1
, t2
, t1
);
3967 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3969 /* operands of same sign, result different sign */
3970 generate_exception(ctx
, EXCP_OVERFLOW
);
3972 gen_store_gpr(t0
, rd
);
3977 if (rs
!= 0 && rt
!= 0) {
3978 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3979 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3980 } else if (rs
== 0 && rt
!= 0) {
3981 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3982 } else if (rs
!= 0 && rt
== 0) {
3983 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3985 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3990 TCGv t0
= tcg_temp_local_new();
3991 TCGv t1
= tcg_temp_new();
3992 TCGv t2
= tcg_temp_new();
3993 TCGLabel
*l1
= gen_new_label();
3995 gen_load_gpr(t1
, rs
);
3996 gen_load_gpr(t2
, rt
);
3997 tcg_gen_sub_tl(t0
, t1
, t2
);
3998 tcg_gen_ext32s_tl(t0
, t0
);
3999 tcg_gen_xor_tl(t2
, t1
, t2
);
4000 tcg_gen_xor_tl(t1
, t0
, t1
);
4001 tcg_gen_and_tl(t1
, t1
, t2
);
4003 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4005 /* operands of different sign, first operand and result different sign */
4006 generate_exception(ctx
, EXCP_OVERFLOW
);
4008 gen_store_gpr(t0
, rd
);
4013 if (rs
!= 0 && rt
!= 0) {
4014 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4015 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4016 } else if (rs
== 0 && rt
!= 0) {
4017 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4018 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4019 } else if (rs
!= 0 && rt
== 0) {
4020 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4022 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4025 #if defined(TARGET_MIPS64)
4028 TCGv t0
= tcg_temp_local_new();
4029 TCGv t1
= tcg_temp_new();
4030 TCGv t2
= tcg_temp_new();
4031 TCGLabel
*l1
= gen_new_label();
4033 gen_load_gpr(t1
, rs
);
4034 gen_load_gpr(t2
, rt
);
4035 tcg_gen_add_tl(t0
, t1
, t2
);
4036 tcg_gen_xor_tl(t1
, t1
, t2
);
4037 tcg_gen_xor_tl(t2
, t0
, t2
);
4038 tcg_gen_andc_tl(t1
, t2
, t1
);
4040 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4042 /* operands of same sign, result different sign */
4043 generate_exception(ctx
, EXCP_OVERFLOW
);
4045 gen_store_gpr(t0
, rd
);
4050 if (rs
!= 0 && rt
!= 0) {
4051 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4052 } else if (rs
== 0 && rt
!= 0) {
4053 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4054 } else if (rs
!= 0 && rt
== 0) {
4055 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4057 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4062 TCGv t0
= tcg_temp_local_new();
4063 TCGv t1
= tcg_temp_new();
4064 TCGv t2
= tcg_temp_new();
4065 TCGLabel
*l1
= gen_new_label();
4067 gen_load_gpr(t1
, rs
);
4068 gen_load_gpr(t2
, rt
);
4069 tcg_gen_sub_tl(t0
, t1
, t2
);
4070 tcg_gen_xor_tl(t2
, t1
, t2
);
4071 tcg_gen_xor_tl(t1
, t0
, t1
);
4072 tcg_gen_and_tl(t1
, t1
, t2
);
4074 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4076 /* operands of different sign, first operand and result different sign */
4077 generate_exception(ctx
, EXCP_OVERFLOW
);
4079 gen_store_gpr(t0
, rd
);
4084 if (rs
!= 0 && rt
!= 0) {
4085 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4086 } else if (rs
== 0 && rt
!= 0) {
4087 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4088 } else if (rs
!= 0 && rt
== 0) {
4089 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4091 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4096 if (likely(rs
!= 0 && rt
!= 0)) {
4097 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4098 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4100 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4106 /* Conditional move */
4107 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4108 int rd
, int rs
, int rt
)
4113 /* If no destination, treat it as a NOP. */
4117 t0
= tcg_temp_new();
4118 gen_load_gpr(t0
, rt
);
4119 t1
= tcg_const_tl(0);
4120 t2
= tcg_temp_new();
4121 gen_load_gpr(t2
, rs
);
4124 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4127 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4130 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4133 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4142 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4143 int rd
, int rs
, int rt
)
4146 /* If no destination, treat it as a NOP. */
4152 if (likely(rs
!= 0 && rt
!= 0)) {
4153 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4155 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4159 if (rs
!= 0 && rt
!= 0) {
4160 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4161 } else if (rs
== 0 && rt
!= 0) {
4162 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4163 } else if (rs
!= 0 && rt
== 0) {
4164 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4166 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4170 if (likely(rs
!= 0 && rt
!= 0)) {
4171 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4172 } else if (rs
== 0 && rt
!= 0) {
4173 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4174 } else if (rs
!= 0 && rt
== 0) {
4175 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4177 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4181 if (likely(rs
!= 0 && rt
!= 0)) {
4182 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4183 } else if (rs
== 0 && rt
!= 0) {
4184 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4185 } else if (rs
!= 0 && rt
== 0) {
4186 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4188 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4194 /* Set on lower than */
4195 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4196 int rd
, int rs
, int rt
)
4201 /* If no destination, treat it as a NOP. */
4205 t0
= tcg_temp_new();
4206 t1
= tcg_temp_new();
4207 gen_load_gpr(t0
, rs
);
4208 gen_load_gpr(t1
, rt
);
4211 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4214 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4222 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4223 int rd
, int rs
, int rt
)
4228 /* If no destination, treat it as a NOP.
4229 For add & sub, we must generate the overflow exception when needed. */
4233 t0
= tcg_temp_new();
4234 t1
= tcg_temp_new();
4235 gen_load_gpr(t0
, rs
);
4236 gen_load_gpr(t1
, rt
);
4239 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4240 tcg_gen_shl_tl(t0
, t1
, t0
);
4241 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4244 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4245 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4248 tcg_gen_ext32u_tl(t1
, t1
);
4249 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4250 tcg_gen_shr_tl(t0
, t1
, t0
);
4251 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4255 TCGv_i32 t2
= tcg_temp_new_i32();
4256 TCGv_i32 t3
= tcg_temp_new_i32();
4258 tcg_gen_trunc_tl_i32(t2
, t0
);
4259 tcg_gen_trunc_tl_i32(t3
, t1
);
4260 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4261 tcg_gen_rotr_i32(t2
, t3
, t2
);
4262 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4263 tcg_temp_free_i32(t2
);
4264 tcg_temp_free_i32(t3
);
4267 #if defined(TARGET_MIPS64)
4269 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4270 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4273 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4274 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4277 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4278 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4281 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4282 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4290 /* Arithmetic on HI/LO registers */
4291 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4293 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== TX79_MMI_MFHI1
||
4294 opc
== OPC_MFLO
|| opc
== TX79_MMI_MFLO1
)) {
4300 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4307 case TX79_MMI_MFHI1
:
4308 #if defined(TARGET_MIPS64)
4310 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4314 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4318 case TX79_MMI_MFLO1
:
4319 #if defined(TARGET_MIPS64)
4321 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4325 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4329 case TX79_MMI_MTHI1
:
4331 #if defined(TARGET_MIPS64)
4333 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4337 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4340 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4344 case TX79_MMI_MTLO1
:
4346 #if defined(TARGET_MIPS64)
4348 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4352 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4355 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4361 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4364 TCGv t0
= tcg_const_tl(addr
);
4365 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4366 gen_store_gpr(t0
, reg
);
4370 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4376 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4379 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4380 addr
= addr_add(ctx
, pc
, offset
);
4381 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4385 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4386 addr
= addr_add(ctx
, pc
, offset
);
4387 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4389 #if defined(TARGET_MIPS64)
4392 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4393 addr
= addr_add(ctx
, pc
, offset
);
4394 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4398 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4401 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4402 addr
= addr_add(ctx
, pc
, offset
);
4403 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4408 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4409 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4410 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4413 #if defined(TARGET_MIPS64)
4414 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4415 case R6_OPC_LDPC
+ (1 << 16):
4416 case R6_OPC_LDPC
+ (2 << 16):
4417 case R6_OPC_LDPC
+ (3 << 16):
4419 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4420 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4421 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4425 MIPS_INVAL("OPC_PCREL");
4426 generate_exception_end(ctx
, EXCP_RI
);
4433 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4442 t0
= tcg_temp_new();
4443 t1
= tcg_temp_new();
4445 gen_load_gpr(t0
, rs
);
4446 gen_load_gpr(t1
, rt
);
4451 TCGv t2
= tcg_temp_new();
4452 TCGv t3
= tcg_temp_new();
4453 tcg_gen_ext32s_tl(t0
, t0
);
4454 tcg_gen_ext32s_tl(t1
, t1
);
4455 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4456 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4457 tcg_gen_and_tl(t2
, t2
, t3
);
4458 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4459 tcg_gen_or_tl(t2
, t2
, t3
);
4460 tcg_gen_movi_tl(t3
, 0);
4461 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4462 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4463 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4470 TCGv t2
= tcg_temp_new();
4471 TCGv t3
= tcg_temp_new();
4472 tcg_gen_ext32s_tl(t0
, t0
);
4473 tcg_gen_ext32s_tl(t1
, t1
);
4474 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4475 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4476 tcg_gen_and_tl(t2
, t2
, t3
);
4477 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4478 tcg_gen_or_tl(t2
, t2
, t3
);
4479 tcg_gen_movi_tl(t3
, 0);
4480 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4481 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4482 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4489 TCGv t2
= tcg_const_tl(0);
4490 TCGv t3
= tcg_const_tl(1);
4491 tcg_gen_ext32u_tl(t0
, t0
);
4492 tcg_gen_ext32u_tl(t1
, t1
);
4493 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4494 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4495 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4502 TCGv t2
= tcg_const_tl(0);
4503 TCGv t3
= tcg_const_tl(1);
4504 tcg_gen_ext32u_tl(t0
, t0
);
4505 tcg_gen_ext32u_tl(t1
, t1
);
4506 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4507 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4508 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4515 TCGv_i32 t2
= tcg_temp_new_i32();
4516 TCGv_i32 t3
= tcg_temp_new_i32();
4517 tcg_gen_trunc_tl_i32(t2
, t0
);
4518 tcg_gen_trunc_tl_i32(t3
, t1
);
4519 tcg_gen_mul_i32(t2
, t2
, t3
);
4520 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4521 tcg_temp_free_i32(t2
);
4522 tcg_temp_free_i32(t3
);
4527 TCGv_i32 t2
= tcg_temp_new_i32();
4528 TCGv_i32 t3
= tcg_temp_new_i32();
4529 tcg_gen_trunc_tl_i32(t2
, t0
);
4530 tcg_gen_trunc_tl_i32(t3
, t1
);
4531 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4532 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4533 tcg_temp_free_i32(t2
);
4534 tcg_temp_free_i32(t3
);
4539 TCGv_i32 t2
= tcg_temp_new_i32();
4540 TCGv_i32 t3
= tcg_temp_new_i32();
4541 tcg_gen_trunc_tl_i32(t2
, t0
);
4542 tcg_gen_trunc_tl_i32(t3
, t1
);
4543 tcg_gen_mul_i32(t2
, t2
, t3
);
4544 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4545 tcg_temp_free_i32(t2
);
4546 tcg_temp_free_i32(t3
);
4551 TCGv_i32 t2
= tcg_temp_new_i32();
4552 TCGv_i32 t3
= tcg_temp_new_i32();
4553 tcg_gen_trunc_tl_i32(t2
, t0
);
4554 tcg_gen_trunc_tl_i32(t3
, t1
);
4555 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4556 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4557 tcg_temp_free_i32(t2
);
4558 tcg_temp_free_i32(t3
);
4561 #if defined(TARGET_MIPS64)
4564 TCGv t2
= tcg_temp_new();
4565 TCGv t3
= tcg_temp_new();
4566 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4567 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4568 tcg_gen_and_tl(t2
, t2
, t3
);
4569 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4570 tcg_gen_or_tl(t2
, t2
, t3
);
4571 tcg_gen_movi_tl(t3
, 0);
4572 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4573 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4580 TCGv t2
= tcg_temp_new();
4581 TCGv t3
= tcg_temp_new();
4582 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4583 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4584 tcg_gen_and_tl(t2
, t2
, t3
);
4585 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4586 tcg_gen_or_tl(t2
, t2
, t3
);
4587 tcg_gen_movi_tl(t3
, 0);
4588 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4589 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4596 TCGv t2
= tcg_const_tl(0);
4597 TCGv t3
= tcg_const_tl(1);
4598 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4599 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4606 TCGv t2
= tcg_const_tl(0);
4607 TCGv t3
= tcg_const_tl(1);
4608 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4609 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4615 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4619 TCGv t2
= tcg_temp_new();
4620 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4625 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4629 TCGv t2
= tcg_temp_new();
4630 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4636 MIPS_INVAL("r6 mul/div");
4637 generate_exception_end(ctx
, EXCP_RI
);
4645 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4646 int acc
, int rs
, int rt
)
4650 t0
= tcg_temp_new();
4651 t1
= tcg_temp_new();
4653 gen_load_gpr(t0
, rs
);
4654 gen_load_gpr(t1
, rt
);
4657 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4666 TCGv t2
= tcg_temp_new();
4667 TCGv t3
= tcg_temp_new();
4668 tcg_gen_ext32s_tl(t0
, t0
);
4669 tcg_gen_ext32s_tl(t1
, t1
);
4670 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4671 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4672 tcg_gen_and_tl(t2
, t2
, t3
);
4673 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4674 tcg_gen_or_tl(t2
, t2
, t3
);
4675 tcg_gen_movi_tl(t3
, 0);
4676 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4677 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4678 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4679 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4680 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4686 case TX79_MMI_DIVU1
:
4688 TCGv t2
= tcg_const_tl(0);
4689 TCGv t3
= tcg_const_tl(1);
4690 tcg_gen_ext32u_tl(t0
, t0
);
4691 tcg_gen_ext32u_tl(t1
, t1
);
4692 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4693 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4694 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4695 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4696 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4703 TCGv_i32 t2
= tcg_temp_new_i32();
4704 TCGv_i32 t3
= tcg_temp_new_i32();
4705 tcg_gen_trunc_tl_i32(t2
, t0
);
4706 tcg_gen_trunc_tl_i32(t3
, t1
);
4707 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4708 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4709 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4710 tcg_temp_free_i32(t2
);
4711 tcg_temp_free_i32(t3
);
4716 TCGv_i32 t2
= tcg_temp_new_i32();
4717 TCGv_i32 t3
= tcg_temp_new_i32();
4718 tcg_gen_trunc_tl_i32(t2
, t0
);
4719 tcg_gen_trunc_tl_i32(t3
, t1
);
4720 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4721 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4722 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4723 tcg_temp_free_i32(t2
);
4724 tcg_temp_free_i32(t3
);
4727 #if defined(TARGET_MIPS64)
4730 TCGv t2
= tcg_temp_new();
4731 TCGv t3
= tcg_temp_new();
4732 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4733 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4734 tcg_gen_and_tl(t2
, t2
, t3
);
4735 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4736 tcg_gen_or_tl(t2
, t2
, t3
);
4737 tcg_gen_movi_tl(t3
, 0);
4738 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4739 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4740 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4747 TCGv t2
= tcg_const_tl(0);
4748 TCGv t3
= tcg_const_tl(1);
4749 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4750 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4751 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4757 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4760 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4765 TCGv_i64 t2
= tcg_temp_new_i64();
4766 TCGv_i64 t3
= tcg_temp_new_i64();
4768 tcg_gen_ext_tl_i64(t2
, t0
);
4769 tcg_gen_ext_tl_i64(t3
, t1
);
4770 tcg_gen_mul_i64(t2
, t2
, t3
);
4771 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4772 tcg_gen_add_i64(t2
, t2
, t3
);
4773 tcg_temp_free_i64(t3
);
4774 gen_move_low32(cpu_LO
[acc
], t2
);
4775 gen_move_high32(cpu_HI
[acc
], t2
);
4776 tcg_temp_free_i64(t2
);
4781 TCGv_i64 t2
= tcg_temp_new_i64();
4782 TCGv_i64 t3
= tcg_temp_new_i64();
4784 tcg_gen_ext32u_tl(t0
, t0
);
4785 tcg_gen_ext32u_tl(t1
, t1
);
4786 tcg_gen_extu_tl_i64(t2
, t0
);
4787 tcg_gen_extu_tl_i64(t3
, t1
);
4788 tcg_gen_mul_i64(t2
, t2
, t3
);
4789 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4790 tcg_gen_add_i64(t2
, t2
, t3
);
4791 tcg_temp_free_i64(t3
);
4792 gen_move_low32(cpu_LO
[acc
], t2
);
4793 gen_move_high32(cpu_HI
[acc
], t2
);
4794 tcg_temp_free_i64(t2
);
4799 TCGv_i64 t2
= tcg_temp_new_i64();
4800 TCGv_i64 t3
= tcg_temp_new_i64();
4802 tcg_gen_ext_tl_i64(t2
, t0
);
4803 tcg_gen_ext_tl_i64(t3
, t1
);
4804 tcg_gen_mul_i64(t2
, t2
, t3
);
4805 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4806 tcg_gen_sub_i64(t2
, t3
, t2
);
4807 tcg_temp_free_i64(t3
);
4808 gen_move_low32(cpu_LO
[acc
], t2
);
4809 gen_move_high32(cpu_HI
[acc
], t2
);
4810 tcg_temp_free_i64(t2
);
4815 TCGv_i64 t2
= tcg_temp_new_i64();
4816 TCGv_i64 t3
= tcg_temp_new_i64();
4818 tcg_gen_ext32u_tl(t0
, t0
);
4819 tcg_gen_ext32u_tl(t1
, t1
);
4820 tcg_gen_extu_tl_i64(t2
, t0
);
4821 tcg_gen_extu_tl_i64(t3
, t1
);
4822 tcg_gen_mul_i64(t2
, t2
, t3
);
4823 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4824 tcg_gen_sub_i64(t2
, t3
, t2
);
4825 tcg_temp_free_i64(t3
);
4826 gen_move_low32(cpu_LO
[acc
], t2
);
4827 gen_move_high32(cpu_HI
[acc
], t2
);
4828 tcg_temp_free_i64(t2
);
4832 MIPS_INVAL("mul/div");
4833 generate_exception_end(ctx
, EXCP_RI
);
4842 * These MULT and MULTU instructions implemented in for example the
4843 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4844 * architectures are special three-operand variants with the syntax
4846 * MULT[U][1] rd, rs, rt
4850 * (rd, LO, HI) <- rs * rt
4852 * where the low-order 32-bits of the result is placed into both the
4853 * GPR rd and the special register LO. The high-order 32-bits of the
4854 * result is placed into the special register HI.
4856 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4857 * which is the zero register that always reads as 0.
4859 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4860 int rd
, int rs
, int rt
)
4862 TCGv t0
= tcg_temp_new();
4863 TCGv t1
= tcg_temp_new();
4866 gen_load_gpr(t0
, rs
);
4867 gen_load_gpr(t1
, rt
);
4870 case TX79_MMI_MULT1
:
4875 TCGv_i32 t2
= tcg_temp_new_i32();
4876 TCGv_i32 t3
= tcg_temp_new_i32();
4877 tcg_gen_trunc_tl_i32(t2
, t0
);
4878 tcg_gen_trunc_tl_i32(t3
, t1
);
4879 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4881 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4883 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4884 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4885 tcg_temp_free_i32(t2
);
4886 tcg_temp_free_i32(t3
);
4889 case TX79_MMI_MULTU1
:
4894 TCGv_i32 t2
= tcg_temp_new_i32();
4895 TCGv_i32 t3
= tcg_temp_new_i32();
4896 tcg_gen_trunc_tl_i32(t2
, t0
);
4897 tcg_gen_trunc_tl_i32(t3
, t1
);
4898 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4900 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4902 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4903 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4904 tcg_temp_free_i32(t2
);
4905 tcg_temp_free_i32(t3
);
4909 MIPS_INVAL("mul TXx9");
4910 generate_exception_end(ctx
, EXCP_RI
);
4919 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4920 int rd
, int rs
, int rt
)
4922 TCGv t0
= tcg_temp_new();
4923 TCGv t1
= tcg_temp_new();
4925 gen_load_gpr(t0
, rs
);
4926 gen_load_gpr(t1
, rt
);
4929 case OPC_VR54XX_MULS
:
4930 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4932 case OPC_VR54XX_MULSU
:
4933 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4935 case OPC_VR54XX_MACC
:
4936 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4938 case OPC_VR54XX_MACCU
:
4939 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4941 case OPC_VR54XX_MSAC
:
4942 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4944 case OPC_VR54XX_MSACU
:
4945 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4947 case OPC_VR54XX_MULHI
:
4948 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4950 case OPC_VR54XX_MULHIU
:
4951 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4953 case OPC_VR54XX_MULSHI
:
4954 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4956 case OPC_VR54XX_MULSHIU
:
4957 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4959 case OPC_VR54XX_MACCHI
:
4960 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4962 case OPC_VR54XX_MACCHIU
:
4963 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4965 case OPC_VR54XX_MSACHI
:
4966 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4968 case OPC_VR54XX_MSACHIU
:
4969 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4972 MIPS_INVAL("mul vr54xx");
4973 generate_exception_end(ctx
, EXCP_RI
);
4976 gen_store_gpr(t0
, rd
);
4983 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4993 gen_load_gpr(t0
, rs
);
4998 #if defined(TARGET_MIPS64)
5002 tcg_gen_not_tl(t0
, t0
);
5011 tcg_gen_ext32u_tl(t0
, t0
);
5012 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5013 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5015 #if defined(TARGET_MIPS64)
5020 tcg_gen_clzi_i64(t0
, t0
, 64);
5026 /* Godson integer instructions */
5027 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5028 int rd
, int rs
, int rt
)
5040 case OPC_MULTU_G_2E
:
5041 case OPC_MULTU_G_2F
:
5042 #if defined(TARGET_MIPS64)
5043 case OPC_DMULT_G_2E
:
5044 case OPC_DMULT_G_2F
:
5045 case OPC_DMULTU_G_2E
:
5046 case OPC_DMULTU_G_2F
:
5048 t0
= tcg_temp_new();
5049 t1
= tcg_temp_new();
5052 t0
= tcg_temp_local_new();
5053 t1
= tcg_temp_local_new();
5057 gen_load_gpr(t0
, rs
);
5058 gen_load_gpr(t1
, rt
);
5063 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5064 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5066 case OPC_MULTU_G_2E
:
5067 case OPC_MULTU_G_2F
:
5068 tcg_gen_ext32u_tl(t0
, t0
);
5069 tcg_gen_ext32u_tl(t1
, t1
);
5070 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5071 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5076 TCGLabel
*l1
= gen_new_label();
5077 TCGLabel
*l2
= gen_new_label();
5078 TCGLabel
*l3
= gen_new_label();
5079 tcg_gen_ext32s_tl(t0
, t0
);
5080 tcg_gen_ext32s_tl(t1
, t1
);
5081 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5082 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5085 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5086 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5087 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5090 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5091 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5098 TCGLabel
*l1
= gen_new_label();
5099 TCGLabel
*l2
= gen_new_label();
5100 tcg_gen_ext32u_tl(t0
, t0
);
5101 tcg_gen_ext32u_tl(t1
, t1
);
5102 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5103 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5106 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5107 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5114 TCGLabel
*l1
= gen_new_label();
5115 TCGLabel
*l2
= gen_new_label();
5116 TCGLabel
*l3
= gen_new_label();
5117 tcg_gen_ext32u_tl(t0
, t0
);
5118 tcg_gen_ext32u_tl(t1
, t1
);
5119 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5120 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5121 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5123 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5126 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5127 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5134 TCGLabel
*l1
= gen_new_label();
5135 TCGLabel
*l2
= gen_new_label();
5136 tcg_gen_ext32u_tl(t0
, t0
);
5137 tcg_gen_ext32u_tl(t1
, t1
);
5138 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5139 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5142 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5143 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5147 #if defined(TARGET_MIPS64)
5148 case OPC_DMULT_G_2E
:
5149 case OPC_DMULT_G_2F
:
5150 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5152 case OPC_DMULTU_G_2E
:
5153 case OPC_DMULTU_G_2F
:
5154 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5159 TCGLabel
*l1
= gen_new_label();
5160 TCGLabel
*l2
= gen_new_label();
5161 TCGLabel
*l3
= gen_new_label();
5162 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5163 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5166 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5167 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5168 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5171 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5175 case OPC_DDIVU_G_2E
:
5176 case OPC_DDIVU_G_2F
:
5178 TCGLabel
*l1
= gen_new_label();
5179 TCGLabel
*l2
= gen_new_label();
5180 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5181 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5184 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5191 TCGLabel
*l1
= gen_new_label();
5192 TCGLabel
*l2
= gen_new_label();
5193 TCGLabel
*l3
= gen_new_label();
5194 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5195 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5196 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5198 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5201 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5205 case OPC_DMODU_G_2E
:
5206 case OPC_DMODU_G_2F
:
5208 TCGLabel
*l1
= gen_new_label();
5209 TCGLabel
*l2
= gen_new_label();
5210 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5211 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5214 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5225 /* Loongson multimedia instructions */
5226 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5228 uint32_t opc
, shift_max
;
5231 opc
= MASK_LMI(ctx
->opcode
);
5237 t0
= tcg_temp_local_new_i64();
5238 t1
= tcg_temp_local_new_i64();
5241 t0
= tcg_temp_new_i64();
5242 t1
= tcg_temp_new_i64();
5246 check_cp1_enabled(ctx
);
5247 gen_load_fpr64(ctx
, t0
, rs
);
5248 gen_load_fpr64(ctx
, t1
, rt
);
5250 #define LMI_HELPER(UP, LO) \
5251 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5252 #define LMI_HELPER_1(UP, LO) \
5253 case OPC_##UP: gen_helper_##LO(t0, t0); break
5254 #define LMI_DIRECT(UP, LO, OP) \
5255 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5258 LMI_HELPER(PADDSH
, paddsh
);
5259 LMI_HELPER(PADDUSH
, paddush
);
5260 LMI_HELPER(PADDH
, paddh
);
5261 LMI_HELPER(PADDW
, paddw
);
5262 LMI_HELPER(PADDSB
, paddsb
);
5263 LMI_HELPER(PADDUSB
, paddusb
);
5264 LMI_HELPER(PADDB
, paddb
);
5266 LMI_HELPER(PSUBSH
, psubsh
);
5267 LMI_HELPER(PSUBUSH
, psubush
);
5268 LMI_HELPER(PSUBH
, psubh
);
5269 LMI_HELPER(PSUBW
, psubw
);
5270 LMI_HELPER(PSUBSB
, psubsb
);
5271 LMI_HELPER(PSUBUSB
, psubusb
);
5272 LMI_HELPER(PSUBB
, psubb
);
5274 LMI_HELPER(PSHUFH
, pshufh
);
5275 LMI_HELPER(PACKSSWH
, packsswh
);
5276 LMI_HELPER(PACKSSHB
, packsshb
);
5277 LMI_HELPER(PACKUSHB
, packushb
);
5279 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5280 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5281 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5282 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5283 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5284 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5286 LMI_HELPER(PAVGH
, pavgh
);
5287 LMI_HELPER(PAVGB
, pavgb
);
5288 LMI_HELPER(PMAXSH
, pmaxsh
);
5289 LMI_HELPER(PMINSH
, pminsh
);
5290 LMI_HELPER(PMAXUB
, pmaxub
);
5291 LMI_HELPER(PMINUB
, pminub
);
5293 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5294 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5295 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5296 LMI_HELPER(PCMPGTH
, pcmpgth
);
5297 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5298 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5300 LMI_HELPER(PSLLW
, psllw
);
5301 LMI_HELPER(PSLLH
, psllh
);
5302 LMI_HELPER(PSRLW
, psrlw
);
5303 LMI_HELPER(PSRLH
, psrlh
);
5304 LMI_HELPER(PSRAW
, psraw
);
5305 LMI_HELPER(PSRAH
, psrah
);
5307 LMI_HELPER(PMULLH
, pmullh
);
5308 LMI_HELPER(PMULHH
, pmulhh
);
5309 LMI_HELPER(PMULHUH
, pmulhuh
);
5310 LMI_HELPER(PMADDHW
, pmaddhw
);
5312 LMI_HELPER(PASUBUB
, pasubub
);
5313 LMI_HELPER_1(BIADD
, biadd
);
5314 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5316 LMI_DIRECT(PADDD
, paddd
, add
);
5317 LMI_DIRECT(PSUBD
, psubd
, sub
);
5318 LMI_DIRECT(XOR_CP2
, xor, xor);
5319 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5320 LMI_DIRECT(AND_CP2
, and, and);
5321 LMI_DIRECT(OR_CP2
, or, or);
5324 tcg_gen_andc_i64(t0
, t1
, t0
);
5328 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5331 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5334 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5337 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5341 tcg_gen_andi_i64(t1
, t1
, 3);
5342 tcg_gen_shli_i64(t1
, t1
, 4);
5343 tcg_gen_shr_i64(t0
, t0
, t1
);
5344 tcg_gen_ext16u_i64(t0
, t0
);
5348 tcg_gen_add_i64(t0
, t0
, t1
);
5349 tcg_gen_ext32s_i64(t0
, t0
);
5352 tcg_gen_sub_i64(t0
, t0
, t1
);
5353 tcg_gen_ext32s_i64(t0
, t0
);
5375 /* Make sure shift count isn't TCG undefined behaviour. */
5376 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5381 tcg_gen_shl_i64(t0
, t0
, t1
);
5385 /* Since SRA is UndefinedResult without sign-extended inputs,
5386 we can treat SRA and DSRA the same. */
5387 tcg_gen_sar_i64(t0
, t0
, t1
);
5390 /* We want to shift in zeros for SRL; zero-extend first. */
5391 tcg_gen_ext32u_i64(t0
, t0
);
5394 tcg_gen_shr_i64(t0
, t0
, t1
);
5398 if (shift_max
== 32) {
5399 tcg_gen_ext32s_i64(t0
, t0
);
5402 /* Shifts larger than MAX produce zero. */
5403 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5404 tcg_gen_neg_i64(t1
, t1
);
5405 tcg_gen_and_i64(t0
, t0
, t1
);
5411 TCGv_i64 t2
= tcg_temp_new_i64();
5412 TCGLabel
*lab
= gen_new_label();
5414 tcg_gen_mov_i64(t2
, t0
);
5415 tcg_gen_add_i64(t0
, t1
, t2
);
5416 if (opc
== OPC_ADD_CP2
) {
5417 tcg_gen_ext32s_i64(t0
, t0
);
5419 tcg_gen_xor_i64(t1
, t1
, t2
);
5420 tcg_gen_xor_i64(t2
, t2
, t0
);
5421 tcg_gen_andc_i64(t1
, t2
, t1
);
5422 tcg_temp_free_i64(t2
);
5423 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5424 generate_exception(ctx
, EXCP_OVERFLOW
);
5432 TCGv_i64 t2
= tcg_temp_new_i64();
5433 TCGLabel
*lab
= gen_new_label();
5435 tcg_gen_mov_i64(t2
, t0
);
5436 tcg_gen_sub_i64(t0
, t1
, t2
);
5437 if (opc
== OPC_SUB_CP2
) {
5438 tcg_gen_ext32s_i64(t0
, t0
);
5440 tcg_gen_xor_i64(t1
, t1
, t2
);
5441 tcg_gen_xor_i64(t2
, t2
, t0
);
5442 tcg_gen_and_i64(t1
, t1
, t2
);
5443 tcg_temp_free_i64(t2
);
5444 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5445 generate_exception(ctx
, EXCP_OVERFLOW
);
5451 tcg_gen_ext32u_i64(t0
, t0
);
5452 tcg_gen_ext32u_i64(t1
, t1
);
5453 tcg_gen_mul_i64(t0
, t0
, t1
);
5462 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5463 FD field is the CC field? */
5465 MIPS_INVAL("loongson_cp2");
5466 generate_exception_end(ctx
, EXCP_RI
);
5473 gen_store_fpr64(ctx
, t0
, rd
);
5475 tcg_temp_free_i64(t0
);
5476 tcg_temp_free_i64(t1
);
5480 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5481 int rs
, int rt
, int16_t imm
)
5484 TCGv t0
= tcg_temp_new();
5485 TCGv t1
= tcg_temp_new();
5488 /* Load needed operands */
5496 /* Compare two registers */
5498 gen_load_gpr(t0
, rs
);
5499 gen_load_gpr(t1
, rt
);
5509 /* Compare register to immediate */
5510 if (rs
!= 0 || imm
!= 0) {
5511 gen_load_gpr(t0
, rs
);
5512 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5519 case OPC_TEQ
: /* rs == rs */
5520 case OPC_TEQI
: /* r0 == 0 */
5521 case OPC_TGE
: /* rs >= rs */
5522 case OPC_TGEI
: /* r0 >= 0 */
5523 case OPC_TGEU
: /* rs >= rs unsigned */
5524 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5526 generate_exception_end(ctx
, EXCP_TRAP
);
5528 case OPC_TLT
: /* rs < rs */
5529 case OPC_TLTI
: /* r0 < 0 */
5530 case OPC_TLTU
: /* rs < rs unsigned */
5531 case OPC_TLTIU
: /* r0 < 0 unsigned */
5532 case OPC_TNE
: /* rs != rs */
5533 case OPC_TNEI
: /* r0 != 0 */
5534 /* Never trap: treat as NOP. */
5538 TCGLabel
*l1
= gen_new_label();
5543 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5547 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5551 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5555 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5559 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5563 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5566 generate_exception(ctx
, EXCP_TRAP
);
5573 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5575 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5579 #ifndef CONFIG_USER_ONLY
5580 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5586 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5588 if (use_goto_tb(ctx
, dest
)) {
5591 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5594 if (ctx
->base
.singlestep_enabled
) {
5595 save_cpu_state(ctx
, 0);
5596 gen_helper_raise_exception_debug(cpu_env
);
5598 tcg_gen_lookup_and_goto_ptr();
5602 /* Branches (before delay slot) */
5603 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5605 int rs
, int rt
, int32_t offset
,
5608 target_ulong btgt
= -1;
5610 int bcond_compute
= 0;
5611 TCGv t0
= tcg_temp_new();
5612 TCGv t1
= tcg_temp_new();
5614 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5615 #ifdef MIPS_DEBUG_DISAS
5616 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5617 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5619 generate_exception_end(ctx
, EXCP_RI
);
5623 /* Load needed operands */
5629 /* Compare two registers */
5631 gen_load_gpr(t0
, rs
);
5632 gen_load_gpr(t1
, rt
);
5635 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5649 /* Compare to zero */
5651 gen_load_gpr(t0
, rs
);
5654 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5657 #if defined(TARGET_MIPS64)
5659 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5661 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5664 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5669 /* Jump to immediate */
5670 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5675 /* Jump to register */
5676 if (offset
!= 0 && offset
!= 16) {
5677 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5678 others are reserved. */
5679 MIPS_INVAL("jump hint");
5680 generate_exception_end(ctx
, EXCP_RI
);
5683 gen_load_gpr(btarget
, rs
);
5686 MIPS_INVAL("branch/jump");
5687 generate_exception_end(ctx
, EXCP_RI
);
5690 if (bcond_compute
== 0) {
5691 /* No condition to be computed */
5693 case OPC_BEQ
: /* rx == rx */
5694 case OPC_BEQL
: /* rx == rx likely */
5695 case OPC_BGEZ
: /* 0 >= 0 */
5696 case OPC_BGEZL
: /* 0 >= 0 likely */
5697 case OPC_BLEZ
: /* 0 <= 0 */
5698 case OPC_BLEZL
: /* 0 <= 0 likely */
5700 ctx
->hflags
|= MIPS_HFLAG_B
;
5702 case OPC_BGEZAL
: /* 0 >= 0 */
5703 case OPC_BGEZALL
: /* 0 >= 0 likely */
5704 /* Always take and link */
5706 ctx
->hflags
|= MIPS_HFLAG_B
;
5708 case OPC_BNE
: /* rx != rx */
5709 case OPC_BGTZ
: /* 0 > 0 */
5710 case OPC_BLTZ
: /* 0 < 0 */
5713 case OPC_BLTZAL
: /* 0 < 0 */
5714 /* Handle as an unconditional branch to get correct delay
5717 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5718 ctx
->hflags
|= MIPS_HFLAG_B
;
5720 case OPC_BLTZALL
: /* 0 < 0 likely */
5721 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5722 /* Skip the instruction in the delay slot */
5723 ctx
->base
.pc_next
+= 4;
5725 case OPC_BNEL
: /* rx != rx likely */
5726 case OPC_BGTZL
: /* 0 > 0 likely */
5727 case OPC_BLTZL
: /* 0 < 0 likely */
5728 /* Skip the instruction in the delay slot */
5729 ctx
->base
.pc_next
+= 4;
5732 ctx
->hflags
|= MIPS_HFLAG_B
;
5735 ctx
->hflags
|= MIPS_HFLAG_BX
;
5739 ctx
->hflags
|= MIPS_HFLAG_B
;
5742 ctx
->hflags
|= MIPS_HFLAG_BR
;
5746 ctx
->hflags
|= MIPS_HFLAG_BR
;
5749 MIPS_INVAL("branch/jump");
5750 generate_exception_end(ctx
, EXCP_RI
);
5756 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5759 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5762 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5765 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5768 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5771 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5774 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5778 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5782 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5785 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5788 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5791 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5794 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5797 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5800 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5802 #if defined(TARGET_MIPS64)
5804 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5808 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5811 ctx
->hflags
|= MIPS_HFLAG_BC
;
5814 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5817 ctx
->hflags
|= MIPS_HFLAG_BL
;
5820 MIPS_INVAL("conditional branch/jump");
5821 generate_exception_end(ctx
, EXCP_RI
);
5826 ctx
->btarget
= btgt
;
5828 switch (delayslot_size
) {
5830 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5833 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5838 int post_delay
= insn_bytes
+ delayslot_size
;
5839 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5841 tcg_gen_movi_tl(cpu_gpr
[blink
],
5842 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5846 if (insn_bytes
== 2)
5847 ctx
->hflags
|= MIPS_HFLAG_B16
;
5853 /* nanoMIPS Branches */
5854 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5856 int rs
, int rt
, int32_t offset
)
5858 target_ulong btgt
= -1;
5859 int bcond_compute
= 0;
5860 TCGv t0
= tcg_temp_new();
5861 TCGv t1
= tcg_temp_new();
5863 /* Load needed operands */
5867 /* Compare two registers */
5869 gen_load_gpr(t0
, rs
);
5870 gen_load_gpr(t1
, rt
);
5873 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5876 /* Compare to zero */
5878 gen_load_gpr(t0
, rs
);
5881 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5884 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5886 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5890 /* Jump to register */
5891 if (offset
!= 0 && offset
!= 16) {
5892 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5893 others are reserved. */
5894 MIPS_INVAL("jump hint");
5895 generate_exception_end(ctx
, EXCP_RI
);
5898 gen_load_gpr(btarget
, rs
);
5901 MIPS_INVAL("branch/jump");
5902 generate_exception_end(ctx
, EXCP_RI
);
5905 if (bcond_compute
== 0) {
5906 /* No condition to be computed */
5908 case OPC_BEQ
: /* rx == rx */
5910 ctx
->hflags
|= MIPS_HFLAG_B
;
5912 case OPC_BGEZAL
: /* 0 >= 0 */
5913 /* Always take and link */
5914 tcg_gen_movi_tl(cpu_gpr
[31],
5915 ctx
->base
.pc_next
+ insn_bytes
);
5916 ctx
->hflags
|= MIPS_HFLAG_B
;
5918 case OPC_BNE
: /* rx != rx */
5919 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5920 /* Skip the instruction in the delay slot */
5921 ctx
->base
.pc_next
+= 4;
5924 ctx
->hflags
|= MIPS_HFLAG_BR
;
5928 tcg_gen_movi_tl(cpu_gpr
[rt
],
5929 ctx
->base
.pc_next
+ insn_bytes
);
5931 ctx
->hflags
|= MIPS_HFLAG_BR
;
5934 MIPS_INVAL("branch/jump");
5935 generate_exception_end(ctx
, EXCP_RI
);
5941 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5944 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5947 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5948 tcg_gen_movi_tl(cpu_gpr
[31],
5949 ctx
->base
.pc_next
+ insn_bytes
);
5952 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5954 ctx
->hflags
|= MIPS_HFLAG_BC
;
5957 MIPS_INVAL("conditional branch/jump");
5958 generate_exception_end(ctx
, EXCP_RI
);
5963 ctx
->btarget
= btgt
;
5966 if (insn_bytes
== 2) {
5967 ctx
->hflags
|= MIPS_HFLAG_B16
;
5974 /* special3 bitfield operations */
5975 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
5976 int rs
, int lsb
, int msb
)
5978 TCGv t0
= tcg_temp_new();
5979 TCGv t1
= tcg_temp_new();
5981 gen_load_gpr(t1
, rs
);
5984 if (lsb
+ msb
> 31) {
5988 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5990 /* The two checks together imply that lsb == 0,
5991 so this is a simple sign-extension. */
5992 tcg_gen_ext32s_tl(t0
, t1
);
5995 #if defined(TARGET_MIPS64)
6004 if (lsb
+ msb
> 63) {
6007 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6014 gen_load_gpr(t0
, rt
);
6015 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6016 tcg_gen_ext32s_tl(t0
, t0
);
6018 #if defined(TARGET_MIPS64)
6029 gen_load_gpr(t0
, rt
);
6030 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6035 MIPS_INVAL("bitops");
6036 generate_exception_end(ctx
, EXCP_RI
);
6041 gen_store_gpr(t0
, rt
);
6046 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6051 /* If no destination, treat it as a NOP. */
6055 t0
= tcg_temp_new();
6056 gen_load_gpr(t0
, rt
);
6060 TCGv t1
= tcg_temp_new();
6061 TCGv t2
= tcg_const_tl(0x00FF00FF);
6063 tcg_gen_shri_tl(t1
, t0
, 8);
6064 tcg_gen_and_tl(t1
, t1
, t2
);
6065 tcg_gen_and_tl(t0
, t0
, t2
);
6066 tcg_gen_shli_tl(t0
, t0
, 8);
6067 tcg_gen_or_tl(t0
, t0
, t1
);
6070 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6074 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6077 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6079 #if defined(TARGET_MIPS64)
6082 TCGv t1
= tcg_temp_new();
6083 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6085 tcg_gen_shri_tl(t1
, t0
, 8);
6086 tcg_gen_and_tl(t1
, t1
, t2
);
6087 tcg_gen_and_tl(t0
, t0
, t2
);
6088 tcg_gen_shli_tl(t0
, t0
, 8);
6089 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6096 TCGv t1
= tcg_temp_new();
6097 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6099 tcg_gen_shri_tl(t1
, t0
, 16);
6100 tcg_gen_and_tl(t1
, t1
, t2
);
6101 tcg_gen_and_tl(t0
, t0
, t2
);
6102 tcg_gen_shli_tl(t0
, t0
, 16);
6103 tcg_gen_or_tl(t0
, t0
, t1
);
6104 tcg_gen_shri_tl(t1
, t0
, 32);
6105 tcg_gen_shli_tl(t0
, t0
, 32);
6106 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6113 MIPS_INVAL("bsfhl");
6114 generate_exception_end(ctx
, EXCP_RI
);
6121 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6130 t0
= tcg_temp_new();
6131 t1
= tcg_temp_new();
6132 gen_load_gpr(t0
, rs
);
6133 gen_load_gpr(t1
, rt
);
6134 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6135 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6136 if (opc
== OPC_LSA
) {
6137 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6146 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6154 t0
= tcg_temp_new();
6155 if (bits
== 0 || bits
== wordsz
) {
6157 gen_load_gpr(t0
, rt
);
6159 gen_load_gpr(t0
, rs
);
6163 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6165 #if defined(TARGET_MIPS64)
6167 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6172 TCGv t1
= tcg_temp_new();
6173 gen_load_gpr(t0
, rt
);
6174 gen_load_gpr(t1
, rs
);
6178 TCGv_i64 t2
= tcg_temp_new_i64();
6179 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6180 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6181 gen_move_low32(cpu_gpr
[rd
], t2
);
6182 tcg_temp_free_i64(t2
);
6185 #if defined(TARGET_MIPS64)
6187 tcg_gen_shli_tl(t0
, t0
, bits
);
6188 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6189 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6199 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6202 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6205 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6208 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6211 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6218 t0
= tcg_temp_new();
6219 gen_load_gpr(t0
, rt
);
6222 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6224 #if defined(TARGET_MIPS64)
6226 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6233 #ifndef CONFIG_USER_ONLY
6234 /* CP0 (MMU and control) */
6235 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6237 TCGv_i64 t0
= tcg_temp_new_i64();
6238 TCGv_i64 t1
= tcg_temp_new_i64();
6240 tcg_gen_ext_tl_i64(t0
, arg
);
6241 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6242 #if defined(TARGET_MIPS64)
6243 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6245 tcg_gen_concat32_i64(t1
, t1
, t0
);
6247 tcg_gen_st_i64(t1
, cpu_env
, off
);
6248 tcg_temp_free_i64(t1
);
6249 tcg_temp_free_i64(t0
);
6252 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6254 TCGv_i64 t0
= tcg_temp_new_i64();
6255 TCGv_i64 t1
= tcg_temp_new_i64();
6257 tcg_gen_ext_tl_i64(t0
, arg
);
6258 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6259 tcg_gen_concat32_i64(t1
, t1
, t0
);
6260 tcg_gen_st_i64(t1
, cpu_env
, off
);
6261 tcg_temp_free_i64(t1
);
6262 tcg_temp_free_i64(t0
);
6265 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6267 TCGv_i64 t0
= tcg_temp_new_i64();
6269 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6270 #if defined(TARGET_MIPS64)
6271 tcg_gen_shri_i64(t0
, t0
, 30);
6273 tcg_gen_shri_i64(t0
, t0
, 32);
6275 gen_move_low32(arg
, t0
);
6276 tcg_temp_free_i64(t0
);
6279 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6281 TCGv_i64 t0
= tcg_temp_new_i64();
6283 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6284 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6285 gen_move_low32(arg
, t0
);
6286 tcg_temp_free_i64(t0
);
6289 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6291 TCGv_i32 t0
= tcg_temp_new_i32();
6293 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6294 tcg_gen_ext_i32_tl(arg
, t0
);
6295 tcg_temp_free_i32(t0
);
6298 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6300 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6301 tcg_gen_ext32s_tl(arg
, arg
);
6304 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6306 TCGv_i32 t0
= tcg_temp_new_i32();
6308 tcg_gen_trunc_tl_i32(t0
, arg
);
6309 tcg_gen_st_i32(t0
, cpu_env
, off
);
6310 tcg_temp_free_i32(t0
);
6313 #define CP0_CHECK(c) \
6316 goto cp0_unimplemented; \
6320 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6322 const char *rn
= "invalid";
6328 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6329 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6333 goto cp0_unimplemented
;
6339 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6340 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6344 goto cp0_unimplemented
;
6350 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6351 ctx
->CP0_LLAddr_shift
);
6355 CP0_CHECK(ctx
->mrp
);
6356 gen_helper_mfhc0_maar(arg
, cpu_env
);
6360 goto cp0_unimplemented
;
6369 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6373 goto cp0_unimplemented
;
6377 goto cp0_unimplemented
;
6379 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6383 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6384 tcg_gen_movi_tl(arg
, 0);
6387 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6389 const char *rn
= "invalid";
6390 uint64_t mask
= ctx
->PAMask
>> 36;
6396 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6397 tcg_gen_andi_tl(arg
, arg
, mask
);
6398 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6402 goto cp0_unimplemented
;
6408 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6409 tcg_gen_andi_tl(arg
, arg
, mask
);
6410 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6414 goto cp0_unimplemented
;
6420 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6421 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6422 relevant for modern MIPS cores supporting MTHC0, therefore
6423 treating MTHC0 to LLAddr as NOP. */
6427 CP0_CHECK(ctx
->mrp
);
6428 gen_helper_mthc0_maar(cpu_env
, arg
);
6432 goto cp0_unimplemented
;
6441 tcg_gen_andi_tl(arg
, arg
, mask
);
6442 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6446 goto cp0_unimplemented
;
6450 goto cp0_unimplemented
;
6452 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6455 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6458 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6460 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6461 tcg_gen_movi_tl(arg
, 0);
6463 tcg_gen_movi_tl(arg
, ~0);
6467 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6469 const char *rn
= "invalid";
6472 check_insn(ctx
, ISA_MIPS32
);
6478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6482 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6483 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6487 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6488 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6492 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6493 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6498 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6502 goto cp0_unimplemented
;
6508 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6509 gen_helper_mfc0_random(arg
, cpu_env
);
6513 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6518 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6523 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6524 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6528 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6529 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6533 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6534 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6538 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6539 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6540 rn
= "VPEScheFBack";
6543 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6548 goto cp0_unimplemented
;
6555 TCGv_i64 tmp
= tcg_temp_new_i64();
6556 tcg_gen_ld_i64(tmp
, cpu_env
,
6557 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6558 #if defined(TARGET_MIPS64)
6560 /* Move RI/XI fields to bits 31:30 */
6561 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6562 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6565 gen_move_low32(arg
, tmp
);
6566 tcg_temp_free_i64(tmp
);
6571 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6572 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6576 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6577 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6581 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6582 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6586 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6587 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6592 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6596 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6597 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6601 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6602 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6606 goto cp0_unimplemented
;
6613 TCGv_i64 tmp
= tcg_temp_new_i64();
6614 tcg_gen_ld_i64(tmp
, cpu_env
,
6615 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6616 #if defined(TARGET_MIPS64)
6618 /* Move RI/XI fields to bits 31:30 */
6619 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6620 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6623 gen_move_low32(arg
, tmp
);
6624 tcg_temp_free_i64(tmp
);
6630 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6631 rn
= "GlobalNumber";
6634 goto cp0_unimplemented
;
6640 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6641 tcg_gen_ext32s_tl(arg
, arg
);
6645 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6646 rn
= "ContextConfig";
6647 goto cp0_unimplemented
;
6649 CP0_CHECK(ctx
->ulri
);
6650 tcg_gen_ld_tl(arg
, cpu_env
,
6651 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6652 tcg_gen_ext32s_tl(arg
, arg
);
6656 goto cp0_unimplemented
;
6662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6666 check_insn(ctx
, ISA_MIPS32R2
);
6667 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6672 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6673 tcg_gen_ext32s_tl(arg
, arg
);
6678 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6679 tcg_gen_ext32s_tl(arg
, arg
);
6684 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6685 tcg_gen_ext32s_tl(arg
, arg
);
6690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6695 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6700 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6704 goto cp0_unimplemented
;
6710 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6714 check_insn(ctx
, ISA_MIPS32R2
);
6715 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6719 check_insn(ctx
, ISA_MIPS32R2
);
6720 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6724 check_insn(ctx
, ISA_MIPS32R2
);
6725 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6729 check_insn(ctx
, ISA_MIPS32R2
);
6730 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6734 check_insn(ctx
, ISA_MIPS32R2
);
6735 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6740 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6744 goto cp0_unimplemented
;
6750 check_insn(ctx
, ISA_MIPS32R2
);
6751 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6755 goto cp0_unimplemented
;
6761 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6762 tcg_gen_ext32s_tl(arg
, arg
);
6767 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6777 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6778 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6782 goto cp0_unimplemented
;
6788 /* Mark as an IO operation because we read the time. */
6789 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6792 gen_helper_mfc0_count(arg
, cpu_env
);
6793 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6796 /* Break the TB to be able to take timer interrupts immediately
6797 after reading count. DISAS_STOP isn't sufficient, we need to
6798 ensure we break completely out of translated code. */
6799 gen_save_pc(ctx
->base
.pc_next
+ 4);
6800 ctx
->base
.is_jmp
= DISAS_EXIT
;
6803 /* 6,7 are implementation dependent */
6805 goto cp0_unimplemented
;
6811 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6812 tcg_gen_ext32s_tl(arg
, arg
);
6816 goto cp0_unimplemented
;
6822 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6825 /* 6,7 are implementation dependent */
6827 goto cp0_unimplemented
;
6833 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6837 check_insn(ctx
, ISA_MIPS32R2
);
6838 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6842 check_insn(ctx
, ISA_MIPS32R2
);
6843 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6847 check_insn(ctx
, ISA_MIPS32R2
);
6848 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6852 goto cp0_unimplemented
;
6858 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6862 goto cp0_unimplemented
;
6868 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6869 tcg_gen_ext32s_tl(arg
, arg
);
6873 goto cp0_unimplemented
;
6879 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6883 check_insn(ctx
, ISA_MIPS32R2
);
6884 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6885 tcg_gen_ext32s_tl(arg
, arg
);
6889 check_insn(ctx
, ISA_MIPS32R2
);
6890 CP0_CHECK(ctx
->cmgcr
);
6891 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6892 tcg_gen_ext32s_tl(arg
, arg
);
6896 goto cp0_unimplemented
;
6902 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6906 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6910 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6914 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6918 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6922 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6925 /* 6,7 are implementation dependent */
6927 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6931 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6935 goto cp0_unimplemented
;
6941 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6945 CP0_CHECK(ctx
->mrp
);
6946 gen_helper_mfc0_maar(arg
, cpu_env
);
6950 CP0_CHECK(ctx
->mrp
);
6951 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6955 goto cp0_unimplemented
;
6968 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6969 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6973 goto cp0_unimplemented
;
6986 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6987 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6991 goto cp0_unimplemented
;
6997 #if defined(TARGET_MIPS64)
6998 check_insn(ctx
, ISA_MIPS3
);
6999 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7000 tcg_gen_ext32s_tl(arg
, arg
);
7005 goto cp0_unimplemented
;
7009 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7010 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7013 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7017 goto cp0_unimplemented
;
7021 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7022 rn
= "'Diagnostic"; /* implementation dependent */
7027 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7031 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7032 rn
= "TraceControl";
7033 goto cp0_unimplemented
;
7035 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7036 rn
= "TraceControl2";
7037 goto cp0_unimplemented
;
7039 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7040 rn
= "UserTraceData";
7041 goto cp0_unimplemented
;
7043 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7045 goto cp0_unimplemented
;
7047 goto cp0_unimplemented
;
7054 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7055 tcg_gen_ext32s_tl(arg
, arg
);
7059 goto cp0_unimplemented
;
7065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7066 rn
= "Performance0";
7069 // gen_helper_mfc0_performance1(arg);
7070 rn
= "Performance1";
7071 goto cp0_unimplemented
;
7073 // gen_helper_mfc0_performance2(arg);
7074 rn
= "Performance2";
7075 goto cp0_unimplemented
;
7077 // gen_helper_mfc0_performance3(arg);
7078 rn
= "Performance3";
7079 goto cp0_unimplemented
;
7081 // gen_helper_mfc0_performance4(arg);
7082 rn
= "Performance4";
7083 goto cp0_unimplemented
;
7085 // gen_helper_mfc0_performance5(arg);
7086 rn
= "Performance5";
7087 goto cp0_unimplemented
;
7089 // gen_helper_mfc0_performance6(arg);
7090 rn
= "Performance6";
7091 goto cp0_unimplemented
;
7093 // gen_helper_mfc0_performance7(arg);
7094 rn
= "Performance7";
7095 goto cp0_unimplemented
;
7097 goto cp0_unimplemented
;
7103 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7107 goto cp0_unimplemented
;
7116 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7120 goto cp0_unimplemented
;
7130 TCGv_i64 tmp
= tcg_temp_new_i64();
7131 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7132 gen_move_low32(arg
, tmp
);
7133 tcg_temp_free_i64(tmp
);
7141 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7145 goto cp0_unimplemented
;
7154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7161 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7165 goto cp0_unimplemented
;
7171 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7172 tcg_gen_ext32s_tl(arg
, arg
);
7176 goto cp0_unimplemented
;
7183 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7192 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7193 tcg_gen_ld_tl(arg
, cpu_env
,
7194 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7195 tcg_gen_ext32s_tl(arg
, arg
);
7199 goto cp0_unimplemented
;
7203 goto cp0_unimplemented
;
7205 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7209 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7210 gen_mfc0_unimplemented(ctx
, arg
);
7213 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7215 const char *rn
= "invalid";
7218 check_insn(ctx
, ISA_MIPS32
);
7220 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7228 gen_helper_mtc0_index(cpu_env
, arg
);
7232 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7233 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7237 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7242 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7252 goto cp0_unimplemented
;
7262 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7263 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7268 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7272 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7273 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7277 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7278 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7282 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7283 tcg_gen_st_tl(arg
, cpu_env
,
7284 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7288 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7289 tcg_gen_st_tl(arg
, cpu_env
,
7290 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7291 rn
= "VPEScheFBack";
7294 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7295 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7299 goto cp0_unimplemented
;
7305 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7309 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7310 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7314 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7315 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7319 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7320 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7324 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7325 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7329 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7330 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7334 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7335 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7339 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7340 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7344 goto cp0_unimplemented
;
7350 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7356 rn
= "GlobalNumber";
7359 goto cp0_unimplemented
;
7365 gen_helper_mtc0_context(cpu_env
, arg
);
7369 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7370 rn
= "ContextConfig";
7371 goto cp0_unimplemented
;
7373 CP0_CHECK(ctx
->ulri
);
7374 tcg_gen_st_tl(arg
, cpu_env
,
7375 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7379 goto cp0_unimplemented
;
7385 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7389 check_insn(ctx
, ISA_MIPS32R2
);
7390 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7392 ctx
->base
.is_jmp
= DISAS_STOP
;
7396 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7401 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7406 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7411 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7416 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7421 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7425 goto cp0_unimplemented
;
7431 gen_helper_mtc0_wired(cpu_env
, arg
);
7435 check_insn(ctx
, ISA_MIPS32R2
);
7436 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7440 check_insn(ctx
, ISA_MIPS32R2
);
7441 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7445 check_insn(ctx
, ISA_MIPS32R2
);
7446 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7450 check_insn(ctx
, ISA_MIPS32R2
);
7451 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7455 check_insn(ctx
, ISA_MIPS32R2
);
7456 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7461 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7465 goto cp0_unimplemented
;
7471 check_insn(ctx
, ISA_MIPS32R2
);
7472 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7473 ctx
->base
.is_jmp
= DISAS_STOP
;
7477 goto cp0_unimplemented
;
7499 goto cp0_unimplemented
;
7505 gen_helper_mtc0_count(cpu_env
, arg
);
7508 /* 6,7 are implementation dependent */
7510 goto cp0_unimplemented
;
7516 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7520 goto cp0_unimplemented
;
7526 gen_helper_mtc0_compare(cpu_env
, arg
);
7529 /* 6,7 are implementation dependent */
7531 goto cp0_unimplemented
;
7537 save_cpu_state(ctx
, 1);
7538 gen_helper_mtc0_status(cpu_env
, arg
);
7539 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7540 gen_save_pc(ctx
->base
.pc_next
+ 4);
7541 ctx
->base
.is_jmp
= DISAS_EXIT
;
7545 check_insn(ctx
, ISA_MIPS32R2
);
7546 gen_helper_mtc0_intctl(cpu_env
, arg
);
7547 /* Stop translation as we may have switched the execution mode */
7548 ctx
->base
.is_jmp
= DISAS_STOP
;
7552 check_insn(ctx
, ISA_MIPS32R2
);
7553 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7554 /* Stop translation as we may have switched the execution mode */
7555 ctx
->base
.is_jmp
= DISAS_STOP
;
7559 check_insn(ctx
, ISA_MIPS32R2
);
7560 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7561 /* Stop translation as we may have switched the execution mode */
7562 ctx
->base
.is_jmp
= DISAS_STOP
;
7566 goto cp0_unimplemented
;
7572 save_cpu_state(ctx
, 1);
7573 gen_helper_mtc0_cause(cpu_env
, arg
);
7574 /* Stop translation as we may have triggered an interrupt.
7575 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7576 * translated code to check for pending interrupts. */
7577 gen_save_pc(ctx
->base
.pc_next
+ 4);
7578 ctx
->base
.is_jmp
= DISAS_EXIT
;
7582 goto cp0_unimplemented
;
7588 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7592 goto cp0_unimplemented
;
7602 check_insn(ctx
, ISA_MIPS32R2
);
7603 gen_helper_mtc0_ebase(cpu_env
, arg
);
7607 goto cp0_unimplemented
;
7613 gen_helper_mtc0_config0(cpu_env
, arg
);
7615 /* Stop translation as we may have switched the execution mode */
7616 ctx
->base
.is_jmp
= DISAS_STOP
;
7619 /* ignored, read only */
7623 gen_helper_mtc0_config2(cpu_env
, arg
);
7625 /* Stop translation as we may have switched the execution mode */
7626 ctx
->base
.is_jmp
= DISAS_STOP
;
7629 gen_helper_mtc0_config3(cpu_env
, arg
);
7631 /* Stop translation as we may have switched the execution mode */
7632 ctx
->base
.is_jmp
= DISAS_STOP
;
7635 gen_helper_mtc0_config4(cpu_env
, arg
);
7637 ctx
->base
.is_jmp
= DISAS_STOP
;
7640 gen_helper_mtc0_config5(cpu_env
, arg
);
7642 /* Stop translation as we may have switched the execution mode */
7643 ctx
->base
.is_jmp
= DISAS_STOP
;
7645 /* 6,7 are implementation dependent */
7655 rn
= "Invalid config selector";
7656 goto cp0_unimplemented
;
7662 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7666 CP0_CHECK(ctx
->mrp
);
7667 gen_helper_mtc0_maar(cpu_env
, arg
);
7671 CP0_CHECK(ctx
->mrp
);
7672 gen_helper_mtc0_maari(cpu_env
, arg
);
7676 goto cp0_unimplemented
;
7689 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7690 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7694 goto cp0_unimplemented
;
7707 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7708 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7712 goto cp0_unimplemented
;
7718 #if defined(TARGET_MIPS64)
7719 check_insn(ctx
, ISA_MIPS3
);
7720 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7725 goto cp0_unimplemented
;
7729 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7730 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7733 gen_helper_mtc0_framemask(cpu_env
, arg
);
7737 goto cp0_unimplemented
;
7742 rn
= "Diagnostic"; /* implementation dependent */
7747 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7748 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7749 gen_save_pc(ctx
->base
.pc_next
+ 4);
7750 ctx
->base
.is_jmp
= DISAS_EXIT
;
7754 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7755 rn
= "TraceControl";
7756 /* Stop translation as we may have switched the execution mode */
7757 ctx
->base
.is_jmp
= DISAS_STOP
;
7758 goto cp0_unimplemented
;
7760 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7761 rn
= "TraceControl2";
7762 /* Stop translation as we may have switched the execution mode */
7763 ctx
->base
.is_jmp
= DISAS_STOP
;
7764 goto cp0_unimplemented
;
7766 /* Stop translation as we may have switched the execution mode */
7767 ctx
->base
.is_jmp
= DISAS_STOP
;
7768 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7769 rn
= "UserTraceData";
7770 /* Stop translation as we may have switched the execution mode */
7771 ctx
->base
.is_jmp
= DISAS_STOP
;
7772 goto cp0_unimplemented
;
7774 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7775 /* Stop translation as we may have switched the execution mode */
7776 ctx
->base
.is_jmp
= DISAS_STOP
;
7778 goto cp0_unimplemented
;
7780 goto cp0_unimplemented
;
7787 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7791 goto cp0_unimplemented
;
7797 gen_helper_mtc0_performance0(cpu_env
, arg
);
7798 rn
= "Performance0";
7801 // gen_helper_mtc0_performance1(arg);
7802 rn
= "Performance1";
7803 goto cp0_unimplemented
;
7805 // gen_helper_mtc0_performance2(arg);
7806 rn
= "Performance2";
7807 goto cp0_unimplemented
;
7809 // gen_helper_mtc0_performance3(arg);
7810 rn
= "Performance3";
7811 goto cp0_unimplemented
;
7813 // gen_helper_mtc0_performance4(arg);
7814 rn
= "Performance4";
7815 goto cp0_unimplemented
;
7817 // gen_helper_mtc0_performance5(arg);
7818 rn
= "Performance5";
7819 goto cp0_unimplemented
;
7821 // gen_helper_mtc0_performance6(arg);
7822 rn
= "Performance6";
7823 goto cp0_unimplemented
;
7825 // gen_helper_mtc0_performance7(arg);
7826 rn
= "Performance7";
7827 goto cp0_unimplemented
;
7829 goto cp0_unimplemented
;
7835 gen_helper_mtc0_errctl(cpu_env
, arg
);
7836 ctx
->base
.is_jmp
= DISAS_STOP
;
7840 goto cp0_unimplemented
;
7853 goto cp0_unimplemented
;
7862 gen_helper_mtc0_taglo(cpu_env
, arg
);
7869 gen_helper_mtc0_datalo(cpu_env
, arg
);
7873 goto cp0_unimplemented
;
7882 gen_helper_mtc0_taghi(cpu_env
, arg
);
7889 gen_helper_mtc0_datahi(cpu_env
, arg
);
7894 goto cp0_unimplemented
;
7900 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7904 goto cp0_unimplemented
;
7911 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7920 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7921 tcg_gen_st_tl(arg
, cpu_env
,
7922 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7926 goto cp0_unimplemented
;
7930 goto cp0_unimplemented
;
7932 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7934 /* For simplicity assume that all writes can cause interrupts. */
7935 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7937 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7938 * translated code to check for pending interrupts. */
7939 gen_save_pc(ctx
->base
.pc_next
+ 4);
7940 ctx
->base
.is_jmp
= DISAS_EXIT
;
7945 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7948 #if defined(TARGET_MIPS64)
7949 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7951 const char *rn
= "invalid";
7954 check_insn(ctx
, ISA_MIPS64
);
7960 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7964 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7965 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7969 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7970 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7975 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7980 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7984 goto cp0_unimplemented
;
7990 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7991 gen_helper_mfc0_random(arg
, cpu_env
);
7995 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7996 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8000 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8001 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8005 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8010 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8011 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8015 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8016 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8020 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8021 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8022 rn
= "VPEScheFBack";
8025 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8026 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8030 goto cp0_unimplemented
;
8036 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8040 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8041 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8045 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8046 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8050 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8051 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8055 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8056 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8060 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8061 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8065 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8066 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8070 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8071 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8075 goto cp0_unimplemented
;
8081 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8087 rn
= "GlobalNumber";
8090 goto cp0_unimplemented
;
8096 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8100 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8101 rn
= "ContextConfig";
8102 goto cp0_unimplemented
;
8104 CP0_CHECK(ctx
->ulri
);
8105 tcg_gen_ld_tl(arg
, cpu_env
,
8106 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8110 goto cp0_unimplemented
;
8116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8120 check_insn(ctx
, ISA_MIPS32R2
);
8121 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8126 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8131 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8136 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8141 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8146 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8151 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8155 goto cp0_unimplemented
;
8161 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8165 check_insn(ctx
, ISA_MIPS32R2
);
8166 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8170 check_insn(ctx
, ISA_MIPS32R2
);
8171 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8175 check_insn(ctx
, ISA_MIPS32R2
);
8176 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8180 check_insn(ctx
, ISA_MIPS32R2
);
8181 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8185 check_insn(ctx
, ISA_MIPS32R2
);
8186 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8191 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8195 goto cp0_unimplemented
;
8201 check_insn(ctx
, ISA_MIPS32R2
);
8202 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8206 goto cp0_unimplemented
;
8212 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8217 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8227 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8228 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8232 goto cp0_unimplemented
;
8238 /* Mark as an IO operation because we read the time. */
8239 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8242 gen_helper_mfc0_count(arg
, cpu_env
);
8243 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8246 /* Break the TB to be able to take timer interrupts immediately
8247 after reading count. DISAS_STOP isn't sufficient, we need to
8248 ensure we break completely out of translated code. */
8249 gen_save_pc(ctx
->base
.pc_next
+ 4);
8250 ctx
->base
.is_jmp
= DISAS_EXIT
;
8253 /* 6,7 are implementation dependent */
8255 goto cp0_unimplemented
;
8261 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8265 goto cp0_unimplemented
;
8271 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8274 /* 6,7 are implementation dependent */
8276 goto cp0_unimplemented
;
8282 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8286 check_insn(ctx
, ISA_MIPS32R2
);
8287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8291 check_insn(ctx
, ISA_MIPS32R2
);
8292 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8296 check_insn(ctx
, ISA_MIPS32R2
);
8297 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8301 goto cp0_unimplemented
;
8307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8311 goto cp0_unimplemented
;
8317 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8321 goto cp0_unimplemented
;
8327 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8331 check_insn(ctx
, ISA_MIPS32R2
);
8332 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8336 check_insn(ctx
, ISA_MIPS32R2
);
8337 CP0_CHECK(ctx
->cmgcr
);
8338 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8342 goto cp0_unimplemented
;
8348 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8371 /* 6,7 are implementation dependent */
8373 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8377 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8381 goto cp0_unimplemented
;
8387 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8391 CP0_CHECK(ctx
->mrp
);
8392 gen_helper_dmfc0_maar(arg
, cpu_env
);
8396 CP0_CHECK(ctx
->mrp
);
8397 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8401 goto cp0_unimplemented
;
8414 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8415 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8419 goto cp0_unimplemented
;
8432 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8433 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8437 goto cp0_unimplemented
;
8443 check_insn(ctx
, ISA_MIPS3
);
8444 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8448 goto cp0_unimplemented
;
8452 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8453 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8460 goto cp0_unimplemented
;
8464 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8465 rn
= "'Diagnostic"; /* implementation dependent */
8470 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8474 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8475 rn
= "TraceControl";
8476 goto cp0_unimplemented
;
8478 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8479 rn
= "TraceControl2";
8480 goto cp0_unimplemented
;
8482 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8483 rn
= "UserTraceData";
8484 goto cp0_unimplemented
;
8486 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8488 goto cp0_unimplemented
;
8490 goto cp0_unimplemented
;
8497 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8501 goto cp0_unimplemented
;
8507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8508 rn
= "Performance0";
8511 // gen_helper_dmfc0_performance1(arg);
8512 rn
= "Performance1";
8513 goto cp0_unimplemented
;
8515 // gen_helper_dmfc0_performance2(arg);
8516 rn
= "Performance2";
8517 goto cp0_unimplemented
;
8519 // gen_helper_dmfc0_performance3(arg);
8520 rn
= "Performance3";
8521 goto cp0_unimplemented
;
8523 // gen_helper_dmfc0_performance4(arg);
8524 rn
= "Performance4";
8525 goto cp0_unimplemented
;
8527 // gen_helper_dmfc0_performance5(arg);
8528 rn
= "Performance5";
8529 goto cp0_unimplemented
;
8531 // gen_helper_dmfc0_performance6(arg);
8532 rn
= "Performance6";
8533 goto cp0_unimplemented
;
8535 // gen_helper_dmfc0_performance7(arg);
8536 rn
= "Performance7";
8537 goto cp0_unimplemented
;
8539 goto cp0_unimplemented
;
8545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8549 goto cp0_unimplemented
;
8559 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8563 goto cp0_unimplemented
;
8572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8579 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8583 goto cp0_unimplemented
;
8592 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8603 goto cp0_unimplemented
;
8609 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8613 goto cp0_unimplemented
;
8620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8629 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8630 tcg_gen_ld_tl(arg
, cpu_env
,
8631 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8635 goto cp0_unimplemented
;
8639 goto cp0_unimplemented
;
8641 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8645 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8646 gen_mfc0_unimplemented(ctx
, arg
);
8649 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8651 const char *rn
= "invalid";
8654 check_insn(ctx
, ISA_MIPS64
);
8656 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8664 gen_helper_mtc0_index(cpu_env
, arg
);
8668 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8669 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8688 goto cp0_unimplemented
;
8698 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8699 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8703 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8704 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8708 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8709 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8713 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8714 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8718 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8719 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8723 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8724 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8725 rn
= "VPEScheFBack";
8728 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8729 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8733 goto cp0_unimplemented
;
8739 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8743 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8744 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8748 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8749 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8753 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8754 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8758 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8759 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8763 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8764 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8768 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8769 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8773 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8774 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8778 goto cp0_unimplemented
;
8784 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8790 rn
= "GlobalNumber";
8793 goto cp0_unimplemented
;
8799 gen_helper_mtc0_context(cpu_env
, arg
);
8803 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8804 rn
= "ContextConfig";
8805 goto cp0_unimplemented
;
8807 CP0_CHECK(ctx
->ulri
);
8808 tcg_gen_st_tl(arg
, cpu_env
,
8809 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8813 goto cp0_unimplemented
;
8819 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8823 check_insn(ctx
, ISA_MIPS32R2
);
8824 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8829 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8834 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8839 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8844 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8849 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8854 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8858 goto cp0_unimplemented
;
8864 gen_helper_mtc0_wired(cpu_env
, arg
);
8868 check_insn(ctx
, ISA_MIPS32R2
);
8869 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8873 check_insn(ctx
, ISA_MIPS32R2
);
8874 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8878 check_insn(ctx
, ISA_MIPS32R2
);
8879 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8883 check_insn(ctx
, ISA_MIPS32R2
);
8884 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8888 check_insn(ctx
, ISA_MIPS32R2
);
8889 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8894 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8898 goto cp0_unimplemented
;
8904 check_insn(ctx
, ISA_MIPS32R2
);
8905 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8906 ctx
->base
.is_jmp
= DISAS_STOP
;
8910 goto cp0_unimplemented
;
8932 goto cp0_unimplemented
;
8938 gen_helper_mtc0_count(cpu_env
, arg
);
8941 /* 6,7 are implementation dependent */
8943 goto cp0_unimplemented
;
8945 /* Stop translation as we may have switched the execution mode */
8946 ctx
->base
.is_jmp
= DISAS_STOP
;
8951 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8955 goto cp0_unimplemented
;
8961 gen_helper_mtc0_compare(cpu_env
, arg
);
8964 /* 6,7 are implementation dependent */
8966 goto cp0_unimplemented
;
8968 /* Stop translation as we may have switched the execution mode */
8969 ctx
->base
.is_jmp
= DISAS_STOP
;
8974 save_cpu_state(ctx
, 1);
8975 gen_helper_mtc0_status(cpu_env
, arg
);
8976 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8977 gen_save_pc(ctx
->base
.pc_next
+ 4);
8978 ctx
->base
.is_jmp
= DISAS_EXIT
;
8982 check_insn(ctx
, ISA_MIPS32R2
);
8983 gen_helper_mtc0_intctl(cpu_env
, arg
);
8984 /* Stop translation as we may have switched the execution mode */
8985 ctx
->base
.is_jmp
= DISAS_STOP
;
8989 check_insn(ctx
, ISA_MIPS32R2
);
8990 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8991 /* Stop translation as we may have switched the execution mode */
8992 ctx
->base
.is_jmp
= DISAS_STOP
;
8996 check_insn(ctx
, ISA_MIPS32R2
);
8997 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8998 /* Stop translation as we may have switched the execution mode */
8999 ctx
->base
.is_jmp
= DISAS_STOP
;
9003 goto cp0_unimplemented
;
9009 save_cpu_state(ctx
, 1);
9010 gen_helper_mtc0_cause(cpu_env
, arg
);
9011 /* Stop translation as we may have triggered an interrupt.
9012 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9013 * translated code to check for pending interrupts. */
9014 gen_save_pc(ctx
->base
.pc_next
+ 4);
9015 ctx
->base
.is_jmp
= DISAS_EXIT
;
9019 goto cp0_unimplemented
;
9025 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9029 goto cp0_unimplemented
;
9039 check_insn(ctx
, ISA_MIPS32R2
);
9040 gen_helper_mtc0_ebase(cpu_env
, arg
);
9044 goto cp0_unimplemented
;
9050 gen_helper_mtc0_config0(cpu_env
, arg
);
9052 /* Stop translation as we may have switched the execution mode */
9053 ctx
->base
.is_jmp
= DISAS_STOP
;
9056 /* ignored, read only */
9060 gen_helper_mtc0_config2(cpu_env
, arg
);
9062 /* Stop translation as we may have switched the execution mode */
9063 ctx
->base
.is_jmp
= DISAS_STOP
;
9066 gen_helper_mtc0_config3(cpu_env
, arg
);
9068 /* Stop translation as we may have switched the execution mode */
9069 ctx
->base
.is_jmp
= DISAS_STOP
;
9072 /* currently ignored */
9076 gen_helper_mtc0_config5(cpu_env
, arg
);
9078 /* Stop translation as we may have switched the execution mode */
9079 ctx
->base
.is_jmp
= DISAS_STOP
;
9081 /* 6,7 are implementation dependent */
9083 rn
= "Invalid config selector";
9084 goto cp0_unimplemented
;
9090 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9094 CP0_CHECK(ctx
->mrp
);
9095 gen_helper_mtc0_maar(cpu_env
, arg
);
9099 CP0_CHECK(ctx
->mrp
);
9100 gen_helper_mtc0_maari(cpu_env
, arg
);
9104 goto cp0_unimplemented
;
9117 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9118 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9122 goto cp0_unimplemented
;
9135 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9136 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9140 goto cp0_unimplemented
;
9146 check_insn(ctx
, ISA_MIPS3
);
9147 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9151 goto cp0_unimplemented
;
9155 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9156 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9159 gen_helper_mtc0_framemask(cpu_env
, arg
);
9163 goto cp0_unimplemented
;
9168 rn
= "Diagnostic"; /* implementation dependent */
9173 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9174 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9175 gen_save_pc(ctx
->base
.pc_next
+ 4);
9176 ctx
->base
.is_jmp
= DISAS_EXIT
;
9180 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9181 /* Stop translation as we may have switched the execution mode */
9182 ctx
->base
.is_jmp
= DISAS_STOP
;
9183 rn
= "TraceControl";
9184 goto cp0_unimplemented
;
9186 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9187 /* Stop translation as we may have switched the execution mode */
9188 ctx
->base
.is_jmp
= DISAS_STOP
;
9189 rn
= "TraceControl2";
9190 goto cp0_unimplemented
;
9192 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9193 /* Stop translation as we may have switched the execution mode */
9194 ctx
->base
.is_jmp
= DISAS_STOP
;
9195 rn
= "UserTraceData";
9196 goto cp0_unimplemented
;
9198 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9199 /* Stop translation as we may have switched the execution mode */
9200 ctx
->base
.is_jmp
= DISAS_STOP
;
9202 goto cp0_unimplemented
;
9204 goto cp0_unimplemented
;
9211 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9215 goto cp0_unimplemented
;
9221 gen_helper_mtc0_performance0(cpu_env
, arg
);
9222 rn
= "Performance0";
9225 // gen_helper_mtc0_performance1(cpu_env, arg);
9226 rn
= "Performance1";
9227 goto cp0_unimplemented
;
9229 // gen_helper_mtc0_performance2(cpu_env, arg);
9230 rn
= "Performance2";
9231 goto cp0_unimplemented
;
9233 // gen_helper_mtc0_performance3(cpu_env, arg);
9234 rn
= "Performance3";
9235 goto cp0_unimplemented
;
9237 // gen_helper_mtc0_performance4(cpu_env, arg);
9238 rn
= "Performance4";
9239 goto cp0_unimplemented
;
9241 // gen_helper_mtc0_performance5(cpu_env, arg);
9242 rn
= "Performance5";
9243 goto cp0_unimplemented
;
9245 // gen_helper_mtc0_performance6(cpu_env, arg);
9246 rn
= "Performance6";
9247 goto cp0_unimplemented
;
9249 // gen_helper_mtc0_performance7(cpu_env, arg);
9250 rn
= "Performance7";
9251 goto cp0_unimplemented
;
9253 goto cp0_unimplemented
;
9259 gen_helper_mtc0_errctl(cpu_env
, arg
);
9260 ctx
->base
.is_jmp
= DISAS_STOP
;
9264 goto cp0_unimplemented
;
9277 goto cp0_unimplemented
;
9286 gen_helper_mtc0_taglo(cpu_env
, arg
);
9293 gen_helper_mtc0_datalo(cpu_env
, arg
);
9297 goto cp0_unimplemented
;
9306 gen_helper_mtc0_taghi(cpu_env
, arg
);
9313 gen_helper_mtc0_datahi(cpu_env
, arg
);
9318 goto cp0_unimplemented
;
9324 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9328 goto cp0_unimplemented
;
9335 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9344 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9345 tcg_gen_st_tl(arg
, cpu_env
,
9346 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9350 goto cp0_unimplemented
;
9354 goto cp0_unimplemented
;
9356 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9358 /* For simplicity assume that all writes can cause interrupts. */
9359 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9361 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9362 * translated code to check for pending interrupts. */
9363 gen_save_pc(ctx
->base
.pc_next
+ 4);
9364 ctx
->base
.is_jmp
= DISAS_EXIT
;
9369 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9371 #endif /* TARGET_MIPS64 */
9373 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9374 int u
, int sel
, int h
)
9376 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9377 TCGv t0
= tcg_temp_local_new();
9379 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9380 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9381 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9382 tcg_gen_movi_tl(t0
, -1);
9383 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9384 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9385 tcg_gen_movi_tl(t0
, -1);
9391 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9394 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9404 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9407 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9410 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9413 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9416 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9419 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9422 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9425 gen_mfc0(ctx
, t0
, rt
, sel
);
9432 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9435 gen_mfc0(ctx
, t0
, rt
, sel
);
9441 gen_helper_mftc0_status(t0
, cpu_env
);
9444 gen_mfc0(ctx
, t0
, rt
, sel
);
9450 gen_helper_mftc0_cause(t0
, cpu_env
);
9460 gen_helper_mftc0_epc(t0
, cpu_env
);
9470 gen_helper_mftc0_ebase(t0
, cpu_env
);
9487 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9497 gen_helper_mftc0_debug(t0
, cpu_env
);
9500 gen_mfc0(ctx
, t0
, rt
, sel
);
9505 gen_mfc0(ctx
, t0
, rt
, sel
);
9507 } else switch (sel
) {
9508 /* GPR registers. */
9510 gen_helper_1e0i(mftgpr
, t0
, rt
);
9512 /* Auxiliary CPU registers */
9516 gen_helper_1e0i(mftlo
, t0
, 0);
9519 gen_helper_1e0i(mfthi
, t0
, 0);
9522 gen_helper_1e0i(mftacx
, t0
, 0);
9525 gen_helper_1e0i(mftlo
, t0
, 1);
9528 gen_helper_1e0i(mfthi
, t0
, 1);
9531 gen_helper_1e0i(mftacx
, t0
, 1);
9534 gen_helper_1e0i(mftlo
, t0
, 2);
9537 gen_helper_1e0i(mfthi
, t0
, 2);
9540 gen_helper_1e0i(mftacx
, t0
, 2);
9543 gen_helper_1e0i(mftlo
, t0
, 3);
9546 gen_helper_1e0i(mfthi
, t0
, 3);
9549 gen_helper_1e0i(mftacx
, t0
, 3);
9552 gen_helper_mftdsp(t0
, cpu_env
);
9558 /* Floating point (COP1). */
9560 /* XXX: For now we support only a single FPU context. */
9562 TCGv_i32 fp0
= tcg_temp_new_i32();
9564 gen_load_fpr32(ctx
, fp0
, rt
);
9565 tcg_gen_ext_i32_tl(t0
, fp0
);
9566 tcg_temp_free_i32(fp0
);
9568 TCGv_i32 fp0
= tcg_temp_new_i32();
9570 gen_load_fpr32h(ctx
, fp0
, rt
);
9571 tcg_gen_ext_i32_tl(t0
, fp0
);
9572 tcg_temp_free_i32(fp0
);
9576 /* XXX: For now we support only a single FPU context. */
9577 gen_helper_1e0i(cfc1
, t0
, rt
);
9579 /* COP2: Not implemented. */
9586 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9587 gen_store_gpr(t0
, rd
);
9593 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9594 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_mttr: emit TCG for the MT ASE MTTR instruction — move a GPR value
 * (rt) into a register of another thread context (rd/sel selects the
 * target CP0/GPR/DSP/FPU register, u/h pick the register file half).
 *
 * NOTE(review): this text is a garbled extraction — the embedded original
 * line numbers jump (9606 -> 9608, 9750 -> 9756, ...), so case labels,
 * braces and whole statements have been dropped.  Do not edit by hand;
 * restore from upstream QEMU target/mips/translate.c before changing logic.
 */
9597 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9598 int u
, int sel
, int h
)
/* other_tc: index of the target thread context taken from VPEControl.TargTC. */
9600 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
/* Local temp is "local" so it survives the brcond-style control flow below. */
9601 TCGv t0
= tcg_temp_local_new();
9603 gen_load_gpr(t0
, rt
);
/*
 * Guard: without MVP capability, cross-VPE access is only legal when the
 * target TC is bound to the same VPE as the current TC (TCBind.CurVPE
 * comparison below).  The taken-branch bodies were dropped by extraction.
 */
9604 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9605 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9606 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
/* Also reject a TargTC beyond the number of TCs advertised in MVPConf0.PTC. */
9608 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9609 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
/*
 * CP0 target (u == 0): dispatch on rd/sel to the per-register mttc0_*
 * helpers; the surrounding switch/case labels are missing from this view.
 */
9616 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9619 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9629 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9632 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9635 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9638 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9641 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9644 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9647 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
/* Fallback for unhandled sel values: plain mtc0 on the current context. */
9650 gen_mtc0(ctx
, t0
, rd
, sel
);
9657 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9660 gen_mtc0(ctx
, t0
, rd
, sel
);
9666 gen_helper_mttc0_status(cpu_env
, t0
);
9669 gen_mtc0(ctx
, t0
, rd
, sel
);
9675 gen_helper_mttc0_cause(cpu_env
, t0
);
9685 gen_helper_mttc0_ebase(cpu_env
, t0
);
9695 gen_helper_mttc0_debug(cpu_env
, t0
);
9698 gen_mtc0(ctx
, t0
, rd
, sel
);
9703 gen_mtc0(ctx
, t0
, rd
, sel
);
/* Non-CP0 targets (u != 0): sel chooses GPR / LO-HI-ACX / DSP / FPU. */
9705 } else switch (sel
) {
9706 /* GPR registers. */
9708 gen_helper_0e1i(mttgpr
, t0
, rd
);
9710 /* Auxiliary CPU registers */
/* LO/HI/ACX for DSP accumulators 0..3; the selecting case labels are missing. */
9714 gen_helper_0e1i(mttlo
, t0
, 0);
9717 gen_helper_0e1i(mtthi
, t0
, 0);
9720 gen_helper_0e1i(mttacx
, t0
, 0);
9723 gen_helper_0e1i(mttlo
, t0
, 1);
9726 gen_helper_0e1i(mtthi
, t0
, 1);
9729 gen_helper_0e1i(mttacx
, t0
, 1);
9732 gen_helper_0e1i(mttlo
, t0
, 2);
9735 gen_helper_0e1i(mtthi
, t0
, 2);
9738 gen_helper_0e1i(mttacx
, t0
, 2);
9741 gen_helper_0e1i(mttlo
, t0
, 3);
9744 gen_helper_0e1i(mtthi
, t0
, 3);
9747 gen_helper_0e1i(mttacx
, t0
, 3);
9750 gen_helper_mttdsp(cpu_env
, t0
);
9756 /* Floating point (COP1). */
9758 /* XXX: For now we support only a single FPU context. */
/* h == 0: store the low 32 bits of t0 into FPR rd. */
9760 TCGv_i32 fp0
= tcg_temp_new_i32();
9762 tcg_gen_trunc_tl_i32(fp0
, t0
);
9763 gen_store_fpr32(ctx
, fp0
, rd
);
9764 tcg_temp_free_i32(fp0
);
/* h != 0: store into the high half of FPR rd instead. */
9766 TCGv_i32 fp0
= tcg_temp_new_i32();
9768 tcg_gen_trunc_tl_i32(fp0
, t0
);
9769 gen_store_fpr32h(ctx
, fp0
, rd
);
9770 tcg_temp_free_i32(fp0
);
9774 /* XXX: For now we support only a single FPU context. */
/* FP control register write goes through the ctc1 helper. */
9776 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9778 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9779 tcg_temp_free_i32(fs_tmp
);
9781 /* Stop translation as we may have changed hflags */
9782 ctx
->base
.is_jmp
= DISAS_STOP
;
9784 /* COP2: Not implemented. */
9791 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
/* die-label path: unsupported combination -> Reserved Instruction. */
9797 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9798 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_cp0: decode and emit TCG for a CP0 (coprocessor 0) instruction:
 * MFC0/MTC0, DMFC0/DMTC0 (MIPS64), MFHC0/MTHC0, MFTR/MTTR (MT ASE),
 * TLB maintenance ops, ERET/ERETNC, DERET and WAIT.
 *
 * NOTE(review): garbled extraction — the opc switch, its case labels and
 * many braces were dropped (embedded line numbers jump 9820 -> 9825,
 * 9841 -> 9853, ...).  Restore from upstream QEMU before modifying.
 */
9801 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9803 const char *opn
= "ldst";
9805 check_cp0_enabled(ctx
);
/* MFC0: read CP0 register (rd, sel = opcode & 7) into GPR rt. */
9812 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
/* MTC0: write GPR rt into CP0 register (rd, sel). */
9817 TCGv t0
= tcg_temp_new();
9819 gen_load_gpr(t0
, rt
);
9820 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9825 #if defined(TARGET_MIPS64)
/* DMFC0 / DMTC0: 64-bit variants, MIPS III and up only. */
9827 check_insn(ctx
, ISA_MIPS3
);
9832 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9836 check_insn(ctx
, ISA_MIPS3
);
9838 TCGv t0
= tcg_temp_new();
9840 gen_load_gpr(t0
, rt
);
9841 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFHC0 / MTHC0: access the upper half of extended-width CP0 registers. */
9853 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9859 TCGv t0
= tcg_temp_new();
9860 gen_load_gpr(t0
, rt
);
9861 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFTR / MTTR (MT ASE): u = bit 5, sel = bits 0-2, h = bit 4 of the opcode. */
9867 check_cp0_enabled(ctx
);
9872 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9873 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9877 check_cp0_enabled(ctx
);
9878 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9879 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
/*
 * TLB ops: each checks that the CPU model installed the corresponding
 * helper_tlb* hook before emitting the call (the goto-die bodies are
 * missing from this extraction).
 */
9884 if (!env
->tlb
->helper_tlbwi
)
9886 gen_helper_tlbwi(cpu_env
);
9891 if (!env
->tlb
->helper_tlbinv
) {
9894 gen_helper_tlbinv(cpu_env
);
9895 } /* treat as nop if TLBINV not supported */
9900 if (!env
->tlb
->helper_tlbinvf
) {
9903 gen_helper_tlbinvf(cpu_env
);
9904 } /* treat as nop if TLBINV not supported */
9908 if (!env
->tlb
->helper_tlbwr
)
9910 gen_helper_tlbwr(cpu_env
);
9914 if (!env
->tlb
->helper_tlbp
)
9916 gen_helper_tlbp(cpu_env
);
9920 if (!env
->tlb
->helper_tlbr
)
9922 gen_helper_tlbr(cpu_env
);
9924 case OPC_ERET
: /* OPC_ERETNC */
/* R6 forbids ERET in a branch delay / forbidden slot. */
9925 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9926 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
/*
 * ERETNC is distinguished from ERET by one opcode bit whose position
 * depends on whether we are in MIPS16/microMIPS mode (bit 16 vs 6).
 */
9929 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9930 if (ctx
->opcode
& (1 << bit_shift
)) {
9933 check_insn(ctx
, ISA_MIPS32R5
);
9934 gen_helper_eretnc(cpu_env
);
9938 check_insn(ctx
, ISA_MIPS2
);
9939 gen_helper_eret(cpu_env
);
/* ERET changes privilege/PC: end the TB and return to the main loop. */
9941 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* DERET: only legal in Debug Mode; RI otherwise. */
9946 check_insn(ctx
, ISA_MIPS32
);
9947 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9948 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9951 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9953 generate_exception_end(ctx
, EXCP_RI
);
9955 gen_helper_deret(cpu_env
);
9956 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* WAIT: halt until interrupt. */
9961 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
9962 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9963 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9966 /* If we get an exception, we want to restart at next instruction */
9967 ctx
->base
.pc_next
+= 4;
9968 save_cpu_state(ctx
, 1);
9969 ctx
->base
.pc_next
-= 4;
9970 gen_helper_wait(cpu_env
);
9971 ctx
->base
.is_jmp
= DISAS_NORETURN
;
/* Unknown CP0 opcode -> Reserved Instruction. */
9976 generate_exception_end(ctx
, EXCP_RI
);
9979 (void)opn
; /* avoid a compiler warning */
9981 #endif /* !CONFIG_USER_ONLY */
9983 /* CP1 Branches (before delay slot) */
/*
 * gen_compute_branch1: pre-R6 CP1 conditional branches (BC1F/BC1T/
 * BC1FL/BC1TL and the paired/quad BC1ANY* variants).  Extracts the FP
 * condition-code bit(s) from FCR31, combines them per the opcode, and
 * leaves the result in the global `bcond` for the delay-slot machinery.
 *
 * NOTE(review): garbled extraction — the op switch's case labels and
 * braces are missing; each tcg sequence below corresponds to one branch
 * flavor.  Restore from upstream QEMU before changing logic.
 */
9984 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9985 int32_t cc
, int32_t offset
)
9987 target_ulong btarget
;
9988 TCGv_i32 t0
= tcg_temp_new_i32();
/* R6 removed these opcodes; also illegal in a delay/forbidden slot. */
9990 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9991 generate_exception_end(ctx
, EXCP_RI
);
/* cc != 0 requires the multi-CC FPU of MIPS IV / MIPS32. */
9996 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
9998 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
/* BC1F: branch if CC bit is 0 (invert, mask to 1 bit, widen into bcond). */
10002 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10003 tcg_gen_not_i32(t0
, t0
);
10004 tcg_gen_andi_i32(t0
, t0
, 1);
10005 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1FL: same condition, "likely" form (delay slot nullified if not taken). */
10008 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10009 tcg_gen_not_i32(t0
, t0
);
10010 tcg_gen_andi_i32(t0
, t0
, 1);
10011 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1T: branch if CC bit is 1. */
10014 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10015 tcg_gen_andi_i32(t0
, t0
, 1);
10016 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1TL: likely form of BC1T; the BL flag marks the likely semantics. */
10019 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10020 tcg_gen_andi_i32(t0
, t0
, 1);
10021 tcg_gen_extu_i32_tl(bcond
, t0
);
10023 ctx
->hflags
|= MIPS_HFLAG_BL
;
/* BC1ANY2F: branch if either of cc, cc+1 is 0 -> NAND of the two bits. */
10027 TCGv_i32 t1
= tcg_temp_new_i32();
10028 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10029 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10030 tcg_gen_nand_i32(t0
, t0
, t1
);
10031 tcg_temp_free_i32(t1
);
10032 tcg_gen_andi_i32(t0
, t0
, 1);
10033 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY2T: branch if either of cc, cc+1 is 1 -> OR of the two bits. */
10038 TCGv_i32 t1
= tcg_temp_new_i32();
10039 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10040 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10041 tcg_gen_or_i32(t0
, t0
, t1
);
10042 tcg_temp_free_i32(t1
);
10043 tcg_gen_andi_i32(t0
, t0
, 1);
10044 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY4F: branch if any of cc..cc+3 is 0 -> NAND of the AND of all four. */
10049 TCGv_i32 t1
= tcg_temp_new_i32();
10050 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10051 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10052 tcg_gen_and_i32(t0
, t0
, t1
);
10053 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10054 tcg_gen_and_i32(t0
, t0
, t1
);
10055 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10056 tcg_gen_nand_i32(t0
, t0
, t1
);
10057 tcg_temp_free_i32(t1
);
10058 tcg_gen_andi_i32(t0
, t0
, 1);
10059 tcg_gen_extu_i32_tl(bcond
, t0
);
/* BC1ANY4T: branch if any of cc..cc+3 is 1 -> OR of all four bits. */
10064 TCGv_i32 t1
= tcg_temp_new_i32();
10065 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10066 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10067 tcg_gen_or_i32(t0
, t0
, t1
);
10068 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10069 tcg_gen_or_i32(t0
, t0
, t1
);
10070 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10071 tcg_gen_or_i32(t0
, t0
, t1
);
10072 tcg_temp_free_i32(t1
);
10073 tcg_gen_andi_i32(t0
, t0
, 1);
10074 tcg_gen_extu_i32_tl(bcond
, t0
);
/* Mark this as a conditional branch for the delay-slot handling. */
10077 ctx
->hflags
|= MIPS_HFLAG_BC
;
10080 MIPS_INVAL("cp1 cond branch");
10081 generate_exception_end(ctx
, EXCP_RI
);
/* Record the target and the (32-bit) delay-slot size for the epilogue. */
10084 ctx
->btarget
= btarget
;
10085 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10087 tcg_temp_free_i32(t0
);
10090 /* R6 CP1 Branches */
/*
 * gen_compute_branch1_r6: MIPS R6 CP1 branches (BC1EQZ/BC1NEZ).  The
 * condition is bit 0 of FPR ft (not an FCR31 condition code as pre-R6);
 * the bit is optionally inverted and left in `bcond`.
 *
 * NOTE(review): garbled extraction — case labels, `#endif`, and braces
 * are missing (embedded numbers jump 10101 -> 10103, 10115 -> 10118).
 */
10091 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10092 int32_t ft
, int32_t offset
,
10093 int delayslot_size
)
10095 target_ulong btarget
;
10096 TCGv_i64 t0
= tcg_temp_new_i64();
/* Branch in a delay / forbidden slot is a Reserved Instruction on R6. */
10098 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10099 #ifdef MIPS_DEBUG_DISAS
10100 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10101 "\n", ctx
->base
.pc_next
);
10103 generate_exception_end(ctx
, EXCP_RI
);
/* Condition = bit 0 of the 64-bit view of FPR ft. */
10107 gen_load_fpr64(ctx
, t0
, ft
);
10108 tcg_gen_andi_i64(t0
, t0
, 1);
10110 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* BC1EQZ: invert the bit so bcond is 1 when the FPR bit is 0. */
10114 tcg_gen_xori_i64(t0
, t0
, 1);
10115 ctx
->hflags
|= MIPS_HFLAG_BC
;
/* BC1NEZ: bit used as-is. */
10118 /* t0 already set */
10119 ctx
->hflags
|= MIPS_HFLAG_BC
;
10122 MIPS_INVAL("cp1 cond branch");
10123 generate_exception_end(ctx
, EXCP_RI
);
10127 tcg_gen_trunc_i64_tl(bcond
, t0
);
10129 ctx
->btarget
= btarget
;
/* Record whether the following slot is 16 or 32 bits wide. */
10131 switch (delayslot_size
) {
10133 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10136 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10141 tcg_temp_free_i64(t0
);
10144 /* Coprocessor 1 (FPU) */
/* Build a CP1 opcode key: format field (bits 25:21) | function (bits 5:0). */
10146 #define FOP(func, fmt) (((fmt) << 21) | (func))
10149 OPC_ADD_S
= FOP(0, FMT_S
),
10150 OPC_SUB_S
= FOP(1, FMT_S
),
10151 OPC_MUL_S
= FOP(2, FMT_S
),
10152 OPC_DIV_S
= FOP(3, FMT_S
),
10153 OPC_SQRT_S
= FOP(4, FMT_S
),
10154 OPC_ABS_S
= FOP(5, FMT_S
),
10155 OPC_MOV_S
= FOP(6, FMT_S
),
10156 OPC_NEG_S
= FOP(7, FMT_S
),
10157 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10158 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10159 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10160 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10161 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10162 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10163 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10164 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10165 OPC_SEL_S
= FOP(16, FMT_S
),
10166 OPC_MOVCF_S
= FOP(17, FMT_S
),
10167 OPC_MOVZ_S
= FOP(18, FMT_S
),
10168 OPC_MOVN_S
= FOP(19, FMT_S
),
10169 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10170 OPC_RECIP_S
= FOP(21, FMT_S
),
10171 OPC_RSQRT_S
= FOP(22, FMT_S
),
10172 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10173 OPC_MADDF_S
= FOP(24, FMT_S
),
10174 OPC_MSUBF_S
= FOP(25, FMT_S
),
10175 OPC_RINT_S
= FOP(26, FMT_S
),
10176 OPC_CLASS_S
= FOP(27, FMT_S
),
10177 OPC_MIN_S
= FOP(28, FMT_S
),
10178 OPC_RECIP2_S
= FOP(28, FMT_S
),
10179 OPC_MINA_S
= FOP(29, FMT_S
),
10180 OPC_RECIP1_S
= FOP(29, FMT_S
),
10181 OPC_MAX_S
= FOP(30, FMT_S
),
10182 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10183 OPC_MAXA_S
= FOP(31, FMT_S
),
10184 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10185 OPC_CVT_D_S
= FOP(33, FMT_S
),
10186 OPC_CVT_W_S
= FOP(36, FMT_S
),
10187 OPC_CVT_L_S
= FOP(37, FMT_S
),
10188 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10189 OPC_CMP_F_S
= FOP (48, FMT_S
),
10190 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10191 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10192 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10193 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10194 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10195 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10196 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10197 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10198 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10199 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10200 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10201 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10202 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10203 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10204 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10206 OPC_ADD_D
= FOP(0, FMT_D
),
10207 OPC_SUB_D
= FOP(1, FMT_D
),
10208 OPC_MUL_D
= FOP(2, FMT_D
),
10209 OPC_DIV_D
= FOP(3, FMT_D
),
10210 OPC_SQRT_D
= FOP(4, FMT_D
),
10211 OPC_ABS_D
= FOP(5, FMT_D
),
10212 OPC_MOV_D
= FOP(6, FMT_D
),
10213 OPC_NEG_D
= FOP(7, FMT_D
),
10214 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10215 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10216 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10217 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10218 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10219 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10220 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10221 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10222 OPC_SEL_D
= FOP(16, FMT_D
),
10223 OPC_MOVCF_D
= FOP(17, FMT_D
),
10224 OPC_MOVZ_D
= FOP(18, FMT_D
),
10225 OPC_MOVN_D
= FOP(19, FMT_D
),
10226 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10227 OPC_RECIP_D
= FOP(21, FMT_D
),
10228 OPC_RSQRT_D
= FOP(22, FMT_D
),
10229 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10230 OPC_MADDF_D
= FOP(24, FMT_D
),
10231 OPC_MSUBF_D
= FOP(25, FMT_D
),
10232 OPC_RINT_D
= FOP(26, FMT_D
),
10233 OPC_CLASS_D
= FOP(27, FMT_D
),
10234 OPC_MIN_D
= FOP(28, FMT_D
),
10235 OPC_RECIP2_D
= FOP(28, FMT_D
),
10236 OPC_MINA_D
= FOP(29, FMT_D
),
10237 OPC_RECIP1_D
= FOP(29, FMT_D
),
10238 OPC_MAX_D
= FOP(30, FMT_D
),
10239 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10240 OPC_MAXA_D
= FOP(31, FMT_D
),
10241 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10242 OPC_CVT_S_D
= FOP(32, FMT_D
),
10243 OPC_CVT_W_D
= FOP(36, FMT_D
),
10244 OPC_CVT_L_D
= FOP(37, FMT_D
),
10245 OPC_CMP_F_D
= FOP (48, FMT_D
),
10246 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10247 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10248 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10249 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10250 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10251 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10252 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10253 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10254 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10255 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10256 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10257 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10258 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10259 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10260 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10262 OPC_CVT_S_W
= FOP(32, FMT_W
),
10263 OPC_CVT_D_W
= FOP(33, FMT_W
),
10264 OPC_CVT_S_L
= FOP(32, FMT_L
),
10265 OPC_CVT_D_L
= FOP(33, FMT_L
),
10266 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10268 OPC_ADD_PS
= FOP(0, FMT_PS
),
10269 OPC_SUB_PS
= FOP(1, FMT_PS
),
10270 OPC_MUL_PS
= FOP(2, FMT_PS
),
10271 OPC_DIV_PS
= FOP(3, FMT_PS
),
10272 OPC_ABS_PS
= FOP(5, FMT_PS
),
10273 OPC_MOV_PS
= FOP(6, FMT_PS
),
10274 OPC_NEG_PS
= FOP(7, FMT_PS
),
10275 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10276 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10277 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10278 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10279 OPC_MULR_PS
= FOP(26, FMT_PS
),
10280 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10281 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10282 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10283 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10285 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10286 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10287 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10288 OPC_PLL_PS
= FOP(44, FMT_PS
),
10289 OPC_PLU_PS
= FOP(45, FMT_PS
),
10290 OPC_PUL_PS
= FOP(46, FMT_PS
),
10291 OPC_PUU_PS
= FOP(47, FMT_PS
),
10292 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10293 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10294 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10295 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10296 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10297 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10298 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10299 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10300 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10301 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10302 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10303 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10304 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10305 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10306 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10307 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10311 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10312 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10313 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10314 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10315 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10316 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10317 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10318 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10319 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10320 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10321 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10322 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10323 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10324 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10325 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10326 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10327 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10328 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10329 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10330 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10331 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10332 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10334 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10335 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10336 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10337 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10338 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10339 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10340 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10341 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10342 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10343 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10344 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10345 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10346 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10347 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10348 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10349 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10350 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10351 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10352 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10353 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10354 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10355 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
/*
 * gen_cp1: CP1 (FPU) GPR<->FPR move instructions: MFC1/MTC1, CFC1/CTC1,
 * DMFC1/DMTC1 (MIPS64), MFHC1/MTHC1.
 *
 * NOTE(review): garbled extraction — the opc switch and its case labels
 * were dropped; each sequence below is one move flavor.
 */
10357 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10359 TCGv t0
= tcg_temp_new();
/* MFC1: sign-extend the low 32 bits of FPR fs into GPR rt. */
10364 TCGv_i32 fp0
= tcg_temp_new_i32();
10366 gen_load_fpr32(ctx
, fp0
, fs
);
10367 tcg_gen_ext_i32_tl(t0
, fp0
);
10368 tcg_temp_free_i32(fp0
);
10370 gen_store_gpr(t0
, rt
);
/* MTC1: truncate GPR rt and store into the low 32 bits of FPR fs. */
10373 gen_load_gpr(t0
, rt
);
10375 TCGv_i32 fp0
= tcg_temp_new_i32();
10377 tcg_gen_trunc_tl_i32(fp0
, t0
);
10378 gen_store_fpr32(ctx
, fp0
, fs
);
10379 tcg_temp_free_i32(fp0
);
/* CFC1: read FP control register fs into GPR rt via helper. */
10383 gen_helper_1e0i(cfc1
, t0
, fs
);
10384 gen_store_gpr(t0
, rt
);
/* CTC1: write GPR rt into FP control register fs. */
10387 gen_load_gpr(t0
, rt
);
10388 save_cpu_state(ctx
, 0);
10390 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10392 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10393 tcg_temp_free_i32(fs_tmp
);
10395 /* Stop translation as we may have changed hflags */
10396 ctx
->base
.is_jmp
= DISAS_STOP
;
10398 #if defined(TARGET_MIPS64)
/* DMFC1 / DMTC1: full 64-bit moves between GPR rt and FPR fs. */
10400 gen_load_fpr64(ctx
, t0
, fs
);
10401 gen_store_gpr(t0
, rt
);
10404 gen_load_gpr(t0
, rt
);
10405 gen_store_fpr64(ctx
, t0
, fs
);
/* MFHC1: read the high 32 bits of FPR fs (sign-extended) into rt. */
10410 TCGv_i32 fp0
= tcg_temp_new_i32();
10412 gen_load_fpr32h(ctx
, fp0
, fs
);
10413 tcg_gen_ext_i32_tl(t0
, fp0
);
10414 tcg_temp_free_i32(fp0
);
10416 gen_store_gpr(t0
, rt
);
/* MTHC1: write rt into the high 32 bits of FPR fs. */
10419 gen_load_gpr(t0
, rt
);
10421 TCGv_i32 fp0
= tcg_temp_new_i32();
10423 tcg_gen_trunc_tl_i32(fp0
, t0
);
10424 gen_store_fpr32h(ctx
, fp0
, fs
);
10425 tcg_temp_free_i32(fp0
);
/* Unknown opcode -> Reserved Instruction. */
10429 MIPS_INVAL("cp1 move");
10430 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_movci: MOVF/MOVT — conditionally move GPR rs into GPR rd when FP
 * condition code cc equals tf.  Implemented with a branch over the move.
 *
 * NOTE(review): garbled extraction — the local declarations, the
 * rd==0 early return, if/else bodies and gen_set_label were dropped.
 */
10438 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10445 /* Treat as NOP. */
/* Branch over the move when the CC bit does NOT match tf. */
10450 cond
= TCG_COND_EQ
;
10452 cond
= TCG_COND_NE
;
10454 l1
= gen_new_label();
10455 t0
= tcg_temp_new_i32();
10456 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10457 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10458 tcg_temp_free_i32(t0
);
/* rs == 0 reads as constant zero; otherwise copy the GPR. */
10460 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10462 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
/*
 * gen_movcf_s: MOVF.S/MOVT.S — conditionally copy single FPR fs to fd
 * based on FP condition code cc and the tf sense bit.
 *
 * NOTE(review): garbled extraction — the tf parameter, cond declaration,
 * if/else and gen_set_label lines were dropped.
 */
10467 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10471 TCGv_i32 t0
= tcg_temp_new_i32();
10472 TCGLabel
*l1
= gen_new_label();
10475 cond
= TCG_COND_EQ
;
10477 cond
= TCG_COND_NE
;
/* Skip the copy when the CC bit does not match the requested sense. */
10479 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10480 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10481 gen_load_fpr32(ctx
, t0
, fs
);
10482 gen_store_fpr32(ctx
, t0
, fd
);
10484 tcg_temp_free_i32(t0
);
/*
 * gen_movcf_d: MOVF.D/MOVT.D — conditionally copy double FPR fs to fd
 * based on FP condition code cc and the tf sense bit.  The i32 temp is
 * only used for the CC test; the data moves through an i64 temp.
 *
 * NOTE(review): garbled extraction — cond/fp0 declarations, if/else and
 * gen_set_label lines were dropped.
 */
10487 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10490 TCGv_i32 t0
= tcg_temp_new_i32();
10492 TCGLabel
*l1
= gen_new_label();
10495 cond
= TCG_COND_EQ
;
10497 cond
= TCG_COND_NE
;
/* Skip the copy when the CC bit does not match the requested sense. */
10499 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10500 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10501 tcg_temp_free_i32(t0
);
10502 fp0
= tcg_temp_new_i64();
10503 gen_load_fpr64(ctx
, fp0
, fs
);
10504 gen_store_fpr64(ctx
, fp0
, fd
);
10505 tcg_temp_free_i64(fp0
);
/*
 * gen_movcf_ps: MOVF.PS/MOVT.PS — conditionally copy the paired-single
 * FPR fs to fd.  The low half is gated by CC bit cc, the high half by
 * CC bit cc+1, each with its own skip label.
 *
 * NOTE(review): garbled extraction — the cc/tf parameters, cond
 * declaration, if/else and both gen_set_label lines were dropped.
 */
10509 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10513 TCGv_i32 t0
= tcg_temp_new_i32();
10514 TCGLabel
*l1
= gen_new_label();
10515 TCGLabel
*l2
= gen_new_label();
10518 cond
= TCG_COND_EQ
;
10520 cond
= TCG_COND_NE
;
/* Low single: gated by CC bit cc. */
10522 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10523 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10524 gen_load_fpr32(ctx
, t0
, fs
);
10525 gen_store_fpr32(ctx
, t0
, fd
);
/* High single: gated independently by CC bit cc+1. */
10528 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10529 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10530 gen_load_fpr32h(ctx
, t0
, fs
);
10531 gen_store_fpr32h(ctx
, t0
, fd
);
10532 tcg_temp_free_i32(t0
);
/*
 * gen_sel_s: R6 single-precision select family (SEL.S / SELEQZ.S /
 * SELNEZ.S).  Loads fd, ft, fs, selects per bit 0 of the condition
 * operand using movcond, and writes the result back to fd.
 *
 * NOTE(review): garbled extraction — the fs parameter line and the op1
 * switch's case labels are missing.
 */
10536 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10539 TCGv_i32 t1
= tcg_const_i32(0);
10540 TCGv_i32 fp0
= tcg_temp_new_i32();
10541 TCGv_i32 fp1
= tcg_temp_new_i32();
10542 TCGv_i32 fp2
= tcg_temp_new_i32();
10543 gen_load_fpr32(ctx
, fp0
, fd
);
10544 gen_load_fpr32(ctx
, fp1
, ft
);
10545 gen_load_fpr32(ctx
, fp2
, fs
);
/* SEL.S: bit 0 of fd chooses between ft (nonzero) and fs (zero). */
10549 tcg_gen_andi_i32(fp0
, fp0
, 1);
10550 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* SELEQZ.S: result is fs when bit 0 of ft is zero, else 0. */
10553 tcg_gen_andi_i32(fp1
, fp1
, 1);
10554 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* SELNEZ.S: result is fs when bit 0 of ft is nonzero, else 0. */
10557 tcg_gen_andi_i32(fp1
, fp1
, 1);
10558 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10561 MIPS_INVAL("gen_sel_s");
10562 generate_exception_end(ctx
, EXCP_RI
);
10566 gen_store_fpr32(ctx
, fp0
, fd
);
10567 tcg_temp_free_i32(fp2
);
10568 tcg_temp_free_i32(fp1
);
10569 tcg_temp_free_i32(fp0
);
10570 tcg_temp_free_i32(t1
);
/*
 * gen_sel_d: R6 double-precision select family (SEL.D / SELEQZ.D /
 * SELNEZ.D) — i64 mirror of gen_sel_s: select per bit 0 of the
 * condition operand using movcond, result written back to fd.
 *
 * NOTE(review): garbled extraction — the fs parameter line and the op1
 * switch's case labels are missing.
 */
10573 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10576 TCGv_i64 t1
= tcg_const_i64(0);
10577 TCGv_i64 fp0
= tcg_temp_new_i64();
10578 TCGv_i64 fp1
= tcg_temp_new_i64();
10579 TCGv_i64 fp2
= tcg_temp_new_i64();
10580 gen_load_fpr64(ctx
, fp0
, fd
);
10581 gen_load_fpr64(ctx
, fp1
, ft
);
10582 gen_load_fpr64(ctx
, fp2
, fs
);
/* SEL.D: bit 0 of fd chooses between ft (nonzero) and fs (zero). */
10586 tcg_gen_andi_i64(fp0
, fp0
, 1);
10587 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* SELEQZ.D: result is fs when bit 0 of ft is zero, else 0. */
10590 tcg_gen_andi_i64(fp1
, fp1
, 1);
10591 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* SELNEZ.D: result is fs when bit 0 of ft is nonzero, else 0. */
10594 tcg_gen_andi_i64(fp1
, fp1
, 1);
10595 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10598 MIPS_INVAL("gen_sel_d");
10599 generate_exception_end(ctx
, EXCP_RI
);
10603 gen_store_fpr64(ctx
, fp0
, fd
);
10604 tcg_temp_free_i64(fp2
);
10605 tcg_temp_free_i64(fp1
);
10606 tcg_temp_free_i64(fp0
);
10607 tcg_temp_free_i64(t1
);
10610 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10611 int ft
, int fs
, int fd
, int cc
)
10613 uint32_t func
= ctx
->opcode
& 0x3f;
10617 TCGv_i32 fp0
= tcg_temp_new_i32();
10618 TCGv_i32 fp1
= tcg_temp_new_i32();
10620 gen_load_fpr32(ctx
, fp0
, fs
);
10621 gen_load_fpr32(ctx
, fp1
, ft
);
10622 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10623 tcg_temp_free_i32(fp1
);
10624 gen_store_fpr32(ctx
, fp0
, fd
);
10625 tcg_temp_free_i32(fp0
);
10630 TCGv_i32 fp0
= tcg_temp_new_i32();
10631 TCGv_i32 fp1
= tcg_temp_new_i32();
10633 gen_load_fpr32(ctx
, fp0
, fs
);
10634 gen_load_fpr32(ctx
, fp1
, ft
);
10635 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10636 tcg_temp_free_i32(fp1
);
10637 gen_store_fpr32(ctx
, fp0
, fd
);
10638 tcg_temp_free_i32(fp0
);
10643 TCGv_i32 fp0
= tcg_temp_new_i32();
10644 TCGv_i32 fp1
= tcg_temp_new_i32();
10646 gen_load_fpr32(ctx
, fp0
, fs
);
10647 gen_load_fpr32(ctx
, fp1
, ft
);
10648 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10649 tcg_temp_free_i32(fp1
);
10650 gen_store_fpr32(ctx
, fp0
, fd
);
10651 tcg_temp_free_i32(fp0
);
10656 TCGv_i32 fp0
= tcg_temp_new_i32();
10657 TCGv_i32 fp1
= tcg_temp_new_i32();
10659 gen_load_fpr32(ctx
, fp0
, fs
);
10660 gen_load_fpr32(ctx
, fp1
, ft
);
10661 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10662 tcg_temp_free_i32(fp1
);
10663 gen_store_fpr32(ctx
, fp0
, fd
);
10664 tcg_temp_free_i32(fp0
);
10669 TCGv_i32 fp0
= tcg_temp_new_i32();
10671 gen_load_fpr32(ctx
, fp0
, fs
);
10672 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10673 gen_store_fpr32(ctx
, fp0
, fd
);
10674 tcg_temp_free_i32(fp0
);
10679 TCGv_i32 fp0
= tcg_temp_new_i32();
10681 gen_load_fpr32(ctx
, fp0
, fs
);
10682 if (ctx
->abs2008
) {
10683 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10685 gen_helper_float_abs_s(fp0
, fp0
);
10687 gen_store_fpr32(ctx
, fp0
, fd
);
10688 tcg_temp_free_i32(fp0
);
10693 TCGv_i32 fp0
= tcg_temp_new_i32();
10695 gen_load_fpr32(ctx
, fp0
, fs
);
10696 gen_store_fpr32(ctx
, fp0
, fd
);
10697 tcg_temp_free_i32(fp0
);
10702 TCGv_i32 fp0
= tcg_temp_new_i32();
10704 gen_load_fpr32(ctx
, fp0
, fs
);
10705 if (ctx
->abs2008
) {
10706 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10708 gen_helper_float_chs_s(fp0
, fp0
);
10710 gen_store_fpr32(ctx
, fp0
, fd
);
10711 tcg_temp_free_i32(fp0
);
10714 case OPC_ROUND_L_S
:
10715 check_cp1_64bitmode(ctx
);
10717 TCGv_i32 fp32
= tcg_temp_new_i32();
10718 TCGv_i64 fp64
= tcg_temp_new_i64();
10720 gen_load_fpr32(ctx
, fp32
, fs
);
10721 if (ctx
->nan2008
) {
10722 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10724 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10726 tcg_temp_free_i32(fp32
);
10727 gen_store_fpr64(ctx
, fp64
, fd
);
10728 tcg_temp_free_i64(fp64
);
10731 case OPC_TRUNC_L_S
:
10732 check_cp1_64bitmode(ctx
);
10734 TCGv_i32 fp32
= tcg_temp_new_i32();
10735 TCGv_i64 fp64
= tcg_temp_new_i64();
10737 gen_load_fpr32(ctx
, fp32
, fs
);
10738 if (ctx
->nan2008
) {
10739 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10741 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10743 tcg_temp_free_i32(fp32
);
10744 gen_store_fpr64(ctx
, fp64
, fd
);
10745 tcg_temp_free_i64(fp64
);
10749 check_cp1_64bitmode(ctx
);
10751 TCGv_i32 fp32
= tcg_temp_new_i32();
10752 TCGv_i64 fp64
= tcg_temp_new_i64();
10754 gen_load_fpr32(ctx
, fp32
, fs
);
10755 if (ctx
->nan2008
) {
10756 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10758 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10760 tcg_temp_free_i32(fp32
);
10761 gen_store_fpr64(ctx
, fp64
, fd
);
10762 tcg_temp_free_i64(fp64
);
10765 case OPC_FLOOR_L_S
:
10766 check_cp1_64bitmode(ctx
);
10768 TCGv_i32 fp32
= tcg_temp_new_i32();
10769 TCGv_i64 fp64
= tcg_temp_new_i64();
10771 gen_load_fpr32(ctx
, fp32
, fs
);
10772 if (ctx
->nan2008
) {
10773 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10775 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10777 tcg_temp_free_i32(fp32
);
10778 gen_store_fpr64(ctx
, fp64
, fd
);
10779 tcg_temp_free_i64(fp64
);
10782 case OPC_ROUND_W_S
:
10784 TCGv_i32 fp0
= tcg_temp_new_i32();
10786 gen_load_fpr32(ctx
, fp0
, fs
);
10787 if (ctx
->nan2008
) {
10788 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10790 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10792 gen_store_fpr32(ctx
, fp0
, fd
);
10793 tcg_temp_free_i32(fp0
);
10796 case OPC_TRUNC_W_S
:
10798 TCGv_i32 fp0
= tcg_temp_new_i32();
10800 gen_load_fpr32(ctx
, fp0
, fs
);
10801 if (ctx
->nan2008
) {
10802 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10804 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10806 gen_store_fpr32(ctx
, fp0
, fd
);
10807 tcg_temp_free_i32(fp0
);
10812 TCGv_i32 fp0
= tcg_temp_new_i32();
10814 gen_load_fpr32(ctx
, fp0
, fs
);
10815 if (ctx
->nan2008
) {
10816 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10818 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10820 gen_store_fpr32(ctx
, fp0
, fd
);
10821 tcg_temp_free_i32(fp0
);
10824 case OPC_FLOOR_W_S
:
10826 TCGv_i32 fp0
= tcg_temp_new_i32();
10828 gen_load_fpr32(ctx
, fp0
, fs
);
10829 if (ctx
->nan2008
) {
10830 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10832 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10834 gen_store_fpr32(ctx
, fp0
, fd
);
10835 tcg_temp_free_i32(fp0
);
10839 check_insn(ctx
, ISA_MIPS32R6
);
10840 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10843 check_insn(ctx
, ISA_MIPS32R6
);
10844 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10847 check_insn(ctx
, ISA_MIPS32R6
);
10848 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10852 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10857 TCGLabel
*l1
= gen_new_label();
10861 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10863 fp0
= tcg_temp_new_i32();
10864 gen_load_fpr32(ctx
, fp0
, fs
);
10865 gen_store_fpr32(ctx
, fp0
, fd
);
10866 tcg_temp_free_i32(fp0
);
10871 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10873 TCGLabel
*l1
= gen_new_label();
10877 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10878 fp0
= tcg_temp_new_i32();
10879 gen_load_fpr32(ctx
, fp0
, fs
);
10880 gen_store_fpr32(ctx
, fp0
, fd
);
10881 tcg_temp_free_i32(fp0
);
10888 TCGv_i32 fp0
= tcg_temp_new_i32();
10890 gen_load_fpr32(ctx
, fp0
, fs
);
10891 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10892 gen_store_fpr32(ctx
, fp0
, fd
);
10893 tcg_temp_free_i32(fp0
);
10898 TCGv_i32 fp0
= tcg_temp_new_i32();
10900 gen_load_fpr32(ctx
, fp0
, fs
);
10901 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10902 gen_store_fpr32(ctx
, fp0
, fd
);
10903 tcg_temp_free_i32(fp0
);
10907 check_insn(ctx
, ISA_MIPS32R6
);
10909 TCGv_i32 fp0
= tcg_temp_new_i32();
10910 TCGv_i32 fp1
= tcg_temp_new_i32();
10911 TCGv_i32 fp2
= tcg_temp_new_i32();
10912 gen_load_fpr32(ctx
, fp0
, fs
);
10913 gen_load_fpr32(ctx
, fp1
, ft
);
10914 gen_load_fpr32(ctx
, fp2
, fd
);
10915 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10916 gen_store_fpr32(ctx
, fp2
, fd
);
10917 tcg_temp_free_i32(fp2
);
10918 tcg_temp_free_i32(fp1
);
10919 tcg_temp_free_i32(fp0
);
10923 check_insn(ctx
, ISA_MIPS32R6
);
10925 TCGv_i32 fp0
= tcg_temp_new_i32();
10926 TCGv_i32 fp1
= tcg_temp_new_i32();
10927 TCGv_i32 fp2
= tcg_temp_new_i32();
10928 gen_load_fpr32(ctx
, fp0
, fs
);
10929 gen_load_fpr32(ctx
, fp1
, ft
);
10930 gen_load_fpr32(ctx
, fp2
, fd
);
10931 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10932 gen_store_fpr32(ctx
, fp2
, fd
);
10933 tcg_temp_free_i32(fp2
);
10934 tcg_temp_free_i32(fp1
);
10935 tcg_temp_free_i32(fp0
);
10939 check_insn(ctx
, ISA_MIPS32R6
);
10941 TCGv_i32 fp0
= tcg_temp_new_i32();
10942 gen_load_fpr32(ctx
, fp0
, fs
);
10943 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10944 gen_store_fpr32(ctx
, fp0
, fd
);
10945 tcg_temp_free_i32(fp0
);
10949 check_insn(ctx
, ISA_MIPS32R6
);
10951 TCGv_i32 fp0
= tcg_temp_new_i32();
10952 gen_load_fpr32(ctx
, fp0
, fs
);
10953 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10954 gen_store_fpr32(ctx
, fp0
, fd
);
10955 tcg_temp_free_i32(fp0
);
10958 case OPC_MIN_S
: /* OPC_RECIP2_S */
10959 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10961 TCGv_i32 fp0
= tcg_temp_new_i32();
10962 TCGv_i32 fp1
= tcg_temp_new_i32();
10963 TCGv_i32 fp2
= tcg_temp_new_i32();
10964 gen_load_fpr32(ctx
, fp0
, fs
);
10965 gen_load_fpr32(ctx
, fp1
, ft
);
10966 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10967 gen_store_fpr32(ctx
, fp2
, fd
);
10968 tcg_temp_free_i32(fp2
);
10969 tcg_temp_free_i32(fp1
);
10970 tcg_temp_free_i32(fp0
);
10973 check_cp1_64bitmode(ctx
);
10975 TCGv_i32 fp0
= tcg_temp_new_i32();
10976 TCGv_i32 fp1
= tcg_temp_new_i32();
10978 gen_load_fpr32(ctx
, fp0
, fs
);
10979 gen_load_fpr32(ctx
, fp1
, ft
);
10980 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10981 tcg_temp_free_i32(fp1
);
10982 gen_store_fpr32(ctx
, fp0
, fd
);
10983 tcg_temp_free_i32(fp0
);
10987 case OPC_MINA_S
: /* OPC_RECIP1_S */
10988 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10990 TCGv_i32 fp0
= tcg_temp_new_i32();
10991 TCGv_i32 fp1
= tcg_temp_new_i32();
10992 TCGv_i32 fp2
= tcg_temp_new_i32();
10993 gen_load_fpr32(ctx
, fp0
, fs
);
10994 gen_load_fpr32(ctx
, fp1
, ft
);
10995 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10996 gen_store_fpr32(ctx
, fp2
, fd
);
10997 tcg_temp_free_i32(fp2
);
10998 tcg_temp_free_i32(fp1
);
10999 tcg_temp_free_i32(fp0
);
11002 check_cp1_64bitmode(ctx
);
11004 TCGv_i32 fp0
= tcg_temp_new_i32();
11006 gen_load_fpr32(ctx
, fp0
, fs
);
11007 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11008 gen_store_fpr32(ctx
, fp0
, fd
);
11009 tcg_temp_free_i32(fp0
);
11013 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11014 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11016 TCGv_i32 fp0
= tcg_temp_new_i32();
11017 TCGv_i32 fp1
= tcg_temp_new_i32();
11018 gen_load_fpr32(ctx
, fp0
, fs
);
11019 gen_load_fpr32(ctx
, fp1
, ft
);
11020 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11021 gen_store_fpr32(ctx
, fp1
, fd
);
11022 tcg_temp_free_i32(fp1
);
11023 tcg_temp_free_i32(fp0
);
11026 check_cp1_64bitmode(ctx
);
11028 TCGv_i32 fp0
= tcg_temp_new_i32();
11030 gen_load_fpr32(ctx
, fp0
, fs
);
11031 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11032 gen_store_fpr32(ctx
, fp0
, fd
);
11033 tcg_temp_free_i32(fp0
);
11037 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11038 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11040 TCGv_i32 fp0
= tcg_temp_new_i32();
11041 TCGv_i32 fp1
= tcg_temp_new_i32();
11042 gen_load_fpr32(ctx
, fp0
, fs
);
11043 gen_load_fpr32(ctx
, fp1
, ft
);
11044 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11045 gen_store_fpr32(ctx
, fp1
, fd
);
11046 tcg_temp_free_i32(fp1
);
11047 tcg_temp_free_i32(fp0
);
11050 check_cp1_64bitmode(ctx
);
11052 TCGv_i32 fp0
= tcg_temp_new_i32();
11053 TCGv_i32 fp1
= tcg_temp_new_i32();
11055 gen_load_fpr32(ctx
, fp0
, fs
);
11056 gen_load_fpr32(ctx
, fp1
, ft
);
11057 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11058 tcg_temp_free_i32(fp1
);
11059 gen_store_fpr32(ctx
, fp0
, fd
);
11060 tcg_temp_free_i32(fp0
);
11065 check_cp1_registers(ctx
, fd
);
11067 TCGv_i32 fp32
= tcg_temp_new_i32();
11068 TCGv_i64 fp64
= tcg_temp_new_i64();
11070 gen_load_fpr32(ctx
, fp32
, fs
);
11071 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11072 tcg_temp_free_i32(fp32
);
11073 gen_store_fpr64(ctx
, fp64
, fd
);
11074 tcg_temp_free_i64(fp64
);
11079 TCGv_i32 fp0
= tcg_temp_new_i32();
11081 gen_load_fpr32(ctx
, fp0
, fs
);
11082 if (ctx
->nan2008
) {
11083 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11085 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11087 gen_store_fpr32(ctx
, fp0
, fd
);
11088 tcg_temp_free_i32(fp0
);
11092 check_cp1_64bitmode(ctx
);
11094 TCGv_i32 fp32
= tcg_temp_new_i32();
11095 TCGv_i64 fp64
= tcg_temp_new_i64();
11097 gen_load_fpr32(ctx
, fp32
, fs
);
11098 if (ctx
->nan2008
) {
11099 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11101 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11103 tcg_temp_free_i32(fp32
);
11104 gen_store_fpr64(ctx
, fp64
, fd
);
11105 tcg_temp_free_i64(fp64
);
11111 TCGv_i64 fp64
= tcg_temp_new_i64();
11112 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11113 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11115 gen_load_fpr32(ctx
, fp32_0
, fs
);
11116 gen_load_fpr32(ctx
, fp32_1
, ft
);
11117 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11118 tcg_temp_free_i32(fp32_1
);
11119 tcg_temp_free_i32(fp32_0
);
11120 gen_store_fpr64(ctx
, fp64
, fd
);
11121 tcg_temp_free_i64(fp64
);
11127 case OPC_CMP_UEQ_S
:
11128 case OPC_CMP_OLT_S
:
11129 case OPC_CMP_ULT_S
:
11130 case OPC_CMP_OLE_S
:
11131 case OPC_CMP_ULE_S
:
11133 case OPC_CMP_NGLE_S
:
11134 case OPC_CMP_SEQ_S
:
11135 case OPC_CMP_NGL_S
:
11137 case OPC_CMP_NGE_S
:
11139 case OPC_CMP_NGT_S
:
11140 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11141 if (ctx
->opcode
& (1 << 6)) {
11142 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11144 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11148 check_cp1_registers(ctx
, fs
| ft
| fd
);
11150 TCGv_i64 fp0
= tcg_temp_new_i64();
11151 TCGv_i64 fp1
= tcg_temp_new_i64();
11153 gen_load_fpr64(ctx
, fp0
, fs
);
11154 gen_load_fpr64(ctx
, fp1
, ft
);
11155 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11156 tcg_temp_free_i64(fp1
);
11157 gen_store_fpr64(ctx
, fp0
, fd
);
11158 tcg_temp_free_i64(fp0
);
11162 check_cp1_registers(ctx
, fs
| ft
| fd
);
11164 TCGv_i64 fp0
= tcg_temp_new_i64();
11165 TCGv_i64 fp1
= tcg_temp_new_i64();
11167 gen_load_fpr64(ctx
, fp0
, fs
);
11168 gen_load_fpr64(ctx
, fp1
, ft
);
11169 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11170 tcg_temp_free_i64(fp1
);
11171 gen_store_fpr64(ctx
, fp0
, fd
);
11172 tcg_temp_free_i64(fp0
);
11176 check_cp1_registers(ctx
, fs
| ft
| fd
);
11178 TCGv_i64 fp0
= tcg_temp_new_i64();
11179 TCGv_i64 fp1
= tcg_temp_new_i64();
11181 gen_load_fpr64(ctx
, fp0
, fs
);
11182 gen_load_fpr64(ctx
, fp1
, ft
);
11183 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11184 tcg_temp_free_i64(fp1
);
11185 gen_store_fpr64(ctx
, fp0
, fd
);
11186 tcg_temp_free_i64(fp0
);
11190 check_cp1_registers(ctx
, fs
| ft
| fd
);
11192 TCGv_i64 fp0
= tcg_temp_new_i64();
11193 TCGv_i64 fp1
= tcg_temp_new_i64();
11195 gen_load_fpr64(ctx
, fp0
, fs
);
11196 gen_load_fpr64(ctx
, fp1
, ft
);
11197 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11198 tcg_temp_free_i64(fp1
);
11199 gen_store_fpr64(ctx
, fp0
, fd
);
11200 tcg_temp_free_i64(fp0
);
11204 check_cp1_registers(ctx
, fs
| fd
);
11206 TCGv_i64 fp0
= tcg_temp_new_i64();
11208 gen_load_fpr64(ctx
, fp0
, fs
);
11209 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11210 gen_store_fpr64(ctx
, fp0
, fd
);
11211 tcg_temp_free_i64(fp0
);
11215 check_cp1_registers(ctx
, fs
| fd
);
11217 TCGv_i64 fp0
= tcg_temp_new_i64();
11219 gen_load_fpr64(ctx
, fp0
, fs
);
11220 if (ctx
->abs2008
) {
11221 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11223 gen_helper_float_abs_d(fp0
, fp0
);
11225 gen_store_fpr64(ctx
, fp0
, fd
);
11226 tcg_temp_free_i64(fp0
);
11230 check_cp1_registers(ctx
, fs
| fd
);
11232 TCGv_i64 fp0
= tcg_temp_new_i64();
11234 gen_load_fpr64(ctx
, fp0
, fs
);
11235 gen_store_fpr64(ctx
, fp0
, fd
);
11236 tcg_temp_free_i64(fp0
);
11240 check_cp1_registers(ctx
, fs
| fd
);
11242 TCGv_i64 fp0
= tcg_temp_new_i64();
11244 gen_load_fpr64(ctx
, fp0
, fs
);
11245 if (ctx
->abs2008
) {
11246 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11248 gen_helper_float_chs_d(fp0
, fp0
);
11250 gen_store_fpr64(ctx
, fp0
, fd
);
11251 tcg_temp_free_i64(fp0
);
11254 case OPC_ROUND_L_D
:
11255 check_cp1_64bitmode(ctx
);
11257 TCGv_i64 fp0
= tcg_temp_new_i64();
11259 gen_load_fpr64(ctx
, fp0
, fs
);
11260 if (ctx
->nan2008
) {
11261 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11263 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11265 gen_store_fpr64(ctx
, fp0
, fd
);
11266 tcg_temp_free_i64(fp0
);
11269 case OPC_TRUNC_L_D
:
11270 check_cp1_64bitmode(ctx
);
11272 TCGv_i64 fp0
= tcg_temp_new_i64();
11274 gen_load_fpr64(ctx
, fp0
, fs
);
11275 if (ctx
->nan2008
) {
11276 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11278 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11280 gen_store_fpr64(ctx
, fp0
, fd
);
11281 tcg_temp_free_i64(fp0
);
11285 check_cp1_64bitmode(ctx
);
11287 TCGv_i64 fp0
= tcg_temp_new_i64();
11289 gen_load_fpr64(ctx
, fp0
, fs
);
11290 if (ctx
->nan2008
) {
11291 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11293 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11295 gen_store_fpr64(ctx
, fp0
, fd
);
11296 tcg_temp_free_i64(fp0
);
11299 case OPC_FLOOR_L_D
:
11300 check_cp1_64bitmode(ctx
);
11302 TCGv_i64 fp0
= tcg_temp_new_i64();
11304 gen_load_fpr64(ctx
, fp0
, fs
);
11305 if (ctx
->nan2008
) {
11306 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11308 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11310 gen_store_fpr64(ctx
, fp0
, fd
);
11311 tcg_temp_free_i64(fp0
);
11314 case OPC_ROUND_W_D
:
11315 check_cp1_registers(ctx
, fs
);
11317 TCGv_i32 fp32
= tcg_temp_new_i32();
11318 TCGv_i64 fp64
= tcg_temp_new_i64();
11320 gen_load_fpr64(ctx
, fp64
, fs
);
11321 if (ctx
->nan2008
) {
11322 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11324 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11326 tcg_temp_free_i64(fp64
);
11327 gen_store_fpr32(ctx
, fp32
, fd
);
11328 tcg_temp_free_i32(fp32
);
11331 case OPC_TRUNC_W_D
:
11332 check_cp1_registers(ctx
, fs
);
11334 TCGv_i32 fp32
= tcg_temp_new_i32();
11335 TCGv_i64 fp64
= tcg_temp_new_i64();
11337 gen_load_fpr64(ctx
, fp64
, fs
);
11338 if (ctx
->nan2008
) {
11339 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11341 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11343 tcg_temp_free_i64(fp64
);
11344 gen_store_fpr32(ctx
, fp32
, fd
);
11345 tcg_temp_free_i32(fp32
);
11349 check_cp1_registers(ctx
, fs
);
11351 TCGv_i32 fp32
= tcg_temp_new_i32();
11352 TCGv_i64 fp64
= tcg_temp_new_i64();
11354 gen_load_fpr64(ctx
, fp64
, fs
);
11355 if (ctx
->nan2008
) {
11356 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11358 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11360 tcg_temp_free_i64(fp64
);
11361 gen_store_fpr32(ctx
, fp32
, fd
);
11362 tcg_temp_free_i32(fp32
);
11365 case OPC_FLOOR_W_D
:
11366 check_cp1_registers(ctx
, fs
);
11368 TCGv_i32 fp32
= tcg_temp_new_i32();
11369 TCGv_i64 fp64
= tcg_temp_new_i64();
11371 gen_load_fpr64(ctx
, fp64
, fs
);
11372 if (ctx
->nan2008
) {
11373 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11375 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11377 tcg_temp_free_i64(fp64
);
11378 gen_store_fpr32(ctx
, fp32
, fd
);
11379 tcg_temp_free_i32(fp32
);
11383 check_insn(ctx
, ISA_MIPS32R6
);
11384 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11387 check_insn(ctx
, ISA_MIPS32R6
);
11388 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11391 check_insn(ctx
, ISA_MIPS32R6
);
11392 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11395 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11396 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11399 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11401 TCGLabel
*l1
= gen_new_label();
11405 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11407 fp0
= tcg_temp_new_i64();
11408 gen_load_fpr64(ctx
, fp0
, fs
);
11409 gen_store_fpr64(ctx
, fp0
, fd
);
11410 tcg_temp_free_i64(fp0
);
11415 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11417 TCGLabel
*l1
= gen_new_label();
11421 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11422 fp0
= tcg_temp_new_i64();
11423 gen_load_fpr64(ctx
, fp0
, fs
);
11424 gen_store_fpr64(ctx
, fp0
, fd
);
11425 tcg_temp_free_i64(fp0
);
11431 check_cp1_registers(ctx
, fs
| fd
);
11433 TCGv_i64 fp0
= tcg_temp_new_i64();
11435 gen_load_fpr64(ctx
, fp0
, fs
);
11436 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11437 gen_store_fpr64(ctx
, fp0
, fd
);
11438 tcg_temp_free_i64(fp0
);
11442 check_cp1_registers(ctx
, fs
| fd
);
11444 TCGv_i64 fp0
= tcg_temp_new_i64();
11446 gen_load_fpr64(ctx
, fp0
, fs
);
11447 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11448 gen_store_fpr64(ctx
, fp0
, fd
);
11449 tcg_temp_free_i64(fp0
);
11453 check_insn(ctx
, ISA_MIPS32R6
);
11455 TCGv_i64 fp0
= tcg_temp_new_i64();
11456 TCGv_i64 fp1
= tcg_temp_new_i64();
11457 TCGv_i64 fp2
= tcg_temp_new_i64();
11458 gen_load_fpr64(ctx
, fp0
, fs
);
11459 gen_load_fpr64(ctx
, fp1
, ft
);
11460 gen_load_fpr64(ctx
, fp2
, fd
);
11461 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11462 gen_store_fpr64(ctx
, fp2
, fd
);
11463 tcg_temp_free_i64(fp2
);
11464 tcg_temp_free_i64(fp1
);
11465 tcg_temp_free_i64(fp0
);
11469 check_insn(ctx
, ISA_MIPS32R6
);
11471 TCGv_i64 fp0
= tcg_temp_new_i64();
11472 TCGv_i64 fp1
= tcg_temp_new_i64();
11473 TCGv_i64 fp2
= tcg_temp_new_i64();
11474 gen_load_fpr64(ctx
, fp0
, fs
);
11475 gen_load_fpr64(ctx
, fp1
, ft
);
11476 gen_load_fpr64(ctx
, fp2
, fd
);
11477 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11478 gen_store_fpr64(ctx
, fp2
, fd
);
11479 tcg_temp_free_i64(fp2
);
11480 tcg_temp_free_i64(fp1
);
11481 tcg_temp_free_i64(fp0
);
11485 check_insn(ctx
, ISA_MIPS32R6
);
11487 TCGv_i64 fp0
= tcg_temp_new_i64();
11488 gen_load_fpr64(ctx
, fp0
, fs
);
11489 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11490 gen_store_fpr64(ctx
, fp0
, fd
);
11491 tcg_temp_free_i64(fp0
);
11495 check_insn(ctx
, ISA_MIPS32R6
);
11497 TCGv_i64 fp0
= tcg_temp_new_i64();
11498 gen_load_fpr64(ctx
, fp0
, fs
);
11499 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11500 gen_store_fpr64(ctx
, fp0
, fd
);
11501 tcg_temp_free_i64(fp0
);
11504 case OPC_MIN_D
: /* OPC_RECIP2_D */
11505 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11507 TCGv_i64 fp0
= tcg_temp_new_i64();
11508 TCGv_i64 fp1
= tcg_temp_new_i64();
11509 gen_load_fpr64(ctx
, fp0
, fs
);
11510 gen_load_fpr64(ctx
, fp1
, ft
);
11511 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11512 gen_store_fpr64(ctx
, fp1
, fd
);
11513 tcg_temp_free_i64(fp1
);
11514 tcg_temp_free_i64(fp0
);
11517 check_cp1_64bitmode(ctx
);
11519 TCGv_i64 fp0
= tcg_temp_new_i64();
11520 TCGv_i64 fp1
= tcg_temp_new_i64();
11522 gen_load_fpr64(ctx
, fp0
, fs
);
11523 gen_load_fpr64(ctx
, fp1
, ft
);
11524 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11525 tcg_temp_free_i64(fp1
);
11526 gen_store_fpr64(ctx
, fp0
, fd
);
11527 tcg_temp_free_i64(fp0
);
11531 case OPC_MINA_D
: /* OPC_RECIP1_D */
11532 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11534 TCGv_i64 fp0
= tcg_temp_new_i64();
11535 TCGv_i64 fp1
= tcg_temp_new_i64();
11536 gen_load_fpr64(ctx
, fp0
, fs
);
11537 gen_load_fpr64(ctx
, fp1
, ft
);
11538 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11539 gen_store_fpr64(ctx
, fp1
, fd
);
11540 tcg_temp_free_i64(fp1
);
11541 tcg_temp_free_i64(fp0
);
11544 check_cp1_64bitmode(ctx
);
11546 TCGv_i64 fp0
= tcg_temp_new_i64();
11548 gen_load_fpr64(ctx
, fp0
, fs
);
11549 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11550 gen_store_fpr64(ctx
, fp0
, fd
);
11551 tcg_temp_free_i64(fp0
);
11555 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11556 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11558 TCGv_i64 fp0
= tcg_temp_new_i64();
11559 TCGv_i64 fp1
= tcg_temp_new_i64();
11560 gen_load_fpr64(ctx
, fp0
, fs
);
11561 gen_load_fpr64(ctx
, fp1
, ft
);
11562 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11563 gen_store_fpr64(ctx
, fp1
, fd
);
11564 tcg_temp_free_i64(fp1
);
11565 tcg_temp_free_i64(fp0
);
11568 check_cp1_64bitmode(ctx
);
11570 TCGv_i64 fp0
= tcg_temp_new_i64();
11572 gen_load_fpr64(ctx
, fp0
, fs
);
11573 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11574 gen_store_fpr64(ctx
, fp0
, fd
);
11575 tcg_temp_free_i64(fp0
);
11579 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11580 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11582 TCGv_i64 fp0
= tcg_temp_new_i64();
11583 TCGv_i64 fp1
= tcg_temp_new_i64();
11584 gen_load_fpr64(ctx
, fp0
, fs
);
11585 gen_load_fpr64(ctx
, fp1
, ft
);
11586 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11587 gen_store_fpr64(ctx
, fp1
, fd
);
11588 tcg_temp_free_i64(fp1
);
11589 tcg_temp_free_i64(fp0
);
11592 check_cp1_64bitmode(ctx
);
11594 TCGv_i64 fp0
= tcg_temp_new_i64();
11595 TCGv_i64 fp1
= tcg_temp_new_i64();
11597 gen_load_fpr64(ctx
, fp0
, fs
);
11598 gen_load_fpr64(ctx
, fp1
, ft
);
11599 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11600 tcg_temp_free_i64(fp1
);
11601 gen_store_fpr64(ctx
, fp0
, fd
);
11602 tcg_temp_free_i64(fp0
);
11609 case OPC_CMP_UEQ_D
:
11610 case OPC_CMP_OLT_D
:
11611 case OPC_CMP_ULT_D
:
11612 case OPC_CMP_OLE_D
:
11613 case OPC_CMP_ULE_D
:
11615 case OPC_CMP_NGLE_D
:
11616 case OPC_CMP_SEQ_D
:
11617 case OPC_CMP_NGL_D
:
11619 case OPC_CMP_NGE_D
:
11621 case OPC_CMP_NGT_D
:
11622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11623 if (ctx
->opcode
& (1 << 6)) {
11624 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11626 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11630 check_cp1_registers(ctx
, fs
);
11632 TCGv_i32 fp32
= tcg_temp_new_i32();
11633 TCGv_i64 fp64
= tcg_temp_new_i64();
11635 gen_load_fpr64(ctx
, fp64
, fs
);
11636 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11637 tcg_temp_free_i64(fp64
);
11638 gen_store_fpr32(ctx
, fp32
, fd
);
11639 tcg_temp_free_i32(fp32
);
11643 check_cp1_registers(ctx
, fs
);
11645 TCGv_i32 fp32
= tcg_temp_new_i32();
11646 TCGv_i64 fp64
= tcg_temp_new_i64();
11648 gen_load_fpr64(ctx
, fp64
, fs
);
11649 if (ctx
->nan2008
) {
11650 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11652 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11654 tcg_temp_free_i64(fp64
);
11655 gen_store_fpr32(ctx
, fp32
, fd
);
11656 tcg_temp_free_i32(fp32
);
11660 check_cp1_64bitmode(ctx
);
11662 TCGv_i64 fp0
= tcg_temp_new_i64();
11664 gen_load_fpr64(ctx
, fp0
, fs
);
11665 if (ctx
->nan2008
) {
11666 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11668 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11670 gen_store_fpr64(ctx
, fp0
, fd
);
11671 tcg_temp_free_i64(fp0
);
11676 TCGv_i32 fp0
= tcg_temp_new_i32();
11678 gen_load_fpr32(ctx
, fp0
, fs
);
11679 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11680 gen_store_fpr32(ctx
, fp0
, fd
);
11681 tcg_temp_free_i32(fp0
);
11685 check_cp1_registers(ctx
, fd
);
11687 TCGv_i32 fp32
= tcg_temp_new_i32();
11688 TCGv_i64 fp64
= tcg_temp_new_i64();
11690 gen_load_fpr32(ctx
, fp32
, fs
);
11691 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11692 tcg_temp_free_i32(fp32
);
11693 gen_store_fpr64(ctx
, fp64
, fd
);
11694 tcg_temp_free_i64(fp64
);
11698 check_cp1_64bitmode(ctx
);
11700 TCGv_i32 fp32
= tcg_temp_new_i32();
11701 TCGv_i64 fp64
= tcg_temp_new_i64();
11703 gen_load_fpr64(ctx
, fp64
, fs
);
11704 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11705 tcg_temp_free_i64(fp64
);
11706 gen_store_fpr32(ctx
, fp32
, fd
);
11707 tcg_temp_free_i32(fp32
);
11711 check_cp1_64bitmode(ctx
);
11713 TCGv_i64 fp0
= tcg_temp_new_i64();
11715 gen_load_fpr64(ctx
, fp0
, fs
);
11716 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11717 gen_store_fpr64(ctx
, fp0
, fd
);
11718 tcg_temp_free_i64(fp0
);
11721 case OPC_CVT_PS_PW
:
11724 TCGv_i64 fp0
= tcg_temp_new_i64();
11726 gen_load_fpr64(ctx
, fp0
, fs
);
11727 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11728 gen_store_fpr64(ctx
, fp0
, fd
);
11729 tcg_temp_free_i64(fp0
);
11735 TCGv_i64 fp0
= tcg_temp_new_i64();
11736 TCGv_i64 fp1
= tcg_temp_new_i64();
11738 gen_load_fpr64(ctx
, fp0
, fs
);
11739 gen_load_fpr64(ctx
, fp1
, ft
);
11740 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11741 tcg_temp_free_i64(fp1
);
11742 gen_store_fpr64(ctx
, fp0
, fd
);
11743 tcg_temp_free_i64(fp0
);
11749 TCGv_i64 fp0
= tcg_temp_new_i64();
11750 TCGv_i64 fp1
= tcg_temp_new_i64();
11752 gen_load_fpr64(ctx
, fp0
, fs
);
11753 gen_load_fpr64(ctx
, fp1
, ft
);
11754 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11755 tcg_temp_free_i64(fp1
);
11756 gen_store_fpr64(ctx
, fp0
, fd
);
11757 tcg_temp_free_i64(fp0
);
11763 TCGv_i64 fp0
= tcg_temp_new_i64();
11764 TCGv_i64 fp1
= tcg_temp_new_i64();
11766 gen_load_fpr64(ctx
, fp0
, fs
);
11767 gen_load_fpr64(ctx
, fp1
, ft
);
11768 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11769 tcg_temp_free_i64(fp1
);
11770 gen_store_fpr64(ctx
, fp0
, fd
);
11771 tcg_temp_free_i64(fp0
);
11777 TCGv_i64 fp0
= tcg_temp_new_i64();
11779 gen_load_fpr64(ctx
, fp0
, fs
);
11780 gen_helper_float_abs_ps(fp0
, fp0
);
11781 gen_store_fpr64(ctx
, fp0
, fd
);
11782 tcg_temp_free_i64(fp0
);
11788 TCGv_i64 fp0
= tcg_temp_new_i64();
11790 gen_load_fpr64(ctx
, fp0
, fs
);
11791 gen_store_fpr64(ctx
, fp0
, fd
);
11792 tcg_temp_free_i64(fp0
);
11798 TCGv_i64 fp0
= tcg_temp_new_i64();
11800 gen_load_fpr64(ctx
, fp0
, fs
);
11801 gen_helper_float_chs_ps(fp0
, fp0
);
11802 gen_store_fpr64(ctx
, fp0
, fd
);
11803 tcg_temp_free_i64(fp0
);
11808 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11813 TCGLabel
*l1
= gen_new_label();
11817 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11818 fp0
= tcg_temp_new_i64();
11819 gen_load_fpr64(ctx
, fp0
, fs
);
11820 gen_store_fpr64(ctx
, fp0
, fd
);
11821 tcg_temp_free_i64(fp0
);
11828 TCGLabel
*l1
= gen_new_label();
11832 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11833 fp0
= tcg_temp_new_i64();
11834 gen_load_fpr64(ctx
, fp0
, fs
);
11835 gen_store_fpr64(ctx
, fp0
, fd
);
11836 tcg_temp_free_i64(fp0
);
11844 TCGv_i64 fp0
= tcg_temp_new_i64();
11845 TCGv_i64 fp1
= tcg_temp_new_i64();
11847 gen_load_fpr64(ctx
, fp0
, ft
);
11848 gen_load_fpr64(ctx
, fp1
, fs
);
11849 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11850 tcg_temp_free_i64(fp1
);
11851 gen_store_fpr64(ctx
, fp0
, fd
);
11852 tcg_temp_free_i64(fp0
);
11858 TCGv_i64 fp0
= tcg_temp_new_i64();
11859 TCGv_i64 fp1
= tcg_temp_new_i64();
11861 gen_load_fpr64(ctx
, fp0
, ft
);
11862 gen_load_fpr64(ctx
, fp1
, fs
);
11863 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11864 tcg_temp_free_i64(fp1
);
11865 gen_store_fpr64(ctx
, fp0
, fd
);
11866 tcg_temp_free_i64(fp0
);
11869 case OPC_RECIP2_PS
:
11872 TCGv_i64 fp0
= tcg_temp_new_i64();
11873 TCGv_i64 fp1
= tcg_temp_new_i64();
11875 gen_load_fpr64(ctx
, fp0
, fs
);
11876 gen_load_fpr64(ctx
, fp1
, ft
);
11877 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11878 tcg_temp_free_i64(fp1
);
11879 gen_store_fpr64(ctx
, fp0
, fd
);
11880 tcg_temp_free_i64(fp0
);
11883 case OPC_RECIP1_PS
:
11886 TCGv_i64 fp0
= tcg_temp_new_i64();
11888 gen_load_fpr64(ctx
, fp0
, fs
);
11889 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11890 gen_store_fpr64(ctx
, fp0
, fd
);
11891 tcg_temp_free_i64(fp0
);
11894 case OPC_RSQRT1_PS
:
11897 TCGv_i64 fp0
= tcg_temp_new_i64();
11899 gen_load_fpr64(ctx
, fp0
, fs
);
11900 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11901 gen_store_fpr64(ctx
, fp0
, fd
);
11902 tcg_temp_free_i64(fp0
);
11905 case OPC_RSQRT2_PS
:
11908 TCGv_i64 fp0
= tcg_temp_new_i64();
11909 TCGv_i64 fp1
= tcg_temp_new_i64();
11911 gen_load_fpr64(ctx
, fp0
, fs
);
11912 gen_load_fpr64(ctx
, fp1
, ft
);
11913 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11914 tcg_temp_free_i64(fp1
);
11915 gen_store_fpr64(ctx
, fp0
, fd
);
11916 tcg_temp_free_i64(fp0
);
11920 check_cp1_64bitmode(ctx
);
11922 TCGv_i32 fp0
= tcg_temp_new_i32();
11924 gen_load_fpr32h(ctx
, fp0
, fs
);
11925 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11926 gen_store_fpr32(ctx
, fp0
, fd
);
11927 tcg_temp_free_i32(fp0
);
11930 case OPC_CVT_PW_PS
:
11933 TCGv_i64 fp0
= tcg_temp_new_i64();
11935 gen_load_fpr64(ctx
, fp0
, fs
);
11936 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11937 gen_store_fpr64(ctx
, fp0
, fd
);
11938 tcg_temp_free_i64(fp0
);
11942 check_cp1_64bitmode(ctx
);
11944 TCGv_i32 fp0
= tcg_temp_new_i32();
11946 gen_load_fpr32(ctx
, fp0
, fs
);
11947 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11948 gen_store_fpr32(ctx
, fp0
, fd
);
11949 tcg_temp_free_i32(fp0
);
11955 TCGv_i32 fp0
= tcg_temp_new_i32();
11956 TCGv_i32 fp1
= tcg_temp_new_i32();
11958 gen_load_fpr32(ctx
, fp0
, fs
);
11959 gen_load_fpr32(ctx
, fp1
, ft
);
11960 gen_store_fpr32h(ctx
, fp0
, fd
);
11961 gen_store_fpr32(ctx
, fp1
, fd
);
11962 tcg_temp_free_i32(fp0
);
11963 tcg_temp_free_i32(fp1
);
11969 TCGv_i32 fp0
= tcg_temp_new_i32();
11970 TCGv_i32 fp1
= tcg_temp_new_i32();
11972 gen_load_fpr32(ctx
, fp0
, fs
);
11973 gen_load_fpr32h(ctx
, fp1
, ft
);
11974 gen_store_fpr32(ctx
, fp1
, fd
);
11975 gen_store_fpr32h(ctx
, fp0
, fd
);
11976 tcg_temp_free_i32(fp0
);
11977 tcg_temp_free_i32(fp1
);
11983 TCGv_i32 fp0
= tcg_temp_new_i32();
11984 TCGv_i32 fp1
= tcg_temp_new_i32();
11986 gen_load_fpr32h(ctx
, fp0
, fs
);
11987 gen_load_fpr32(ctx
, fp1
, ft
);
11988 gen_store_fpr32(ctx
, fp1
, fd
);
11989 gen_store_fpr32h(ctx
, fp0
, fd
);
11990 tcg_temp_free_i32(fp0
);
11991 tcg_temp_free_i32(fp1
);
11997 TCGv_i32 fp0
= tcg_temp_new_i32();
11998 TCGv_i32 fp1
= tcg_temp_new_i32();
12000 gen_load_fpr32h(ctx
, fp0
, fs
);
12001 gen_load_fpr32h(ctx
, fp1
, ft
);
12002 gen_store_fpr32(ctx
, fp1
, fd
);
12003 gen_store_fpr32h(ctx
, fp0
, fd
);
12004 tcg_temp_free_i32(fp0
);
12005 tcg_temp_free_i32(fp1
);
12009 case OPC_CMP_UN_PS
:
12010 case OPC_CMP_EQ_PS
:
12011 case OPC_CMP_UEQ_PS
:
12012 case OPC_CMP_OLT_PS
:
12013 case OPC_CMP_ULT_PS
:
12014 case OPC_CMP_OLE_PS
:
12015 case OPC_CMP_ULE_PS
:
12016 case OPC_CMP_SF_PS
:
12017 case OPC_CMP_NGLE_PS
:
12018 case OPC_CMP_SEQ_PS
:
12019 case OPC_CMP_NGL_PS
:
12020 case OPC_CMP_LT_PS
:
12021 case OPC_CMP_NGE_PS
:
12022 case OPC_CMP_LE_PS
:
12023 case OPC_CMP_NGT_PS
:
12024 if (ctx
->opcode
& (1 << 6)) {
12025 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12027 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12031 MIPS_INVAL("farith");
12032 generate_exception_end(ctx
, EXCP_RI
);
12037 /* Coprocessor 3 (FPU) */
12038 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12039 int fd
, int fs
, int base
, int index
)
12041 TCGv t0
= tcg_temp_new();
12044 gen_load_gpr(t0
, index
);
12045 } else if (index
== 0) {
12046 gen_load_gpr(t0
, base
);
12048 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12050 /* Don't do NOP if destination is zero: we must perform the actual
12056 TCGv_i32 fp0
= tcg_temp_new_i32();
12058 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12059 tcg_gen_trunc_tl_i32(fp0
, t0
);
12060 gen_store_fpr32(ctx
, fp0
, fd
);
12061 tcg_temp_free_i32(fp0
);
12066 check_cp1_registers(ctx
, fd
);
12068 TCGv_i64 fp0
= tcg_temp_new_i64();
12069 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12070 gen_store_fpr64(ctx
, fp0
, fd
);
12071 tcg_temp_free_i64(fp0
);
12075 check_cp1_64bitmode(ctx
);
12076 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12078 TCGv_i64 fp0
= tcg_temp_new_i64();
12080 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12081 gen_store_fpr64(ctx
, fp0
, fd
);
12082 tcg_temp_free_i64(fp0
);
12088 TCGv_i32 fp0
= tcg_temp_new_i32();
12089 gen_load_fpr32(ctx
, fp0
, fs
);
12090 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12091 tcg_temp_free_i32(fp0
);
12096 check_cp1_registers(ctx
, fs
);
12098 TCGv_i64 fp0
= tcg_temp_new_i64();
12099 gen_load_fpr64(ctx
, fp0
, fs
);
12100 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12101 tcg_temp_free_i64(fp0
);
12105 check_cp1_64bitmode(ctx
);
12106 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12108 TCGv_i64 fp0
= tcg_temp_new_i64();
12109 gen_load_fpr64(ctx
, fp0
, fs
);
12110 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12111 tcg_temp_free_i64(fp0
);
12118 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12119 int fd
, int fr
, int fs
, int ft
)
12125 TCGv t0
= tcg_temp_local_new();
12126 TCGv_i32 fp
= tcg_temp_new_i32();
12127 TCGv_i32 fph
= tcg_temp_new_i32();
12128 TCGLabel
*l1
= gen_new_label();
12129 TCGLabel
*l2
= gen_new_label();
12131 gen_load_gpr(t0
, fr
);
12132 tcg_gen_andi_tl(t0
, t0
, 0x7);
12134 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12135 gen_load_fpr32(ctx
, fp
, fs
);
12136 gen_load_fpr32h(ctx
, fph
, fs
);
12137 gen_store_fpr32(ctx
, fp
, fd
);
12138 gen_store_fpr32h(ctx
, fph
, fd
);
12141 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12143 #ifdef TARGET_WORDS_BIGENDIAN
12144 gen_load_fpr32(ctx
, fp
, fs
);
12145 gen_load_fpr32h(ctx
, fph
, ft
);
12146 gen_store_fpr32h(ctx
, fp
, fd
);
12147 gen_store_fpr32(ctx
, fph
, fd
);
12149 gen_load_fpr32h(ctx
, fph
, fs
);
12150 gen_load_fpr32(ctx
, fp
, ft
);
12151 gen_store_fpr32(ctx
, fph
, fd
);
12152 gen_store_fpr32h(ctx
, fp
, fd
);
12155 tcg_temp_free_i32(fp
);
12156 tcg_temp_free_i32(fph
);
12162 TCGv_i32 fp0
= tcg_temp_new_i32();
12163 TCGv_i32 fp1
= tcg_temp_new_i32();
12164 TCGv_i32 fp2
= tcg_temp_new_i32();
12166 gen_load_fpr32(ctx
, fp0
, fs
);
12167 gen_load_fpr32(ctx
, fp1
, ft
);
12168 gen_load_fpr32(ctx
, fp2
, fr
);
12169 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12170 tcg_temp_free_i32(fp0
);
12171 tcg_temp_free_i32(fp1
);
12172 gen_store_fpr32(ctx
, fp2
, fd
);
12173 tcg_temp_free_i32(fp2
);
12178 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12180 TCGv_i64 fp0
= tcg_temp_new_i64();
12181 TCGv_i64 fp1
= tcg_temp_new_i64();
12182 TCGv_i64 fp2
= tcg_temp_new_i64();
12184 gen_load_fpr64(ctx
, fp0
, fs
);
12185 gen_load_fpr64(ctx
, fp1
, ft
);
12186 gen_load_fpr64(ctx
, fp2
, fr
);
12187 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12188 tcg_temp_free_i64(fp0
);
12189 tcg_temp_free_i64(fp1
);
12190 gen_store_fpr64(ctx
, fp2
, fd
);
12191 tcg_temp_free_i64(fp2
);
12197 TCGv_i64 fp0
= tcg_temp_new_i64();
12198 TCGv_i64 fp1
= tcg_temp_new_i64();
12199 TCGv_i64 fp2
= tcg_temp_new_i64();
12201 gen_load_fpr64(ctx
, fp0
, fs
);
12202 gen_load_fpr64(ctx
, fp1
, ft
);
12203 gen_load_fpr64(ctx
, fp2
, fr
);
12204 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12205 tcg_temp_free_i64(fp0
);
12206 tcg_temp_free_i64(fp1
);
12207 gen_store_fpr64(ctx
, fp2
, fd
);
12208 tcg_temp_free_i64(fp2
);
12214 TCGv_i32 fp0
= tcg_temp_new_i32();
12215 TCGv_i32 fp1
= tcg_temp_new_i32();
12216 TCGv_i32 fp2
= tcg_temp_new_i32();
12218 gen_load_fpr32(ctx
, fp0
, fs
);
12219 gen_load_fpr32(ctx
, fp1
, ft
);
12220 gen_load_fpr32(ctx
, fp2
, fr
);
12221 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12222 tcg_temp_free_i32(fp0
);
12223 tcg_temp_free_i32(fp1
);
12224 gen_store_fpr32(ctx
, fp2
, fd
);
12225 tcg_temp_free_i32(fp2
);
12230 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12232 TCGv_i64 fp0
= tcg_temp_new_i64();
12233 TCGv_i64 fp1
= tcg_temp_new_i64();
12234 TCGv_i64 fp2
= tcg_temp_new_i64();
12236 gen_load_fpr64(ctx
, fp0
, fs
);
12237 gen_load_fpr64(ctx
, fp1
, ft
);
12238 gen_load_fpr64(ctx
, fp2
, fr
);
12239 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12240 tcg_temp_free_i64(fp0
);
12241 tcg_temp_free_i64(fp1
);
12242 gen_store_fpr64(ctx
, fp2
, fd
);
12243 tcg_temp_free_i64(fp2
);
12249 TCGv_i64 fp0
= tcg_temp_new_i64();
12250 TCGv_i64 fp1
= tcg_temp_new_i64();
12251 TCGv_i64 fp2
= tcg_temp_new_i64();
12253 gen_load_fpr64(ctx
, fp0
, fs
);
12254 gen_load_fpr64(ctx
, fp1
, ft
);
12255 gen_load_fpr64(ctx
, fp2
, fr
);
12256 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12257 tcg_temp_free_i64(fp0
);
12258 tcg_temp_free_i64(fp1
);
12259 gen_store_fpr64(ctx
, fp2
, fd
);
12260 tcg_temp_free_i64(fp2
);
12266 TCGv_i32 fp0
= tcg_temp_new_i32();
12267 TCGv_i32 fp1
= tcg_temp_new_i32();
12268 TCGv_i32 fp2
= tcg_temp_new_i32();
12270 gen_load_fpr32(ctx
, fp0
, fs
);
12271 gen_load_fpr32(ctx
, fp1
, ft
);
12272 gen_load_fpr32(ctx
, fp2
, fr
);
12273 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12274 tcg_temp_free_i32(fp0
);
12275 tcg_temp_free_i32(fp1
);
12276 gen_store_fpr32(ctx
, fp2
, fd
);
12277 tcg_temp_free_i32(fp2
);
12282 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12284 TCGv_i64 fp0
= tcg_temp_new_i64();
12285 TCGv_i64 fp1
= tcg_temp_new_i64();
12286 TCGv_i64 fp2
= tcg_temp_new_i64();
12288 gen_load_fpr64(ctx
, fp0
, fs
);
12289 gen_load_fpr64(ctx
, fp1
, ft
);
12290 gen_load_fpr64(ctx
, fp2
, fr
);
12291 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12292 tcg_temp_free_i64(fp0
);
12293 tcg_temp_free_i64(fp1
);
12294 gen_store_fpr64(ctx
, fp2
, fd
);
12295 tcg_temp_free_i64(fp2
);
12301 TCGv_i64 fp0
= tcg_temp_new_i64();
12302 TCGv_i64 fp1
= tcg_temp_new_i64();
12303 TCGv_i64 fp2
= tcg_temp_new_i64();
12305 gen_load_fpr64(ctx
, fp0
, fs
);
12306 gen_load_fpr64(ctx
, fp1
, ft
);
12307 gen_load_fpr64(ctx
, fp2
, fr
);
12308 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12309 tcg_temp_free_i64(fp0
);
12310 tcg_temp_free_i64(fp1
);
12311 gen_store_fpr64(ctx
, fp2
, fd
);
12312 tcg_temp_free_i64(fp2
);
12318 TCGv_i32 fp0
= tcg_temp_new_i32();
12319 TCGv_i32 fp1
= tcg_temp_new_i32();
12320 TCGv_i32 fp2
= tcg_temp_new_i32();
12322 gen_load_fpr32(ctx
, fp0
, fs
);
12323 gen_load_fpr32(ctx
, fp1
, ft
);
12324 gen_load_fpr32(ctx
, fp2
, fr
);
12325 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12326 tcg_temp_free_i32(fp0
);
12327 tcg_temp_free_i32(fp1
);
12328 gen_store_fpr32(ctx
, fp2
, fd
);
12329 tcg_temp_free_i32(fp2
);
12334 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12336 TCGv_i64 fp0
= tcg_temp_new_i64();
12337 TCGv_i64 fp1
= tcg_temp_new_i64();
12338 TCGv_i64 fp2
= tcg_temp_new_i64();
12340 gen_load_fpr64(ctx
, fp0
, fs
);
12341 gen_load_fpr64(ctx
, fp1
, ft
);
12342 gen_load_fpr64(ctx
, fp2
, fr
);
12343 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12344 tcg_temp_free_i64(fp0
);
12345 tcg_temp_free_i64(fp1
);
12346 gen_store_fpr64(ctx
, fp2
, fd
);
12347 tcg_temp_free_i64(fp2
);
12353 TCGv_i64 fp0
= tcg_temp_new_i64();
12354 TCGv_i64 fp1
= tcg_temp_new_i64();
12355 TCGv_i64 fp2
= tcg_temp_new_i64();
12357 gen_load_fpr64(ctx
, fp0
, fs
);
12358 gen_load_fpr64(ctx
, fp1
, ft
);
12359 gen_load_fpr64(ctx
, fp2
, fr
);
12360 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12361 tcg_temp_free_i64(fp0
);
12362 tcg_temp_free_i64(fp1
);
12363 gen_store_fpr64(ctx
, fp2
, fd
);
12364 tcg_temp_free_i64(fp2
);
12368 MIPS_INVAL("flt3_arith");
12369 generate_exception_end(ctx
, EXCP_RI
);
12374 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12378 #if !defined(CONFIG_USER_ONLY)
12379 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12380 Therefore only check the ISA in system mode. */
12381 check_insn(ctx
, ISA_MIPS32R2
);
12383 t0
= tcg_temp_new();
12387 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12388 gen_store_gpr(t0
, rt
);
12391 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12392 gen_store_gpr(t0
, rt
);
12395 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12398 gen_helper_rdhwr_cc(t0
, cpu_env
);
12399 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12402 gen_store_gpr(t0
, rt
);
12403 /* Break the TB to be able to take timer interrupts immediately
12404 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12405 we break completely out of translated code. */
12406 gen_save_pc(ctx
->base
.pc_next
+ 4);
12407 ctx
->base
.is_jmp
= DISAS_EXIT
;
12410 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12411 gen_store_gpr(t0
, rt
);
12414 check_insn(ctx
, ISA_MIPS32R6
);
12416 /* Performance counter registers are not implemented other than
12417 * control register 0.
12419 generate_exception(ctx
, EXCP_RI
);
12421 gen_helper_rdhwr_performance(t0
, cpu_env
);
12422 gen_store_gpr(t0
, rt
);
12425 check_insn(ctx
, ISA_MIPS32R6
);
12426 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12427 gen_store_gpr(t0
, rt
);
12430 #if defined(CONFIG_USER_ONLY)
12431 tcg_gen_ld_tl(t0
, cpu_env
,
12432 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12433 gen_store_gpr(t0
, rt
);
12436 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12437 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12438 tcg_gen_ld_tl(t0
, cpu_env
,
12439 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12440 gen_store_gpr(t0
, rt
);
12442 generate_exception_end(ctx
, EXCP_RI
);
12446 default: /* Invalid */
12447 MIPS_INVAL("rdhwr");
12448 generate_exception_end(ctx
, EXCP_RI
);
12454 static inline void clear_branch_hflags(DisasContext
*ctx
)
12456 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12457 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12458 save_cpu_state(ctx
, 0);
12460 /* it is not safe to save ctx->hflags as hflags may be changed
12461 in execution time by the instruction in delay / forbidden slot. */
12462 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12466 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12468 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12469 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12470 /* Branches completion */
12471 clear_branch_hflags(ctx
);
12472 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12473 /* FIXME: Need to clear can_do_io. */
12474 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12475 case MIPS_HFLAG_FBNSLOT
:
12476 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12479 /* unconditional branch */
12480 if (proc_hflags
& MIPS_HFLAG_BX
) {
12481 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12483 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12485 case MIPS_HFLAG_BL
:
12486 /* blikely taken case */
12487 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12489 case MIPS_HFLAG_BC
:
12490 /* Conditional branch */
12492 TCGLabel
*l1
= gen_new_label();
12494 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12495 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12497 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12500 case MIPS_HFLAG_BR
:
12501 /* unconditional branch to register */
12502 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12503 TCGv t0
= tcg_temp_new();
12504 TCGv_i32 t1
= tcg_temp_new_i32();
12506 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12507 tcg_gen_trunc_tl_i32(t1
, t0
);
12509 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12510 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12511 tcg_gen_or_i32(hflags
, hflags
, t1
);
12512 tcg_temp_free_i32(t1
);
12514 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12516 tcg_gen_mov_tl(cpu_PC
, btarget
);
12518 if (ctx
->base
.singlestep_enabled
) {
12519 save_cpu_state(ctx
, 0);
12520 gen_helper_raise_exception_debug(cpu_env
);
12522 tcg_gen_lookup_and_goto_ptr();
12525 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12531 /* Compact Branches */
12532 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12533 int rs
, int rt
, int32_t offset
)
12535 int bcond_compute
= 0;
12536 TCGv t0
= tcg_temp_new();
12537 TCGv t1
= tcg_temp_new();
12538 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12540 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12541 #ifdef MIPS_DEBUG_DISAS
12542 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12543 "\n", ctx
->base
.pc_next
);
12545 generate_exception_end(ctx
, EXCP_RI
);
12549 /* Load needed operands and calculate btarget */
12551 /* compact branch */
12552 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12553 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12554 gen_load_gpr(t0
, rs
);
12555 gen_load_gpr(t1
, rt
);
12557 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12558 if (rs
<= rt
&& rs
== 0) {
12559 /* OPC_BEQZALC, OPC_BNEZALC */
12560 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12563 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12564 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12565 gen_load_gpr(t0
, rs
);
12566 gen_load_gpr(t1
, rt
);
12568 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12570 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12571 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12572 if (rs
== 0 || rs
== rt
) {
12573 /* OPC_BLEZALC, OPC_BGEZALC */
12574 /* OPC_BGTZALC, OPC_BLTZALC */
12575 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12577 gen_load_gpr(t0
, rs
);
12578 gen_load_gpr(t1
, rt
);
12580 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12584 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12589 /* OPC_BEQZC, OPC_BNEZC */
12590 gen_load_gpr(t0
, rs
);
12592 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12594 /* OPC_JIC, OPC_JIALC */
12595 TCGv tbase
= tcg_temp_new();
12596 TCGv toffset
= tcg_temp_new();
12598 gen_load_gpr(tbase
, rt
);
12599 tcg_gen_movi_tl(toffset
, offset
);
12600 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12601 tcg_temp_free(tbase
);
12602 tcg_temp_free(toffset
);
12606 MIPS_INVAL("Compact branch/jump");
12607 generate_exception_end(ctx
, EXCP_RI
);
12611 if (bcond_compute
== 0) {
12612 /* Uncoditional compact branch */
12615 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12618 ctx
->hflags
|= MIPS_HFLAG_BR
;
12621 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12624 ctx
->hflags
|= MIPS_HFLAG_B
;
12627 MIPS_INVAL("Compact branch/jump");
12628 generate_exception_end(ctx
, EXCP_RI
);
12632 /* Generating branch here as compact branches don't have delay slot */
12633 gen_branch(ctx
, 4);
12635 /* Conditional compact branch */
12636 TCGLabel
*fs
= gen_new_label();
12637 save_cpu_state(ctx
, 0);
12640 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12641 if (rs
== 0 && rt
!= 0) {
12643 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12644 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12646 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12649 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12652 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12653 if (rs
== 0 && rt
!= 0) {
12655 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12656 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12658 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12661 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12664 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12665 if (rs
== 0 && rt
!= 0) {
12667 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12668 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12670 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12673 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12676 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12677 if (rs
== 0 && rt
!= 0) {
12679 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12680 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12682 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12685 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12688 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12689 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12691 /* OPC_BOVC, OPC_BNVC */
12692 TCGv t2
= tcg_temp_new();
12693 TCGv t3
= tcg_temp_new();
12694 TCGv t4
= tcg_temp_new();
12695 TCGv input_overflow
= tcg_temp_new();
12697 gen_load_gpr(t0
, rs
);
12698 gen_load_gpr(t1
, rt
);
12699 tcg_gen_ext32s_tl(t2
, t0
);
12700 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12701 tcg_gen_ext32s_tl(t3
, t1
);
12702 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12703 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12705 tcg_gen_add_tl(t4
, t2
, t3
);
12706 tcg_gen_ext32s_tl(t4
, t4
);
12707 tcg_gen_xor_tl(t2
, t2
, t3
);
12708 tcg_gen_xor_tl(t3
, t4
, t3
);
12709 tcg_gen_andc_tl(t2
, t3
, t2
);
12710 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12711 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12712 if (opc
== OPC_BOVC
) {
12714 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12717 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12719 tcg_temp_free(input_overflow
);
12723 } else if (rs
< rt
&& rs
== 0) {
12724 /* OPC_BEQZALC, OPC_BNEZALC */
12725 if (opc
== OPC_BEQZALC
) {
12727 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12730 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12733 /* OPC_BEQC, OPC_BNEC */
12734 if (opc
== OPC_BEQC
) {
12736 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12739 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12744 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12747 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12750 MIPS_INVAL("Compact conditional branch/jump");
12751 generate_exception_end(ctx
, EXCP_RI
);
12755 /* Generating branch here as compact branches don't have delay slot */
12756 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12759 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12767 /* ISA extensions (ASEs) */
12768 /* MIPS16 extension to MIPS32 */
12770 /* MIPS16 major opcodes */
12772 M16_OPC_ADDIUSP
= 0x00,
12773 M16_OPC_ADDIUPC
= 0x01,
12775 M16_OPC_JAL
= 0x03,
12776 M16_OPC_BEQZ
= 0x04,
12777 M16_OPC_BNEQZ
= 0x05,
12778 M16_OPC_SHIFT
= 0x06,
12780 M16_OPC_RRIA
= 0x08,
12781 M16_OPC_ADDIU8
= 0x09,
12782 M16_OPC_SLTI
= 0x0a,
12783 M16_OPC_SLTIU
= 0x0b,
12786 M16_OPC_CMPI
= 0x0e,
12790 M16_OPC_LWSP
= 0x12,
12792 M16_OPC_LBU
= 0x14,
12793 M16_OPC_LHU
= 0x15,
12794 M16_OPC_LWPC
= 0x16,
12795 M16_OPC_LWU
= 0x17,
12798 M16_OPC_SWSP
= 0x1a,
12800 M16_OPC_RRR
= 0x1c,
12802 M16_OPC_EXTEND
= 0x1e,
12806 /* I8 funct field */
12825 /* RR funct field */
12859 /* I64 funct field */
12867 I64_DADDIUPC
= 0x6,
12871 /* RR ry field for CNVT */
12873 RR_RY_CNVT_ZEB
= 0x0,
12874 RR_RY_CNVT_ZEH
= 0x1,
12875 RR_RY_CNVT_ZEW
= 0x2,
12876 RR_RY_CNVT_SEB
= 0x4,
12877 RR_RY_CNVT_SEH
= 0x5,
12878 RR_RY_CNVT_SEW
= 0x6,
12881 static int xlat (int r
)
12883 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
12888 static void gen_mips16_save (DisasContext
*ctx
,
12889 int xsregs
, int aregs
,
12890 int do_ra
, int do_s0
, int do_s1
,
12893 TCGv t0
= tcg_temp_new();
12894 TCGv t1
= tcg_temp_new();
12895 TCGv t2
= tcg_temp_new();
12925 generate_exception_end(ctx
, EXCP_RI
);
12931 gen_base_offset_addr(ctx
, t0
, 29, 12);
12932 gen_load_gpr(t1
, 7);
12933 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12936 gen_base_offset_addr(ctx
, t0
, 29, 8);
12937 gen_load_gpr(t1
, 6);
12938 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12941 gen_base_offset_addr(ctx
, t0
, 29, 4);
12942 gen_load_gpr(t1
, 5);
12943 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12946 gen_base_offset_addr(ctx
, t0
, 29, 0);
12947 gen_load_gpr(t1
, 4);
12948 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12951 gen_load_gpr(t0
, 29);
12953 #define DECR_AND_STORE(reg) do { \
12954 tcg_gen_movi_tl(t2, -4); \
12955 gen_op_addr_add(ctx, t0, t0, t2); \
12956 gen_load_gpr(t1, reg); \
12957 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
12961 DECR_AND_STORE(31);
12966 DECR_AND_STORE(30);
12969 DECR_AND_STORE(23);
12972 DECR_AND_STORE(22);
12975 DECR_AND_STORE(21);
12978 DECR_AND_STORE(20);
12981 DECR_AND_STORE(19);
12984 DECR_AND_STORE(18);
12988 DECR_AND_STORE(17);
12991 DECR_AND_STORE(16);
13021 generate_exception_end(ctx
, EXCP_RI
);
13037 #undef DECR_AND_STORE
13039 tcg_gen_movi_tl(t2
, -framesize
);
13040 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13046 static void gen_mips16_restore (DisasContext
*ctx
,
13047 int xsregs
, int aregs
,
13048 int do_ra
, int do_s0
, int do_s1
,
13052 TCGv t0
= tcg_temp_new();
13053 TCGv t1
= tcg_temp_new();
13054 TCGv t2
= tcg_temp_new();
13056 tcg_gen_movi_tl(t2
, framesize
);
13057 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13059 #define DECR_AND_LOAD(reg) do { \
13060 tcg_gen_movi_tl(t2, -4); \
13061 gen_op_addr_add(ctx, t0, t0, t2); \
13062 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13063 gen_store_gpr(t1, reg); \
13127 generate_exception_end(ctx
, EXCP_RI
);
13143 #undef DECR_AND_LOAD
13145 tcg_gen_movi_tl(t2
, framesize
);
13146 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13152 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13153 int is_64_bit
, int extended
)
13157 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13158 generate_exception_end(ctx
, EXCP_RI
);
13162 t0
= tcg_temp_new();
13164 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13165 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13167 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13173 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13176 TCGv_i32 t0
= tcg_const_i32(op
);
13177 TCGv t1
= tcg_temp_new();
13178 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13179 gen_helper_cache(cpu_env
, t1
, t0
);
13182 #if defined(TARGET_MIPS64)
13183 static void decode_i64_mips16 (DisasContext
*ctx
,
13184 int ry
, int funct
, int16_t offset
,
13189 check_insn(ctx
, ISA_MIPS3
);
13190 check_mips_64(ctx
);
13191 offset
= extended
? offset
: offset
<< 3;
13192 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13195 check_insn(ctx
, ISA_MIPS3
);
13196 check_mips_64(ctx
);
13197 offset
= extended
? offset
: offset
<< 3;
13198 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13201 check_insn(ctx
, ISA_MIPS3
);
13202 check_mips_64(ctx
);
13203 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13204 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13207 check_insn(ctx
, ISA_MIPS3
);
13208 check_mips_64(ctx
);
13209 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13210 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13213 check_insn(ctx
, ISA_MIPS3
);
13214 check_mips_64(ctx
);
13215 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13216 generate_exception_end(ctx
, EXCP_RI
);
13218 offset
= extended
? offset
: offset
<< 3;
13219 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13223 check_insn(ctx
, ISA_MIPS3
);
13224 check_mips_64(ctx
);
13225 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13226 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13229 check_insn(ctx
, ISA_MIPS3
);
13230 check_mips_64(ctx
);
13231 offset
= extended
? offset
: offset
<< 2;
13232 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13235 check_insn(ctx
, ISA_MIPS3
);
13236 check_mips_64(ctx
);
13237 offset
= extended
? offset
: offset
<< 2;
13238 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13244 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13246 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13247 int op
, rx
, ry
, funct
, sa
;
13248 int16_t imm
, offset
;
13250 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13251 op
= (ctx
->opcode
>> 11) & 0x1f;
13252 sa
= (ctx
->opcode
>> 22) & 0x1f;
13253 funct
= (ctx
->opcode
>> 8) & 0x7;
13254 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13255 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13256 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13257 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13258 | (ctx
->opcode
& 0x1f));
13260 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13263 case M16_OPC_ADDIUSP
:
13264 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13266 case M16_OPC_ADDIUPC
:
13267 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13270 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13271 /* No delay slot, so just process as a normal instruction */
13274 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13275 /* No delay slot, so just process as a normal instruction */
13277 case M16_OPC_BNEQZ
:
13278 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13279 /* No delay slot, so just process as a normal instruction */
13281 case M16_OPC_SHIFT
:
13282 switch (ctx
->opcode
& 0x3) {
13284 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13287 #if defined(TARGET_MIPS64)
13288 check_mips_64(ctx
);
13289 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13291 generate_exception_end(ctx
, EXCP_RI
);
13295 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13298 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13302 #if defined(TARGET_MIPS64)
13304 check_insn(ctx
, ISA_MIPS3
);
13305 check_mips_64(ctx
);
13306 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13310 imm
= ctx
->opcode
& 0xf;
13311 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13312 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13313 imm
= (int16_t) (imm
<< 1) >> 1;
13314 if ((ctx
->opcode
>> 4) & 0x1) {
13315 #if defined(TARGET_MIPS64)
13316 check_mips_64(ctx
);
13317 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13319 generate_exception_end(ctx
, EXCP_RI
);
13322 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13325 case M16_OPC_ADDIU8
:
13326 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13329 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13331 case M16_OPC_SLTIU
:
13332 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13337 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13340 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13343 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13346 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13349 check_insn(ctx
, ISA_MIPS32
);
13351 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13352 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13353 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13354 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13355 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13356 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13357 | (ctx
->opcode
& 0xf)) << 3;
13359 if (ctx
->opcode
& (1 << 7)) {
13360 gen_mips16_save(ctx
, xsregs
, aregs
,
13361 do_ra
, do_s0
, do_s1
,
13364 gen_mips16_restore(ctx
, xsregs
, aregs
,
13365 do_ra
, do_s0
, do_s1
,
13371 generate_exception_end(ctx
, EXCP_RI
);
13376 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13379 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13381 #if defined(TARGET_MIPS64)
13383 check_insn(ctx
, ISA_MIPS3
);
13384 check_mips_64(ctx
);
13385 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13389 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13392 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13395 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13398 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13401 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13404 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13407 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13409 #if defined(TARGET_MIPS64)
13411 check_insn(ctx
, ISA_MIPS3
);
13412 check_mips_64(ctx
);
13413 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13417 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13420 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13423 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13426 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13428 #if defined(TARGET_MIPS64)
13430 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13434 generate_exception_end(ctx
, EXCP_RI
);
13441 static inline bool is_uhi(int sdbbp_code
)
13443 #ifdef CONFIG_USER_ONLY
13446 return semihosting_enabled() && sdbbp_code
== 1;
13450 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13454 int op
, cnvt_op
, op1
, offset
;
13458 op
= (ctx
->opcode
>> 11) & 0x1f;
13459 sa
= (ctx
->opcode
>> 2) & 0x7;
13460 sa
= sa
== 0 ? 8 : sa
;
13461 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13462 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13463 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13464 op1
= offset
= ctx
->opcode
& 0x1f;
13469 case M16_OPC_ADDIUSP
:
13471 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13473 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13476 case M16_OPC_ADDIUPC
:
13477 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13480 offset
= (ctx
->opcode
& 0x7ff) << 1;
13481 offset
= (int16_t)(offset
<< 4) >> 4;
13482 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13483 /* No delay slot, so just process as a normal instruction */
13486 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13487 offset
= (((ctx
->opcode
& 0x1f) << 21)
13488 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13490 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13491 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13495 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13496 ((int8_t)ctx
->opcode
) << 1, 0);
13497 /* No delay slot, so just process as a normal instruction */
13499 case M16_OPC_BNEQZ
:
13500 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13501 ((int8_t)ctx
->opcode
) << 1, 0);
13502 /* No delay slot, so just process as a normal instruction */
13504 case M16_OPC_SHIFT
:
13505 switch (ctx
->opcode
& 0x3) {
13507 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13510 #if defined(TARGET_MIPS64)
13511 check_insn(ctx
, ISA_MIPS3
);
13512 check_mips_64(ctx
);
13513 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13515 generate_exception_end(ctx
, EXCP_RI
);
13519 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13522 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13526 #if defined(TARGET_MIPS64)
13528 check_insn(ctx
, ISA_MIPS3
);
13529 check_mips_64(ctx
);
13530 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13535 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13537 if ((ctx
->opcode
>> 4) & 1) {
13538 #if defined(TARGET_MIPS64)
13539 check_insn(ctx
, ISA_MIPS3
);
13540 check_mips_64(ctx
);
13541 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13543 generate_exception_end(ctx
, EXCP_RI
);
13546 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13550 case M16_OPC_ADDIU8
:
13552 int16_t imm
= (int8_t) ctx
->opcode
;
13554 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13559 int16_t imm
= (uint8_t) ctx
->opcode
;
13560 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13563 case M16_OPC_SLTIU
:
13565 int16_t imm
= (uint8_t) ctx
->opcode
;
13566 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13573 funct
= (ctx
->opcode
>> 8) & 0x7;
13576 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13577 ((int8_t)ctx
->opcode
) << 1, 0);
13580 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13581 ((int8_t)ctx
->opcode
) << 1, 0);
13584 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13587 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13588 ((int8_t)ctx
->opcode
) << 3);
13591 check_insn(ctx
, ISA_MIPS32
);
13593 int do_ra
= ctx
->opcode
& (1 << 6);
13594 int do_s0
= ctx
->opcode
& (1 << 5);
13595 int do_s1
= ctx
->opcode
& (1 << 4);
13596 int framesize
= ctx
->opcode
& 0xf;
13598 if (framesize
== 0) {
13601 framesize
= framesize
<< 3;
13604 if (ctx
->opcode
& (1 << 7)) {
13605 gen_mips16_save(ctx
, 0, 0,
13606 do_ra
, do_s0
, do_s1
, framesize
);
13608 gen_mips16_restore(ctx
, 0, 0,
13609 do_ra
, do_s0
, do_s1
, framesize
);
13615 int rz
= xlat(ctx
->opcode
& 0x7);
13617 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13618 ((ctx
->opcode
>> 5) & 0x7);
13619 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13623 reg32
= ctx
->opcode
& 0x1f;
13624 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13627 generate_exception_end(ctx
, EXCP_RI
);
13634 int16_t imm
= (uint8_t) ctx
->opcode
;
13636 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13641 int16_t imm
= (uint8_t) ctx
->opcode
;
13642 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13645 #if defined(TARGET_MIPS64)
13647 check_insn(ctx
, ISA_MIPS3
);
13648 check_mips_64(ctx
);
13649 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13653 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13656 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13659 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13662 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13665 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13668 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13671 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13673 #if defined (TARGET_MIPS64)
13675 check_insn(ctx
, ISA_MIPS3
);
13676 check_mips_64(ctx
);
13677 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13681 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13684 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13687 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13690 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13694 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13697 switch (ctx
->opcode
& 0x3) {
13699 mips32_op
= OPC_ADDU
;
13702 mips32_op
= OPC_SUBU
;
13704 #if defined(TARGET_MIPS64)
13706 mips32_op
= OPC_DADDU
;
13707 check_insn(ctx
, ISA_MIPS3
);
13708 check_mips_64(ctx
);
13711 mips32_op
= OPC_DSUBU
;
13712 check_insn(ctx
, ISA_MIPS3
);
13713 check_mips_64(ctx
);
13717 generate_exception_end(ctx
, EXCP_RI
);
13721 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13730 int nd
= (ctx
->opcode
>> 7) & 0x1;
13731 int link
= (ctx
->opcode
>> 6) & 0x1;
13732 int ra
= (ctx
->opcode
>> 5) & 0x1;
13735 check_insn(ctx
, ISA_MIPS32
);
13744 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13749 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13750 gen_helper_do_semihosting(cpu_env
);
13752 /* XXX: not clear which exception should be raised
13753 * when in debug mode...
13755 check_insn(ctx
, ISA_MIPS32
);
13756 generate_exception_end(ctx
, EXCP_DBp
);
13760 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13763 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13766 generate_exception_end(ctx
, EXCP_BREAK
);
13769 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13772 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13775 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13777 #if defined (TARGET_MIPS64)
13779 check_insn(ctx
, ISA_MIPS3
);
13780 check_mips_64(ctx
);
13781 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13785 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13788 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13791 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13794 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13797 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13800 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13803 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13806 check_insn(ctx
, ISA_MIPS32
);
13808 case RR_RY_CNVT_ZEB
:
13809 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13811 case RR_RY_CNVT_ZEH
:
13812 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13814 case RR_RY_CNVT_SEB
:
13815 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13817 case RR_RY_CNVT_SEH
:
13818 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13820 #if defined (TARGET_MIPS64)
13821 case RR_RY_CNVT_ZEW
:
13822 check_insn(ctx
, ISA_MIPS64
);
13823 check_mips_64(ctx
);
13824 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13826 case RR_RY_CNVT_SEW
:
13827 check_insn(ctx
, ISA_MIPS64
);
13828 check_mips_64(ctx
);
13829 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13833 generate_exception_end(ctx
, EXCP_RI
);
13838 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13840 #if defined (TARGET_MIPS64)
13842 check_insn(ctx
, ISA_MIPS3
);
13843 check_mips_64(ctx
);
13844 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13847 check_insn(ctx
, ISA_MIPS3
);
13848 check_mips_64(ctx
);
13849 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13852 check_insn(ctx
, ISA_MIPS3
);
13853 check_mips_64(ctx
);
13854 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13857 check_insn(ctx
, ISA_MIPS3
);
13858 check_mips_64(ctx
);
13859 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13863 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13866 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13869 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13872 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13874 #if defined (TARGET_MIPS64)
13876 check_insn(ctx
, ISA_MIPS3
);
13877 check_mips_64(ctx
);
13878 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13881 check_insn(ctx
, ISA_MIPS3
);
13882 check_mips_64(ctx
);
13883 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13886 check_insn(ctx
, ISA_MIPS3
);
13887 check_mips_64(ctx
);
13888 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13891 check_insn(ctx
, ISA_MIPS3
);
13892 check_mips_64(ctx
);
13893 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13897 generate_exception_end(ctx
, EXCP_RI
);
13901 case M16_OPC_EXTEND
:
13902 decode_extended_mips16_opc(env
, ctx
);
13905 #if defined(TARGET_MIPS64)
13907 funct
= (ctx
->opcode
>> 8) & 0x7;
13908 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13912 generate_exception_end(ctx
, EXCP_RI
);
13919 /* microMIPS extension to MIPS32/MIPS64 */
13922 * microMIPS32/microMIPS64 major opcodes
13924 * 1. MIPS Architecture for Programmers Volume II-B:
13925 * The microMIPS32 Instruction Set (Revision 3.05)
13927 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13929 * 2. MIPS Architecture For Programmers Volume II-A:
13930 * The MIPS64 Instruction Set (Revision 3.51)
13960 POOL32S
= 0x16, /* MIPS64 */
13961 DADDIU32
= 0x17, /* MIPS64 */
13990 /* 0x29 is reserved */
14003 /* 0x31 is reserved */
14016 SD32
= 0x36, /* MIPS64 */
14017 LD32
= 0x37, /* MIPS64 */
14019 /* 0x39 is reserved */
14035 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14057 /* POOL32A encoding of minor opcode field */
14060 /* These opcodes are distinguished only by bits 9..6; those bits are
14061 * what are recorded below. */
14098 /* The following can be distinguished by their lower 6 bits. */
14108 /* POOL32AXF encoding of minor opcode field extension */
14111 * 1. MIPS Architecture for Programmers Volume II-B:
14112 * The microMIPS32 Instruction Set (Revision 3.05)
14114 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14116 * 2. MIPS Architecture for Programmers VolumeIV-e:
14117 * The MIPS DSP Application-Specific Extension
14118 * to the microMIPS32 Architecture (Revision 2.34)
14120 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14135 /* begin of microMIPS32 DSP */
14137 /* bits 13..12 for 0x01 */
14143 /* bits 13..12 for 0x2a */
14149 /* bits 13..12 for 0x32 */
14153 /* end of microMIPS32 DSP */
14155 /* bits 15..12 for 0x2c */
14172 /* bits 15..12 for 0x34 */
14180 /* bits 15..12 for 0x3c */
14182 JR
= 0x0, /* alias */
14190 /* bits 15..12 for 0x05 */
14194 /* bits 15..12 for 0x0d */
14206 /* bits 15..12 for 0x15 */
14212 /* bits 15..12 for 0x1d */
14216 /* bits 15..12 for 0x2d */
14221 /* bits 15..12 for 0x35 */
14228 /* POOL32B encoding of minor opcode field (bits 15..12) */
14244 /* POOL32C encoding of minor opcode field (bits 15..12) */
14265 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14278 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14291 /* POOL32F encoding of minor opcode field (bits 5..0) */
14294 /* These are the bit 7..6 values */
14303 /* These are the bit 8..6 values */
14328 MOVZ_FMT_05
= 0x05,
14362 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14369 /* POOL32Fxf encoding of minor opcode extension field */
14407 /* POOL32I encoding of minor opcode field (bits 25..21) */
14437 /* These overlap and are distinguished by bit16 of the instruction */
14446 /* POOL16A encoding of minor opcode field */
14453 /* POOL16B encoding of minor opcode field */
14460 /* POOL16C encoding of minor opcode field */
14480 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14504 /* POOL16D encoding of minor opcode field */
14511 /* POOL16E encoding of minor opcode field */
14518 static int mmreg (int r
)
14520 static const int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
14525 /* Used for 16-bit store instructions. */
14526 static int mmreg2 (int r
)
14528 static const int map
[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
14533 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14534 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14535 #define uMIPS_RS2(op) uMIPS_RS(op)
14536 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14537 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14538 #define uMIPS_RS5(op) (op & 0x1f)
14540 /* Signed immediate */
14541 #define SIMM(op, start, width) \
14542 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14545 /* Zero-extended immediate */
14546 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14548 static void gen_addiur1sp(DisasContext
*ctx
)
14550 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14552 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14555 static void gen_addiur2(DisasContext
*ctx
)
14557 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14558 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14559 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14561 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14564 static void gen_addiusp(DisasContext
*ctx
)
14566 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14569 if (encoded
<= 1) {
14570 decoded
= 256 + encoded
;
14571 } else if (encoded
<= 255) {
14573 } else if (encoded
<= 509) {
14574 decoded
= encoded
- 512;
14576 decoded
= encoded
- 768;
14579 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14582 static void gen_addius5(DisasContext
*ctx
)
14584 int imm
= SIMM(ctx
->opcode
, 1, 4);
14585 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14587 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14590 static void gen_andi16(DisasContext
*ctx
)
14592 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14593 31, 32, 63, 64, 255, 32768, 65535 };
14594 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14595 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14596 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14598 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14601 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14602 int base
, int16_t offset
)
14607 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14608 generate_exception_end(ctx
, EXCP_RI
);
14612 t0
= tcg_temp_new();
14614 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14616 t1
= tcg_const_tl(reglist
);
14617 t2
= tcg_const_i32(ctx
->mem_idx
);
14619 save_cpu_state(ctx
, 1);
14622 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14625 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14627 #ifdef TARGET_MIPS64
14629 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14632 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14638 tcg_temp_free_i32(t2
);
14642 static void gen_pool16c_insn(DisasContext
*ctx
)
14644 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14645 int rs
= mmreg(ctx
->opcode
& 0x7);
14647 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14652 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14658 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14664 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14670 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14677 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14678 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14680 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14689 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14690 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14692 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14699 int reg
= ctx
->opcode
& 0x1f;
14701 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14707 int reg
= ctx
->opcode
& 0x1f;
14708 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14709 /* Let normal delay slot handling in our caller take us
14710 to the branch target. */
14715 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14716 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14720 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14721 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14725 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14729 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14732 generate_exception_end(ctx
, EXCP_BREAK
);
14735 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14736 gen_helper_do_semihosting(cpu_env
);
14738 /* XXX: not clear which exception should be raised
14739 * when in debug mode...
14741 check_insn(ctx
, ISA_MIPS32
);
14742 generate_exception_end(ctx
, EXCP_DBp
);
14745 case JRADDIUSP
+ 0:
14746 case JRADDIUSP
+ 1:
14748 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14749 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14750 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14751 /* Let normal delay slot handling in our caller take us
14752 to the branch target. */
14756 generate_exception_end(ctx
, EXCP_RI
);
14761 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14764 int rd
, rs
, re
, rt
;
14765 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14766 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14767 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14768 rd
= rd_enc
[enc_dest
];
14769 re
= re_enc
[enc_dest
];
14770 rs
= rs_rt_enc
[enc_rs
];
14771 rt
= rs_rt_enc
[enc_rt
];
14773 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14775 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14778 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14780 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14784 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14786 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14787 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14789 switch (ctx
->opcode
& 0xf) {
14791 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14794 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14798 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14799 int offset
= extract32(ctx
->opcode
, 4, 4);
14800 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14803 case R6_JRC16
: /* JRCADDIUSP */
14804 if ((ctx
->opcode
>> 4) & 1) {
14806 int imm
= extract32(ctx
->opcode
, 5, 5);
14807 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14808 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14811 rs
= extract32(ctx
->opcode
, 5, 5);
14812 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
14824 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14825 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14826 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14827 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14831 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14834 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14838 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14839 int offset
= extract32(ctx
->opcode
, 4, 4);
14840 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14843 case JALRC16
: /* BREAK16, SDBBP16 */
14844 switch (ctx
->opcode
& 0x3f) {
14846 case JALRC16
+ 0x20:
14848 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14853 generate_exception(ctx
, EXCP_BREAK
);
14857 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14858 gen_helper_do_semihosting(cpu_env
);
14860 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14861 generate_exception(ctx
, EXCP_RI
);
14863 generate_exception(ctx
, EXCP_DBp
);
14870 generate_exception(ctx
, EXCP_RI
);
14875 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14877 TCGv t0
= tcg_temp_new();
14878 TCGv t1
= tcg_temp_new();
14880 gen_load_gpr(t0
, base
);
14883 gen_load_gpr(t1
, index
);
14884 tcg_gen_shli_tl(t1
, t1
, 2);
14885 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14888 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14889 gen_store_gpr(t1
, rd
);
14895 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14896 int base
, int16_t offset
)
14900 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14901 generate_exception_end(ctx
, EXCP_RI
);
14905 t0
= tcg_temp_new();
14906 t1
= tcg_temp_new();
14908 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14913 generate_exception_end(ctx
, EXCP_RI
);
14916 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14917 gen_store_gpr(t1
, rd
);
14918 tcg_gen_movi_tl(t1
, 4);
14919 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14920 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14921 gen_store_gpr(t1
, rd
+1);
14924 gen_load_gpr(t1
, rd
);
14925 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14926 tcg_gen_movi_tl(t1
, 4);
14927 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14928 gen_load_gpr(t1
, rd
+1);
14929 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14931 #ifdef TARGET_MIPS64
14934 generate_exception_end(ctx
, EXCP_RI
);
14937 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14938 gen_store_gpr(t1
, rd
);
14939 tcg_gen_movi_tl(t1
, 8);
14940 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14941 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14942 gen_store_gpr(t1
, rd
+1);
14945 gen_load_gpr(t1
, rd
);
14946 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14947 tcg_gen_movi_tl(t1
, 8);
14948 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14949 gen_load_gpr(t1
, rd
+1);
14950 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14958 static void gen_sync(int stype
)
14960 TCGBar tcg_mo
= TCG_BAR_SC
;
14963 case 0x4: /* SYNC_WMB */
14964 tcg_mo
|= TCG_MO_ST_ST
;
14966 case 0x10: /* SYNC_MB */
14967 tcg_mo
|= TCG_MO_ALL
;
14969 case 0x11: /* SYNC_ACQUIRE */
14970 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14972 case 0x12: /* SYNC_RELEASE */
14973 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14975 case 0x13: /* SYNC_RMB */
14976 tcg_mo
|= TCG_MO_LD_LD
;
14979 tcg_mo
|= TCG_MO_ALL
;
14983 tcg_gen_mb(tcg_mo
);
14986 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14988 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14989 int minor
= (ctx
->opcode
>> 12) & 0xf;
14990 uint32_t mips32_op
;
14992 switch (extension
) {
14994 mips32_op
= OPC_TEQ
;
14997 mips32_op
= OPC_TGE
;
15000 mips32_op
= OPC_TGEU
;
15003 mips32_op
= OPC_TLT
;
15006 mips32_op
= OPC_TLTU
;
15009 mips32_op
= OPC_TNE
;
15011 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15013 #ifndef CONFIG_USER_ONLY
15016 check_cp0_enabled(ctx
);
15018 /* Treat as NOP. */
15021 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15025 check_cp0_enabled(ctx
);
15027 TCGv t0
= tcg_temp_new();
15029 gen_load_gpr(t0
, rt
);
15030 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15036 switch (minor
& 3) {
15038 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15041 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15044 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15047 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15050 goto pool32axf_invalid
;
15054 switch (minor
& 3) {
15056 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15059 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15062 goto pool32axf_invalid
;
15068 check_insn(ctx
, ISA_MIPS32R6
);
15069 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15072 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15075 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15078 mips32_op
= OPC_CLO
;
15081 mips32_op
= OPC_CLZ
;
15083 check_insn(ctx
, ISA_MIPS32
);
15084 gen_cl(ctx
, mips32_op
, rt
, rs
);
15087 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15088 gen_rdhwr(ctx
, rt
, rs
, 0);
15091 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15094 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15095 mips32_op
= OPC_MULT
;
15098 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15099 mips32_op
= OPC_MULTU
;
15102 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15103 mips32_op
= OPC_DIV
;
15106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15107 mips32_op
= OPC_DIVU
;
15110 check_insn(ctx
, ISA_MIPS32
);
15111 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15114 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15115 mips32_op
= OPC_MADD
;
15118 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15119 mips32_op
= OPC_MADDU
;
15122 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15123 mips32_op
= OPC_MSUB
;
15126 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15127 mips32_op
= OPC_MSUBU
;
15129 check_insn(ctx
, ISA_MIPS32
);
15130 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15133 goto pool32axf_invalid
;
15144 generate_exception_err(ctx
, EXCP_CpU
, 2);
15147 goto pool32axf_invalid
;
15152 case JALR
: /* JALRC */
15153 case JALR_HB
: /* JALRC_HB */
15154 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15155 /* JALRC, JALRC_HB */
15156 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15158 /* JALR, JALR_HB */
15159 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15160 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15165 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15166 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15167 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15170 goto pool32axf_invalid
;
15176 check_cp0_enabled(ctx
);
15177 check_insn(ctx
, ISA_MIPS32R2
);
15178 gen_load_srsgpr(rs
, rt
);
15181 check_cp0_enabled(ctx
);
15182 check_insn(ctx
, ISA_MIPS32R2
);
15183 gen_store_srsgpr(rs
, rt
);
15186 goto pool32axf_invalid
;
15189 #ifndef CONFIG_USER_ONLY
15193 mips32_op
= OPC_TLBP
;
15196 mips32_op
= OPC_TLBR
;
15199 mips32_op
= OPC_TLBWI
;
15202 mips32_op
= OPC_TLBWR
;
15205 mips32_op
= OPC_TLBINV
;
15208 mips32_op
= OPC_TLBINVF
;
15211 mips32_op
= OPC_WAIT
;
15214 mips32_op
= OPC_DERET
;
15217 mips32_op
= OPC_ERET
;
15219 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15222 goto pool32axf_invalid
;
15228 check_cp0_enabled(ctx
);
15230 TCGv t0
= tcg_temp_new();
15232 save_cpu_state(ctx
, 1);
15233 gen_helper_di(t0
, cpu_env
);
15234 gen_store_gpr(t0
, rs
);
15235 /* Stop translation as we may have switched the execution mode */
15236 ctx
->base
.is_jmp
= DISAS_STOP
;
15241 check_cp0_enabled(ctx
);
15243 TCGv t0
= tcg_temp_new();
15245 save_cpu_state(ctx
, 1);
15246 gen_helper_ei(t0
, cpu_env
);
15247 gen_store_gpr(t0
, rs
);
15248 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15249 of translated code to check for pending interrupts. */
15250 gen_save_pc(ctx
->base
.pc_next
+ 4);
15251 ctx
->base
.is_jmp
= DISAS_EXIT
;
15256 goto pool32axf_invalid
;
15263 gen_sync(extract32(ctx
->opcode
, 16, 5));
15266 generate_exception_end(ctx
, EXCP_SYSCALL
);
15269 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15270 gen_helper_do_semihosting(cpu_env
);
15272 check_insn(ctx
, ISA_MIPS32
);
15273 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15274 generate_exception_end(ctx
, EXCP_RI
);
15276 generate_exception_end(ctx
, EXCP_DBp
);
15281 goto pool32axf_invalid
;
15285 switch (minor
& 3) {
15287 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15290 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15293 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15296 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15299 goto pool32axf_invalid
;
15303 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15306 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15309 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15312 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15315 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15318 goto pool32axf_invalid
;
15323 MIPS_INVAL("pool32axf");
15324 generate_exception_end(ctx
, EXCP_RI
);
15329 /* Values for microMIPS fmt field. Variable-width, depending on which
15330 formats the instruction supports. */
15349 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15351 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15352 uint32_t mips32_op
;
15354 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15355 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15356 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15358 switch (extension
) {
15359 case FLOAT_1BIT_FMT(CFC1
, 0):
15360 mips32_op
= OPC_CFC1
;
15362 case FLOAT_1BIT_FMT(CTC1
, 0):
15363 mips32_op
= OPC_CTC1
;
15365 case FLOAT_1BIT_FMT(MFC1
, 0):
15366 mips32_op
= OPC_MFC1
;
15368 case FLOAT_1BIT_FMT(MTC1
, 0):
15369 mips32_op
= OPC_MTC1
;
15371 case FLOAT_1BIT_FMT(MFHC1
, 0):
15372 mips32_op
= OPC_MFHC1
;
15374 case FLOAT_1BIT_FMT(MTHC1
, 0):
15375 mips32_op
= OPC_MTHC1
;
15377 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15380 /* Reciprocal square root */
15381 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15382 mips32_op
= OPC_RSQRT_S
;
15384 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15385 mips32_op
= OPC_RSQRT_D
;
15389 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15390 mips32_op
= OPC_SQRT_S
;
15392 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15393 mips32_op
= OPC_SQRT_D
;
15397 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15398 mips32_op
= OPC_RECIP_S
;
15400 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15401 mips32_op
= OPC_RECIP_D
;
15405 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15406 mips32_op
= OPC_FLOOR_L_S
;
15408 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15409 mips32_op
= OPC_FLOOR_L_D
;
15411 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15412 mips32_op
= OPC_FLOOR_W_S
;
15414 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15415 mips32_op
= OPC_FLOOR_W_D
;
15419 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15420 mips32_op
= OPC_CEIL_L_S
;
15422 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15423 mips32_op
= OPC_CEIL_L_D
;
15425 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15426 mips32_op
= OPC_CEIL_W_S
;
15428 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15429 mips32_op
= OPC_CEIL_W_D
;
15433 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15434 mips32_op
= OPC_TRUNC_L_S
;
15436 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15437 mips32_op
= OPC_TRUNC_L_D
;
15439 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15440 mips32_op
= OPC_TRUNC_W_S
;
15442 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15443 mips32_op
= OPC_TRUNC_W_D
;
15447 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15448 mips32_op
= OPC_ROUND_L_S
;
15450 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15451 mips32_op
= OPC_ROUND_L_D
;
15453 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15454 mips32_op
= OPC_ROUND_W_S
;
15456 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15457 mips32_op
= OPC_ROUND_W_D
;
15460 /* Integer to floating-point conversion */
15461 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15462 mips32_op
= OPC_CVT_L_S
;
15464 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15465 mips32_op
= OPC_CVT_L_D
;
15467 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15468 mips32_op
= OPC_CVT_W_S
;
15470 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15471 mips32_op
= OPC_CVT_W_D
;
15474 /* Paired-foo conversions */
15475 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15476 mips32_op
= OPC_CVT_S_PL
;
15478 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15479 mips32_op
= OPC_CVT_S_PU
;
15481 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15482 mips32_op
= OPC_CVT_PW_PS
;
15484 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15485 mips32_op
= OPC_CVT_PS_PW
;
15488 /* Floating-point moves */
15489 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15490 mips32_op
= OPC_MOV_S
;
15492 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15493 mips32_op
= OPC_MOV_D
;
15495 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15496 mips32_op
= OPC_MOV_PS
;
15499 /* Absolute value */
15500 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15501 mips32_op
= OPC_ABS_S
;
15503 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15504 mips32_op
= OPC_ABS_D
;
15506 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15507 mips32_op
= OPC_ABS_PS
;
15511 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15512 mips32_op
= OPC_NEG_S
;
15514 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15515 mips32_op
= OPC_NEG_D
;
15517 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15518 mips32_op
= OPC_NEG_PS
;
15521 /* Reciprocal square root step */
15522 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15523 mips32_op
= OPC_RSQRT1_S
;
15525 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15526 mips32_op
= OPC_RSQRT1_D
;
15528 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15529 mips32_op
= OPC_RSQRT1_PS
;
15532 /* Reciprocal step */
15533 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15534 mips32_op
= OPC_RECIP1_S
;
15536 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15537 mips32_op
= OPC_RECIP1_S
;
15539 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15540 mips32_op
= OPC_RECIP1_PS
;
15543 /* Conversions from double */
15544 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15545 mips32_op
= OPC_CVT_D_S
;
15547 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15548 mips32_op
= OPC_CVT_D_W
;
15550 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15551 mips32_op
= OPC_CVT_D_L
;
15554 /* Conversions from single */
15555 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15556 mips32_op
= OPC_CVT_S_D
;
15558 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15559 mips32_op
= OPC_CVT_S_W
;
15561 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15562 mips32_op
= OPC_CVT_S_L
;
15564 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15567 /* Conditional moves on floating-point codes */
15568 case COND_FLOAT_MOV(MOVT
, 0):
15569 case COND_FLOAT_MOV(MOVT
, 1):
15570 case COND_FLOAT_MOV(MOVT
, 2):
15571 case COND_FLOAT_MOV(MOVT
, 3):
15572 case COND_FLOAT_MOV(MOVT
, 4):
15573 case COND_FLOAT_MOV(MOVT
, 5):
15574 case COND_FLOAT_MOV(MOVT
, 6):
15575 case COND_FLOAT_MOV(MOVT
, 7):
15576 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15577 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15579 case COND_FLOAT_MOV(MOVF
, 0):
15580 case COND_FLOAT_MOV(MOVF
, 1):
15581 case COND_FLOAT_MOV(MOVF
, 2):
15582 case COND_FLOAT_MOV(MOVF
, 3):
15583 case COND_FLOAT_MOV(MOVF
, 4):
15584 case COND_FLOAT_MOV(MOVF
, 5):
15585 case COND_FLOAT_MOV(MOVF
, 6):
15586 case COND_FLOAT_MOV(MOVF
, 7):
15587 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15588 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15591 MIPS_INVAL("pool32fxf");
15592 generate_exception_end(ctx
, EXCP_RI
);
15597 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15601 int rt
, rs
, rd
, rr
;
15603 uint32_t op
, minor
, minor2
, mips32_op
;
15604 uint32_t cond
, fmt
, cc
;
15606 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15607 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15609 rt
= (ctx
->opcode
>> 21) & 0x1f;
15610 rs
= (ctx
->opcode
>> 16) & 0x1f;
15611 rd
= (ctx
->opcode
>> 11) & 0x1f;
15612 rr
= (ctx
->opcode
>> 6) & 0x1f;
15613 imm
= (int16_t) ctx
->opcode
;
15615 op
= (ctx
->opcode
>> 26) & 0x3f;
15618 minor
= ctx
->opcode
& 0x3f;
15621 minor
= (ctx
->opcode
>> 6) & 0xf;
15624 mips32_op
= OPC_SLL
;
15627 mips32_op
= OPC_SRA
;
15630 mips32_op
= OPC_SRL
;
15633 mips32_op
= OPC_ROTR
;
15635 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15638 check_insn(ctx
, ISA_MIPS32R6
);
15639 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15642 check_insn(ctx
, ISA_MIPS32R6
);
15643 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15646 check_insn(ctx
, ISA_MIPS32R6
);
15647 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15650 goto pool32a_invalid
;
15654 minor
= (ctx
->opcode
>> 6) & 0xf;
15658 mips32_op
= OPC_ADD
;
15661 mips32_op
= OPC_ADDU
;
15664 mips32_op
= OPC_SUB
;
15667 mips32_op
= OPC_SUBU
;
15670 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15671 mips32_op
= OPC_MUL
;
15673 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15677 mips32_op
= OPC_SLLV
;
15680 mips32_op
= OPC_SRLV
;
15683 mips32_op
= OPC_SRAV
;
15686 mips32_op
= OPC_ROTRV
;
15688 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15690 /* Logical operations */
15692 mips32_op
= OPC_AND
;
15695 mips32_op
= OPC_OR
;
15698 mips32_op
= OPC_NOR
;
15701 mips32_op
= OPC_XOR
;
15703 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15705 /* Set less than */
15707 mips32_op
= OPC_SLT
;
15710 mips32_op
= OPC_SLTU
;
15712 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15715 goto pool32a_invalid
;
15719 minor
= (ctx
->opcode
>> 6) & 0xf;
15721 /* Conditional moves */
15722 case MOVN
: /* MUL */
15723 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15725 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15728 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15731 case MOVZ
: /* MUH */
15732 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15734 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15737 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15741 check_insn(ctx
, ISA_MIPS32R6
);
15742 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15745 check_insn(ctx
, ISA_MIPS32R6
);
15746 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15748 case LWXS
: /* DIV */
15749 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15751 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15754 gen_ldxs(ctx
, rs
, rt
, rd
);
15758 check_insn(ctx
, ISA_MIPS32R6
);
15759 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15762 check_insn(ctx
, ISA_MIPS32R6
);
15763 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15766 check_insn(ctx
, ISA_MIPS32R6
);
15767 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15770 goto pool32a_invalid
;
15774 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15777 check_insn(ctx
, ISA_MIPS32R6
);
15778 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15779 extract32(ctx
->opcode
, 9, 2));
15782 check_insn(ctx
, ISA_MIPS32R6
);
15783 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15786 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15789 gen_pool32axf(env
, ctx
, rt
, rs
);
15792 generate_exception_end(ctx
, EXCP_BREAK
);
15795 check_insn(ctx
, ISA_MIPS32R6
);
15796 generate_exception_end(ctx
, EXCP_RI
);
15800 MIPS_INVAL("pool32a");
15801 generate_exception_end(ctx
, EXCP_RI
);
15806 minor
= (ctx
->opcode
>> 12) & 0xf;
15809 check_cp0_enabled(ctx
);
15810 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15811 gen_cache_operation(ctx
, rt
, rs
, imm
);
15816 /* COP2: Not implemented. */
15817 generate_exception_err(ctx
, EXCP_CpU
, 2);
15819 #ifdef TARGET_MIPS64
15822 check_insn(ctx
, ISA_MIPS3
);
15823 check_mips_64(ctx
);
15828 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15830 #ifdef TARGET_MIPS64
15833 check_insn(ctx
, ISA_MIPS3
);
15834 check_mips_64(ctx
);
15839 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15842 MIPS_INVAL("pool32b");
15843 generate_exception_end(ctx
, EXCP_RI
);
15848 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15849 minor
= ctx
->opcode
& 0x3f;
15850 check_cp1_enabled(ctx
);
15853 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15854 mips32_op
= OPC_ALNV_PS
;
15857 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15858 mips32_op
= OPC_MADD_S
;
15861 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15862 mips32_op
= OPC_MADD_D
;
15865 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15866 mips32_op
= OPC_MADD_PS
;
15869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15870 mips32_op
= OPC_MSUB_S
;
15873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15874 mips32_op
= OPC_MSUB_D
;
15877 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15878 mips32_op
= OPC_MSUB_PS
;
15881 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15882 mips32_op
= OPC_NMADD_S
;
15885 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15886 mips32_op
= OPC_NMADD_D
;
15889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15890 mips32_op
= OPC_NMADD_PS
;
15893 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15894 mips32_op
= OPC_NMSUB_S
;
15897 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15898 mips32_op
= OPC_NMSUB_D
;
15901 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15902 mips32_op
= OPC_NMSUB_PS
;
15904 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15906 case CABS_COND_FMT
:
15907 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15908 cond
= (ctx
->opcode
>> 6) & 0xf;
15909 cc
= (ctx
->opcode
>> 13) & 0x7;
15910 fmt
= (ctx
->opcode
>> 10) & 0x3;
15913 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15916 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15919 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15922 goto pool32f_invalid
;
15926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15927 cond
= (ctx
->opcode
>> 6) & 0xf;
15928 cc
= (ctx
->opcode
>> 13) & 0x7;
15929 fmt
= (ctx
->opcode
>> 10) & 0x3;
15932 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15935 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15938 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15941 goto pool32f_invalid
;
15945 check_insn(ctx
, ISA_MIPS32R6
);
15946 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15949 check_insn(ctx
, ISA_MIPS32R6
);
15950 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15953 gen_pool32fxf(ctx
, rt
, rs
);
15957 switch ((ctx
->opcode
>> 6) & 0x7) {
15959 mips32_op
= OPC_PLL_PS
;
15962 mips32_op
= OPC_PLU_PS
;
15965 mips32_op
= OPC_PUL_PS
;
15968 mips32_op
= OPC_PUU_PS
;
15971 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15972 mips32_op
= OPC_CVT_PS_S
;
15974 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15977 goto pool32f_invalid
;
15981 check_insn(ctx
, ISA_MIPS32R6
);
15982 switch ((ctx
->opcode
>> 9) & 0x3) {
15984 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15987 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15990 goto pool32f_invalid
;
15995 switch ((ctx
->opcode
>> 6) & 0x7) {
15997 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15998 mips32_op
= OPC_LWXC1
;
16001 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16002 mips32_op
= OPC_SWXC1
;
16005 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16006 mips32_op
= OPC_LDXC1
;
16009 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16010 mips32_op
= OPC_SDXC1
;
16013 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16014 mips32_op
= OPC_LUXC1
;
16017 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16018 mips32_op
= OPC_SUXC1
;
16020 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16023 goto pool32f_invalid
;
16027 check_insn(ctx
, ISA_MIPS32R6
);
16028 switch ((ctx
->opcode
>> 9) & 0x3) {
16030 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16033 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16036 goto pool32f_invalid
;
16041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16042 fmt
= (ctx
->opcode
>> 9) & 0x3;
16043 switch ((ctx
->opcode
>> 6) & 0x7) {
16047 mips32_op
= OPC_RSQRT2_S
;
16050 mips32_op
= OPC_RSQRT2_D
;
16053 mips32_op
= OPC_RSQRT2_PS
;
16056 goto pool32f_invalid
;
16062 mips32_op
= OPC_RECIP2_S
;
16065 mips32_op
= OPC_RECIP2_D
;
16068 mips32_op
= OPC_RECIP2_PS
;
16071 goto pool32f_invalid
;
16075 mips32_op
= OPC_ADDR_PS
;
16078 mips32_op
= OPC_MULR_PS
;
16080 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16083 goto pool32f_invalid
;
16087 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16088 cc
= (ctx
->opcode
>> 13) & 0x7;
16089 fmt
= (ctx
->opcode
>> 9) & 0x3;
16090 switch ((ctx
->opcode
>> 6) & 0x7) {
16091 case MOVF_FMT
: /* RINT_FMT */
16092 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16096 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16099 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16102 goto pool32f_invalid
;
16108 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16111 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16115 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16118 goto pool32f_invalid
;
16122 case MOVT_FMT
: /* CLASS_FMT */
16123 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16127 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16130 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16133 goto pool32f_invalid
;
16139 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16142 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16146 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16149 goto pool32f_invalid
;
16154 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16157 goto pool32f_invalid
;
16160 #define FINSN_3ARG_SDPS(prfx) \
16161 switch ((ctx->opcode >> 8) & 0x3) { \
16163 mips32_op = OPC_##prfx##_S; \
16166 mips32_op = OPC_##prfx##_D; \
16168 case FMT_SDPS_PS: \
16170 mips32_op = OPC_##prfx##_PS; \
16173 goto pool32f_invalid; \
16176 check_insn(ctx
, ISA_MIPS32R6
);
16177 switch ((ctx
->opcode
>> 9) & 0x3) {
16179 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16182 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16185 goto pool32f_invalid
;
16189 check_insn(ctx
, ISA_MIPS32R6
);
16190 switch ((ctx
->opcode
>> 9) & 0x3) {
16192 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16195 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16198 goto pool32f_invalid
;
16202 /* regular FP ops */
16203 switch ((ctx
->opcode
>> 6) & 0x3) {
16205 FINSN_3ARG_SDPS(ADD
);
16208 FINSN_3ARG_SDPS(SUB
);
16211 FINSN_3ARG_SDPS(MUL
);
16214 fmt
= (ctx
->opcode
>> 8) & 0x3;
16216 mips32_op
= OPC_DIV_D
;
16217 } else if (fmt
== 0) {
16218 mips32_op
= OPC_DIV_S
;
16220 goto pool32f_invalid
;
16224 goto pool32f_invalid
;
16229 switch ((ctx
->opcode
>> 6) & 0x7) {
16230 case MOVN_FMT
: /* SELEQZ_FMT */
16231 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16233 switch ((ctx
->opcode
>> 9) & 0x3) {
16235 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16238 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16241 goto pool32f_invalid
;
16245 FINSN_3ARG_SDPS(MOVN
);
16249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16250 FINSN_3ARG_SDPS(MOVN
);
16252 case MOVZ_FMT
: /* SELNEZ_FMT */
16253 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16255 switch ((ctx
->opcode
>> 9) & 0x3) {
16257 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16260 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16263 goto pool32f_invalid
;
16267 FINSN_3ARG_SDPS(MOVZ
);
16271 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16272 FINSN_3ARG_SDPS(MOVZ
);
16275 check_insn(ctx
, ISA_MIPS32R6
);
16276 switch ((ctx
->opcode
>> 9) & 0x3) {
16278 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16281 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16284 goto pool32f_invalid
;
16288 check_insn(ctx
, ISA_MIPS32R6
);
16289 switch ((ctx
->opcode
>> 9) & 0x3) {
16291 mips32_op
= OPC_MADDF_S
;
16294 mips32_op
= OPC_MADDF_D
;
16297 goto pool32f_invalid
;
16301 check_insn(ctx
, ISA_MIPS32R6
);
16302 switch ((ctx
->opcode
>> 9) & 0x3) {
16304 mips32_op
= OPC_MSUBF_S
;
16307 mips32_op
= OPC_MSUBF_D
;
16310 goto pool32f_invalid
;
16314 goto pool32f_invalid
;
16318 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16322 MIPS_INVAL("pool32f");
16323 generate_exception_end(ctx
, EXCP_RI
);
16327 generate_exception_err(ctx
, EXCP_CpU
, 1);
16331 minor
= (ctx
->opcode
>> 21) & 0x1f;
16334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16335 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16338 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16339 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16340 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16343 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16344 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16345 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16349 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16353 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16354 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16358 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16359 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16363 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16367 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16371 case TLTI
: /* BC1EQZC */
16372 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16374 check_cp1_enabled(ctx
);
16375 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16378 mips32_op
= OPC_TLTI
;
16382 case TGEI
: /* BC1NEZC */
16383 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16385 check_cp1_enabled(ctx
);
16386 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16389 mips32_op
= OPC_TGEI
;
16394 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16395 mips32_op
= OPC_TLTIU
;
16398 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16399 mips32_op
= OPC_TGEIU
;
16401 case TNEI
: /* SYNCI */
16402 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16404 /* Break the TB to be able to sync copied instructions
16406 ctx
->base
.is_jmp
= DISAS_STOP
;
16409 mips32_op
= OPC_TNEI
;
16414 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16415 mips32_op
= OPC_TEQI
;
16417 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16422 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16423 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16424 4, rs
, 0, imm
<< 1, 0);
16425 /* Compact branches don't have a delay slot, so just let
16426 the normal delay slot handling take us to the branch
16430 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16431 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16434 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16435 /* Break the TB to be able to sync copied instructions
16437 ctx
->base
.is_jmp
= DISAS_STOP
;
16441 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16442 /* COP2: Not implemented. */
16443 generate_exception_err(ctx
, EXCP_CpU
, 2);
16446 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16447 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16450 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16451 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16454 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16455 mips32_op
= OPC_BC1FANY4
;
16458 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16459 mips32_op
= OPC_BC1TANY4
;
16462 check_insn(ctx
, ASE_MIPS3D
);
16465 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16466 check_cp1_enabled(ctx
);
16467 gen_compute_branch1(ctx
, mips32_op
,
16468 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16470 generate_exception_err(ctx
, EXCP_CpU
, 1);
16475 /* MIPS DSP: not implemented */
16478 MIPS_INVAL("pool32i");
16479 generate_exception_end(ctx
, EXCP_RI
);
16484 minor
= (ctx
->opcode
>> 12) & 0xf;
16485 offset
= sextract32(ctx
->opcode
, 0,
16486 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16489 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16490 mips32_op
= OPC_LWL
;
16493 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16494 mips32_op
= OPC_SWL
;
16497 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16498 mips32_op
= OPC_LWR
;
16501 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16502 mips32_op
= OPC_SWR
;
16504 #if defined(TARGET_MIPS64)
16506 check_insn(ctx
, ISA_MIPS3
);
16507 check_mips_64(ctx
);
16508 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16509 mips32_op
= OPC_LDL
;
16512 check_insn(ctx
, ISA_MIPS3
);
16513 check_mips_64(ctx
);
16514 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16515 mips32_op
= OPC_SDL
;
16518 check_insn(ctx
, ISA_MIPS3
);
16519 check_mips_64(ctx
);
16520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16521 mips32_op
= OPC_LDR
;
16524 check_insn(ctx
, ISA_MIPS3
);
16525 check_mips_64(ctx
);
16526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16527 mips32_op
= OPC_SDR
;
16530 check_insn(ctx
, ISA_MIPS3
);
16531 check_mips_64(ctx
);
16532 mips32_op
= OPC_LWU
;
16535 check_insn(ctx
, ISA_MIPS3
);
16536 check_mips_64(ctx
);
16537 mips32_op
= OPC_LLD
;
16541 mips32_op
= OPC_LL
;
16544 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16547 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16550 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16552 #if defined(TARGET_MIPS64)
16554 check_insn(ctx
, ISA_MIPS3
);
16555 check_mips_64(ctx
);
16556 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16561 MIPS_INVAL("pool32c ld-eva");
16562 generate_exception_end(ctx
, EXCP_RI
);
16565 check_cp0_enabled(ctx
);
16567 minor2
= (ctx
->opcode
>> 9) & 0x7;
16568 offset
= sextract32(ctx
->opcode
, 0, 9);
16571 mips32_op
= OPC_LBUE
;
16574 mips32_op
= OPC_LHUE
;
16577 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16578 mips32_op
= OPC_LWLE
;
16581 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16582 mips32_op
= OPC_LWRE
;
16585 mips32_op
= OPC_LBE
;
16588 mips32_op
= OPC_LHE
;
16591 mips32_op
= OPC_LLE
;
16594 mips32_op
= OPC_LWE
;
16600 MIPS_INVAL("pool32c st-eva");
16601 generate_exception_end(ctx
, EXCP_RI
);
16604 check_cp0_enabled(ctx
);
16606 minor2
= (ctx
->opcode
>> 9) & 0x7;
16607 offset
= sextract32(ctx
->opcode
, 0, 9);
16610 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16611 mips32_op
= OPC_SWLE
;
16614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16615 mips32_op
= OPC_SWRE
;
16618 /* Treat as no-op */
16619 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16620 /* hint codes 24-31 are reserved and signal RI */
16621 generate_exception(ctx
, EXCP_RI
);
16625 /* Treat as no-op */
16626 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16627 gen_cache_operation(ctx
, rt
, rs
, offset
);
16631 mips32_op
= OPC_SBE
;
16634 mips32_op
= OPC_SHE
;
16637 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16640 mips32_op
= OPC_SWE
;
16645 /* Treat as no-op */
16646 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16647 /* hint codes 24-31 are reserved and signal RI */
16648 generate_exception(ctx
, EXCP_RI
);
16652 MIPS_INVAL("pool32c");
16653 generate_exception_end(ctx
, EXCP_RI
);
16657 case ADDI32
: /* AUI, LUI */
16658 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16660 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16663 mips32_op
= OPC_ADDI
;
16668 mips32_op
= OPC_ADDIU
;
16670 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16673 /* Logical operations */
16675 mips32_op
= OPC_ORI
;
16678 mips32_op
= OPC_XORI
;
16681 mips32_op
= OPC_ANDI
;
16683 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16686 /* Set less than immediate */
16688 mips32_op
= OPC_SLTI
;
16691 mips32_op
= OPC_SLTIU
;
16693 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16696 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16697 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16698 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16699 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16701 case JALS32
: /* BOVC, BEQC, BEQZALC */
16702 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16705 mips32_op
= OPC_BOVC
;
16706 } else if (rs
< rt
&& rs
== 0) {
16708 mips32_op
= OPC_BEQZALC
;
16711 mips32_op
= OPC_BEQC
;
16713 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16716 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16717 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16718 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16721 case BEQ32
: /* BC */
16722 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16724 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16725 sextract32(ctx
->opcode
<< 1, 0, 27));
16728 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16731 case BNE32
: /* BALC */
16732 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16734 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16735 sextract32(ctx
->opcode
<< 1, 0, 27));
16738 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16741 case J32
: /* BGTZC, BLTZC, BLTC */
16742 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16743 if (rs
== 0 && rt
!= 0) {
16745 mips32_op
= OPC_BGTZC
;
16746 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16748 mips32_op
= OPC_BLTZC
;
16751 mips32_op
= OPC_BLTC
;
16753 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16756 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16757 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16760 case JAL32
: /* BLEZC, BGEZC, BGEC */
16761 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16762 if (rs
== 0 && rt
!= 0) {
16764 mips32_op
= OPC_BLEZC
;
16765 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16767 mips32_op
= OPC_BGEZC
;
16770 mips32_op
= OPC_BGEC
;
16772 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16775 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16776 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16777 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16780 /* Floating point (COP1) */
16782 mips32_op
= OPC_LWC1
;
16785 mips32_op
= OPC_LDC1
;
16788 mips32_op
= OPC_SWC1
;
16791 mips32_op
= OPC_SDC1
;
16793 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16795 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16796 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16797 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16798 switch ((ctx
->opcode
>> 16) & 0x1f) {
16807 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16810 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16813 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16823 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16826 generate_exception(ctx
, EXCP_RI
);
16831 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16832 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16834 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16837 case BNVC
: /* BNEC, BNEZALC */
16838 check_insn(ctx
, ISA_MIPS32R6
);
16841 mips32_op
= OPC_BNVC
;
16842 } else if (rs
< rt
&& rs
== 0) {
16844 mips32_op
= OPC_BNEZALC
;
16847 mips32_op
= OPC_BNEC
;
16849 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16851 case R6_BNEZC
: /* JIALC */
16852 check_insn(ctx
, ISA_MIPS32R6
);
16855 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16856 sextract32(ctx
->opcode
<< 1, 0, 22));
16859 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16862 case R6_BEQZC
: /* JIC */
16863 check_insn(ctx
, ISA_MIPS32R6
);
16866 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16867 sextract32(ctx
->opcode
<< 1, 0, 22));
16870 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16873 case BLEZALC
: /* BGEZALC, BGEUC */
16874 check_insn(ctx
, ISA_MIPS32R6
);
16875 if (rs
== 0 && rt
!= 0) {
16877 mips32_op
= OPC_BLEZALC
;
16878 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16880 mips32_op
= OPC_BGEZALC
;
16883 mips32_op
= OPC_BGEUC
;
16885 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16887 case BGTZALC
: /* BLTZALC, BLTUC */
16888 check_insn(ctx
, ISA_MIPS32R6
);
16889 if (rs
== 0 && rt
!= 0) {
16891 mips32_op
= OPC_BGTZALC
;
16892 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16894 mips32_op
= OPC_BLTZALC
;
16897 mips32_op
= OPC_BLTUC
;
16899 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16901 /* Loads and stores */
16903 mips32_op
= OPC_LB
;
16906 mips32_op
= OPC_LBU
;
16909 mips32_op
= OPC_LH
;
16912 mips32_op
= OPC_LHU
;
16915 mips32_op
= OPC_LW
;
16917 #ifdef TARGET_MIPS64
16919 check_insn(ctx
, ISA_MIPS3
);
16920 check_mips_64(ctx
);
16921 mips32_op
= OPC_LD
;
16924 check_insn(ctx
, ISA_MIPS3
);
16925 check_mips_64(ctx
);
16926 mips32_op
= OPC_SD
;
16930 mips32_op
= OPC_SB
;
16933 mips32_op
= OPC_SH
;
16936 mips32_op
= OPC_SW
;
16939 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16942 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16945 generate_exception_end(ctx
, EXCP_RI
);
16950 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16954 /* make sure instructions are on a halfword boundary */
16955 if (ctx
->base
.pc_next
& 0x1) {
16956 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16957 generate_exception_end(ctx
, EXCP_AdEL
);
16961 op
= (ctx
->opcode
>> 10) & 0x3f;
16962 /* Enforce properly-sized instructions in a delay slot */
16963 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16964 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16966 /* POOL32A, POOL32B, POOL32I, POOL32C */
16968 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16970 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16972 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16974 /* LB32, LH32, LWC132, LDC132, LW32 */
16975 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16976 generate_exception_end(ctx
, EXCP_RI
);
16981 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16983 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16985 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16986 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16987 generate_exception_end(ctx
, EXCP_RI
);
16997 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16998 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16999 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17002 switch (ctx
->opcode
& 0x1) {
17010 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17011 /* In the Release 6 the register number location in
17012 * the instruction encoding has changed.
17014 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17016 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17022 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17023 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17024 int amount
= (ctx
->opcode
>> 1) & 0x7;
17026 amount
= amount
== 0 ? 8 : amount
;
17028 switch (ctx
->opcode
& 0x1) {
17037 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17042 gen_pool16c_r6_insn(ctx
);
17044 gen_pool16c_insn(ctx
);
17049 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17050 int rb
= 28; /* GP */
17051 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17053 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17058 if (ctx
->opcode
& 1) {
17059 generate_exception_end(ctx
, EXCP_RI
);
17062 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17063 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17064 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17065 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17070 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17071 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17072 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17073 offset
= (offset
== 0xf ? -1 : offset
);
17075 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17080 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17081 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17082 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17084 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17089 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17090 int rb
= 29; /* SP */
17091 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17093 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17098 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17099 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17100 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17102 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17107 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17108 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17109 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17111 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17116 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17117 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17118 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17120 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17125 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17126 int rb
= 29; /* SP */
17127 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17129 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17134 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17135 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17136 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17138 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17143 int rd
= uMIPS_RD5(ctx
->opcode
);
17144 int rs
= uMIPS_RS5(ctx
->opcode
);
17146 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17153 switch (ctx
->opcode
& 0x1) {
17163 switch (ctx
->opcode
& 0x1) {
17168 gen_addiur1sp(ctx
);
17172 case B16
: /* BC16 */
17173 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17174 sextract32(ctx
->opcode
, 0, 10) << 1,
17175 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17177 case BNEZ16
: /* BNEZC16 */
17178 case BEQZ16
: /* BEQZC16 */
17179 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17180 mmreg(uMIPS_RD(ctx
->opcode
)),
17181 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17182 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17187 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17188 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17190 imm
= (imm
== 0x7f ? -1 : imm
);
17191 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17197 generate_exception_end(ctx
, EXCP_RI
);
17200 decode_micromips32_opc(env
, ctx
);
17213 /* MAJOR, P16, and P32 pools opcodes */
17217 NM_MOVE_BALC
= 0x02,
17225 NM_P16_SHIFT
= 0x0c,
17243 NM_P_LS_U12
= 0x21,
17253 NM_P16_ADDU
= 0x2c,
17267 NM_MOVEPREV
= 0x3f,
17270 /* POOL32A instruction pool */
17272 NM_POOL32A0
= 0x00,
17273 NM_SPECIAL2
= 0x01,
17276 NM_POOL32A5
= 0x05,
17277 NM_POOL32A7
= 0x07,
17280 /* P.GP.W instruction pool */
17282 NM_ADDIUGP_W
= 0x00,
17287 /* P48I instruction pool */
17291 NM_ADDIUGP48
= 0x02,
17292 NM_ADDIUPC48
= 0x03,
17297 /* P.U12 instruction pool */
17306 NM_ADDIUNEG
= 0x08,
17313 /* POOL32F instruction pool */
17315 NM_POOL32F_0
= 0x00,
17316 NM_POOL32F_3
= 0x03,
17317 NM_POOL32F_5
= 0x05,
17320 /* POOL32S instruction pool */
17322 NM_POOL32S_0
= 0x00,
17323 NM_POOL32S_4
= 0x04,
17326 /* P.LUI instruction pool */
17332 /* P.GP.BH instruction pool */
17337 NM_ADDIUGP_B
= 0x03,
17340 NM_P_GP_CP1
= 0x06,
17343 /* P.LS.U12 instruction pool */
17348 NM_P_PREFU12
= 0x03,
17361 /* P.LS.S9 instruction pool */
17367 NM_P_LS_UAWM
= 0x05,
17370 /* P.BAL instruction pool */
17376 /* P.J instruction pool */
17379 NM_JALRC_HB
= 0x01,
17380 NM_P_BALRSC
= 0x08,
17383 /* P.BR1 instruction pool */
17391 /* P.BR2 instruction pool */
17398 /* P.BRI instruction pool */
17410 /* P16.SHIFT instruction pool */
17416 /* POOL16C instruction pool */
17418 NM_POOL16C_0
= 0x00,
17422 /* P16.A1 instruction pool */
17424 NM_ADDIUR1SP
= 0x01,
17427 /* P16.A2 instruction pool */
17430 NM_P_ADDIURS5
= 0x01,
17433 /* P16.ADDU instruction pool */
17439 /* P16.SR instruction pool */
17442 NM_RESTORE_JRC16
= 0x01,
17445 /* P16.4X4 instruction pool */
17451 /* P16.LB instruction pool */
17458 /* P16.LH instruction pool */
17465 /* P.RI instruction pool */
17468 NM_P_SYSCALL
= 0x01,
17473 /* POOL32A0 instruction pool */
17508 NM_D_E_MT_VPE
= 0x56,
17516 /* CRC32 instruction pool */
17526 /* POOL32A5 instruction pool */
17528 NM_CMP_EQ_PH
= 0x00,
17529 NM_CMP_LT_PH
= 0x08,
17530 NM_CMP_LE_PH
= 0x10,
17531 NM_CMPGU_EQ_QB
= 0x18,
17532 NM_CMPGU_LT_QB
= 0x20,
17533 NM_CMPGU_LE_QB
= 0x28,
17534 NM_CMPGDU_EQ_QB
= 0x30,
17535 NM_CMPGDU_LT_QB
= 0x38,
17536 NM_CMPGDU_LE_QB
= 0x40,
17537 NM_CMPU_EQ_QB
= 0x48,
17538 NM_CMPU_LT_QB
= 0x50,
17539 NM_CMPU_LE_QB
= 0x58,
17540 NM_ADDQ_S_W
= 0x60,
17541 NM_SUBQ_S_W
= 0x68,
17545 NM_ADDQ_S_PH
= 0x01,
17546 NM_ADDQH_R_PH
= 0x09,
17547 NM_ADDQH_R_W
= 0x11,
17548 NM_ADDU_S_QB
= 0x19,
17549 NM_ADDU_S_PH
= 0x21,
17550 NM_ADDUH_R_QB
= 0x29,
17551 NM_SHRAV_R_PH
= 0x31,
17552 NM_SHRAV_R_QB
= 0x39,
17553 NM_SUBQ_S_PH
= 0x41,
17554 NM_SUBQH_R_PH
= 0x49,
17555 NM_SUBQH_R_W
= 0x51,
17556 NM_SUBU_S_QB
= 0x59,
17557 NM_SUBU_S_PH
= 0x61,
17558 NM_SUBUH_R_QB
= 0x69,
17559 NM_SHLLV_S_PH
= 0x71,
17560 NM_PRECR_SRA_R_PH_W
= 0x79,
17562 NM_MULEU_S_PH_QBL
= 0x12,
17563 NM_MULEU_S_PH_QBR
= 0x1a,
17564 NM_MULQ_RS_PH
= 0x22,
17565 NM_MULQ_S_PH
= 0x2a,
17566 NM_MULQ_RS_W
= 0x32,
17567 NM_MULQ_S_W
= 0x3a,
17570 NM_SHRAV_R_W
= 0x5a,
17571 NM_SHRLV_PH
= 0x62,
17572 NM_SHRLV_QB
= 0x6a,
17573 NM_SHLLV_QB
= 0x72,
17574 NM_SHLLV_S_W
= 0x7a,
17578 NM_MULEQ_S_W_PHL
= 0x04,
17579 NM_MULEQ_S_W_PHR
= 0x0c,
17581 NM_MUL_S_PH
= 0x05,
17582 NM_PRECR_QB_PH
= 0x0d,
17583 NM_PRECRQ_QB_PH
= 0x15,
17584 NM_PRECRQ_PH_W
= 0x1d,
17585 NM_PRECRQ_RS_PH_W
= 0x25,
17586 NM_PRECRQU_S_QB_PH
= 0x2d,
17587 NM_PACKRL_PH
= 0x35,
17591 NM_SHRA_R_W
= 0x5e,
17592 NM_SHRA_R_PH
= 0x66,
17593 NM_SHLL_S_PH
= 0x76,
17594 NM_SHLL_S_W
= 0x7e,
17599 /* POOL32A7 instruction pool */
17604 NM_POOL32AXF
= 0x07,
17607 /* P.SR instruction pool */
17613 /* P.SHIFT instruction pool */
17621 /* P.ROTX instruction pool */
17626 /* P.INS instruction pool */
17631 /* P.EXT instruction pool */
17636 /* POOL32F_0 (fmt) instruction pool */
17641 NM_SELEQZ_S
= 0x07,
17642 NM_SELEQZ_D
= 0x47,
17646 NM_SELNEZ_S
= 0x0f,
17647 NM_SELNEZ_D
= 0x4f,
17662 /* POOL32F_3 instruction pool */
17666 NM_MINA_FMT
= 0x04,
17667 NM_MAXA_FMT
= 0x05,
17668 NM_POOL32FXF
= 0x07,
17671 /* POOL32F_5 instruction pool */
17673 NM_CMP_CONDN_S
= 0x00,
17674 NM_CMP_CONDN_D
= 0x02,
17677 /* P.GP.LH instruction pool */
17683 /* P.GP.SH instruction pool */
17688 /* P.GP.CP1 instruction pool */
17696 /* P.LS.S0 instruction pool */
17713 NM_P_PREFS9
= 0x03,
17719 /* P.LS.S1 instruction pool */
17721 NM_ASET_ACLR
= 0x02,
17729 /* P.LS.E0 instruction pool */
17745 /* P.PREFE instruction pool */
17751 /* P.LLE instruction pool */
17757 /* P.SCE instruction pool */
17763 /* P.LS.WM instruction pool */
17769 /* P.LS.UAWM instruction pool */
17775 /* P.BR3A instruction pool */
17781 NM_BPOSGE32C
= 0x04,
17784 /* P16.RI instruction pool */
17786 NM_P16_SYSCALL
= 0x01,
17791 /* POOL16C_0 instruction pool */
17793 NM_POOL16C_00
= 0x00,
17796 /* P16.JRC instruction pool */
17802 /* P.SYSCALL instruction pool */
17808 /* P.TRAP instruction pool */
17814 /* P.CMOVE instruction pool */
17820 /* POOL32Axf instruction pool */
17822 NM_POOL32AXF_1
= 0x01,
17823 NM_POOL32AXF_2
= 0x02,
17824 NM_POOL32AXF_4
= 0x04,
17825 NM_POOL32AXF_5
= 0x05,
17826 NM_POOL32AXF_7
= 0x07,
17829 /* POOL32Axf_1 instruction pool */
17831 NM_POOL32AXF_1_0
= 0x00,
17832 NM_POOL32AXF_1_1
= 0x01,
17833 NM_POOL32AXF_1_3
= 0x03,
17834 NM_POOL32AXF_1_4
= 0x04,
17835 NM_POOL32AXF_1_5
= 0x05,
17836 NM_POOL32AXF_1_7
= 0x07,
17839 /* POOL32Axf_2 instruction pool */
17841 NM_POOL32AXF_2_0_7
= 0x00,
17842 NM_POOL32AXF_2_8_15
= 0x01,
17843 NM_POOL32AXF_2_16_23
= 0x02,
17844 NM_POOL32AXF_2_24_31
= 0x03,
17847 /* POOL32Axf_7 instruction pool */
17849 NM_SHRA_R_QB
= 0x0,
17854 /* POOL32Axf_1_0 instruction pool */
17862 /* POOL32Axf_1_1 instruction pool */
17868 /* POOL32Axf_1_3 instruction pool */
17876 /* POOL32Axf_1_4 instruction pool */
17882 /* POOL32Axf_1_5 instruction pool */
17884 NM_MAQ_S_W_PHR
= 0x0,
17885 NM_MAQ_S_W_PHL
= 0x1,
17886 NM_MAQ_SA_W_PHR
= 0x2,
17887 NM_MAQ_SA_W_PHL
= 0x3,
17890 /* POOL32Axf_1_7 instruction pool */
17894 NM_EXTR_RS_W
= 0x2,
17898 /* POOL32Axf_2_0_7 instruction pool */
17901 NM_DPAQ_S_W_PH
= 0x1,
17903 NM_DPSQ_S_W_PH
= 0x3,
17910 /* POOL32Axf_2_8_15 instruction pool */
17912 NM_DPAX_W_PH
= 0x0,
17913 NM_DPAQ_SA_L_W
= 0x1,
17914 NM_DPSX_W_PH
= 0x2,
17915 NM_DPSQ_SA_L_W
= 0x3,
17918 NM_EXTRV_R_W
= 0x7,
17921 /* POOL32Axf_2_16_23 instruction pool */
17923 NM_DPAU_H_QBL
= 0x0,
17924 NM_DPAQX_S_W_PH
= 0x1,
17925 NM_DPSU_H_QBL
= 0x2,
17926 NM_DPSQX_S_W_PH
= 0x3,
17929 NM_MULSA_W_PH
= 0x6,
17930 NM_EXTRV_RS_W
= 0x7,
17933 /* POOL32Axf_2_24_31 instruction pool */
17935 NM_DPAU_H_QBR
= 0x0,
17936 NM_DPAQX_SA_W_PH
= 0x1,
17937 NM_DPSU_H_QBR
= 0x2,
17938 NM_DPSQX_SA_W_PH
= 0x3,
17941 NM_MULSAQ_S_W_PH
= 0x6,
17942 NM_EXTRV_S_H
= 0x7,
17945 /* POOL32Axf_{4, 5} instruction pool */
17964 /* nanoMIPS DSP instructions */
17965 NM_ABSQ_S_QB
= 0x00,
17966 NM_ABSQ_S_PH
= 0x08,
17967 NM_ABSQ_S_W
= 0x10,
17968 NM_PRECEQ_W_PHL
= 0x28,
17969 NM_PRECEQ_W_PHR
= 0x30,
17970 NM_PRECEQU_PH_QBL
= 0x38,
17971 NM_PRECEQU_PH_QBR
= 0x48,
17972 NM_PRECEU_PH_QBL
= 0x58,
17973 NM_PRECEU_PH_QBR
= 0x68,
17974 NM_PRECEQU_PH_QBLA
= 0x39,
17975 NM_PRECEQU_PH_QBRA
= 0x49,
17976 NM_PRECEU_PH_QBLA
= 0x59,
17977 NM_PRECEU_PH_QBRA
= 0x69,
17978 NM_REPLV_PH
= 0x01,
17979 NM_REPLV_QB
= 0x09,
17982 NM_RADDU_W_QB
= 0x78,
17988 /* PP.SR instruction pool */
17992 NM_RESTORE_JRC
= 0x03,
17995 /* P.SR.F instruction pool */
17998 NM_RESTOREF
= 0x01,
18001 /* P16.SYSCALL instruction pool */
18003 NM_SYSCALL16
= 0x00,
18004 NM_HYPCALL16
= 0x01,
18007 /* POOL16C_00 instruction pool */
18015 /* PP.LSX and PP.LSXS instruction pool */
18053 /* ERETx instruction pool */
18059 /* POOL32FxF_{0, 1} insturction pool */
18068 NM_CVT_S_PL
= 0x84,
18069 NM_CVT_S_PU
= 0xa4,
18071 NM_CVT_L_S
= 0x004,
18072 NM_CVT_L_D
= 0x104,
18073 NM_CVT_W_S
= 0x024,
18074 NM_CVT_W_D
= 0x124,
18076 NM_RSQRT_S
= 0x008,
18077 NM_RSQRT_D
= 0x108,
18082 NM_RECIP_S
= 0x048,
18083 NM_RECIP_D
= 0x148,
18085 NM_FLOOR_L_S
= 0x00c,
18086 NM_FLOOR_L_D
= 0x10c,
18088 NM_FLOOR_W_S
= 0x02c,
18089 NM_FLOOR_W_D
= 0x12c,
18091 NM_CEIL_L_S
= 0x04c,
18092 NM_CEIL_L_D
= 0x14c,
18093 NM_CEIL_W_S
= 0x06c,
18094 NM_CEIL_W_D
= 0x16c,
18095 NM_TRUNC_L_S
= 0x08c,
18096 NM_TRUNC_L_D
= 0x18c,
18097 NM_TRUNC_W_S
= 0x0ac,
18098 NM_TRUNC_W_D
= 0x1ac,
18099 NM_ROUND_L_S
= 0x0cc,
18100 NM_ROUND_L_D
= 0x1cc,
18101 NM_ROUND_W_S
= 0x0ec,
18102 NM_ROUND_W_D
= 0x1ec,
18110 NM_CVT_D_S
= 0x04d,
18111 NM_CVT_D_W
= 0x0cd,
18112 NM_CVT_D_L
= 0x14d,
18113 NM_CVT_S_D
= 0x06d,
18114 NM_CVT_S_W
= 0x0ed,
18115 NM_CVT_S_L
= 0x16d,
18118 /* P.LL instruction pool */
18124 /* P.SC instruction pool */
18130 /* P.DVP instruction pool */
18139 * nanoMIPS decoding engine
18144 /* extraction utilities */
18146 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18147 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18148 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18149 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18150 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18151 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * Maps a 3-bit register encoding to an architectural GPR number:
 * encodings 0-3 select GPRs 16-19 (s0-s3), encodings 4-7 select
 * GPRs 4-7 (a0-a3).  Extra high bits in @r are ignored (masked to
 * three bits), so any int is accepted safely.
 */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Same 3-bit mapping as decode_gpr_gpr3() except that encoding 0 selects
 * GPR 0 (zero) instead of GPR 16, matching the store-source variant of
 * the register list.  Extra high bits in @r are masked off.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * Maps a 4-bit register encoding to an architectural GPR number:
 * encodings 0-3 select GPRs 8-11 (t0-t3), 4-7 select GPRs 4-7 (a0-a3),
 * and 8-15 select GPRs 16-23 (s0-s7).  Extra high bits in @r are
 * masked off.
 */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = {  8,  9, 10, 11,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Same 4-bit mapping as decode_gpr_gpr4() except that encoding 3
 * selects GPR 0 (zero) instead of GPR 11.  Extra high bits in @r are
 * masked off.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = {  8,  9, 10,  0,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
18188 /* extraction utilities */
18190 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18191 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18192 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18193 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18194 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18195 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18198 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18200 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18203 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18204 uint8_t gp
, uint16_t u
)
18207 TCGv va
= tcg_temp_new();
18208 TCGv t0
= tcg_temp_new();
18210 while (counter
!= count
) {
18211 bool use_gp
= gp
&& (counter
== count
- 1);
18212 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18213 int this_offset
= -((counter
+ 1) << 2);
18214 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18215 gen_load_gpr(t0
, this_rt
);
18216 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18217 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18221 /* adjust stack pointer */
18222 gen_adjust_sp(ctx
, -u
);
18228 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18229 uint8_t gp
, uint16_t u
)
18232 TCGv va
= tcg_temp_new();
18233 TCGv t0
= tcg_temp_new();
18235 while (counter
!= count
) {
18236 bool use_gp
= gp
&& (counter
== count
- 1);
18237 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18238 int this_offset
= u
- ((counter
+ 1) << 2);
18239 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18240 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18241 ctx
->default_tcg_memop_mask
);
18242 tcg_gen_ext32s_tl(t0
, t0
);
18243 gen_store_gpr(t0
, this_rt
);
18247 /* adjust stack pointer */
18248 gen_adjust_sp(ctx
, u
);
18254 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18256 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18257 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
18259 switch (extract32(ctx
->opcode
, 2, 2)) {
18261 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18264 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18267 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18270 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18275 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18277 int rt
= extract32(ctx
->opcode
, 21, 5);
18278 int rs
= extract32(ctx
->opcode
, 16, 5);
18279 int rd
= extract32(ctx
->opcode
, 11, 5);
18281 switch (extract32(ctx
->opcode
, 3, 7)) {
18283 switch (extract32(ctx
->opcode
, 10, 1)) {
18286 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18290 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18296 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18300 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18303 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18306 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18309 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18312 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18315 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18318 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18321 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18325 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18328 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18331 switch (extract32(ctx
->opcode
, 10, 1)) {
18333 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18336 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18341 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18344 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18347 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18350 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18353 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18358 #ifndef CONFIG_USER_ONLY
18359 TCGv t0
= tcg_temp_new();
18360 switch (extract32(ctx
->opcode
, 10, 1)) {
18363 check_cp0_enabled(ctx
);
18364 gen_helper_dvp(t0
, cpu_env
);
18365 gen_store_gpr(t0
, rt
);
18370 check_cp0_enabled(ctx
);
18371 gen_helper_evp(t0
, cpu_env
);
18372 gen_store_gpr(t0
, rt
);
18379 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18384 TCGv t0
= tcg_temp_new();
18385 TCGv t1
= tcg_temp_new();
18386 TCGv t2
= tcg_temp_new();
18388 gen_load_gpr(t1
, rs
);
18389 gen_load_gpr(t2
, rt
);
18390 tcg_gen_add_tl(t0
, t1
, t2
);
18391 tcg_gen_ext32s_tl(t0
, t0
);
18392 tcg_gen_xor_tl(t1
, t1
, t2
);
18393 tcg_gen_xor_tl(t2
, t0
, t2
);
18394 tcg_gen_andc_tl(t1
, t2
, t1
);
18396 /* operands of same sign, result different sign */
18397 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18398 gen_store_gpr(t0
, rd
);
18406 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18409 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18412 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18415 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18418 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18421 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18424 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18427 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18429 #ifndef CONFIG_USER_ONLY
18431 check_cp0_enabled(ctx
);
18433 /* Treat as NOP. */
18436 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18439 check_cp0_enabled(ctx
);
18441 TCGv t0
= tcg_temp_new();
18443 gen_load_gpr(t0
, rt
);
18444 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18448 case NM_D_E_MT_VPE
:
18450 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18451 TCGv t0
= tcg_temp_new();
18458 gen_helper_dmt(t0
);
18459 gen_store_gpr(t0
, rt
);
18460 } else if (rs
== 0) {
18463 gen_helper_dvpe(t0
, cpu_env
);
18464 gen_store_gpr(t0
, rt
);
18466 generate_exception_end(ctx
, EXCP_RI
);
18473 gen_helper_emt(t0
);
18474 gen_store_gpr(t0
, rt
);
18475 } else if (rs
== 0) {
18478 gen_helper_evpe(t0
, cpu_env
);
18479 gen_store_gpr(t0
, rt
);
18481 generate_exception_end(ctx
, EXCP_RI
);
18492 TCGv t0
= tcg_temp_new();
18493 TCGv t1
= tcg_temp_new();
18495 gen_load_gpr(t0
, rt
);
18496 gen_load_gpr(t1
, rs
);
18497 gen_helper_fork(t0
, t1
);
18504 check_cp0_enabled(ctx
);
18506 /* Treat as NOP. */
18509 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18510 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18514 check_cp0_enabled(ctx
);
18515 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18516 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18521 TCGv t0
= tcg_temp_new();
18523 gen_load_gpr(t0
, rs
);
18524 gen_helper_yield(t0
, cpu_env
, t0
);
18525 gen_store_gpr(t0
, rt
);
18531 generate_exception_end(ctx
, EXCP_RI
);
18537 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18538 int ret
, int v1
, int v2
)
18544 t0
= tcg_temp_new_i32();
18546 v0_t
= tcg_temp_new();
18547 v1_t
= tcg_temp_new();
18549 tcg_gen_movi_i32(t0
, v2
>> 3);
18551 gen_load_gpr(v0_t
, ret
);
18552 gen_load_gpr(v1_t
, v1
);
18555 case NM_MAQ_S_W_PHR
:
18557 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18559 case NM_MAQ_S_W_PHL
:
18561 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18563 case NM_MAQ_SA_W_PHR
:
18565 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18567 case NM_MAQ_SA_W_PHL
:
18569 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18572 generate_exception_end(ctx
, EXCP_RI
);
18576 tcg_temp_free_i32(t0
);
18578 tcg_temp_free(v0_t
);
18579 tcg_temp_free(v1_t
);
18583 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18584 int ret
, int v1
, int v2
)
18587 TCGv t0
= tcg_temp_new();
18588 TCGv t1
= tcg_temp_new();
18589 TCGv v0_t
= tcg_temp_new();
18591 gen_load_gpr(v0_t
, v1
);
18594 case NM_POOL32AXF_1_0
:
18596 switch (extract32(ctx
->opcode
, 12, 2)) {
18598 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18601 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18604 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18607 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18611 case NM_POOL32AXF_1_1
:
18613 switch (extract32(ctx
->opcode
, 12, 2)) {
18615 tcg_gen_movi_tl(t0
, v2
);
18616 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18619 tcg_gen_movi_tl(t0
, v2
>> 3);
18620 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18623 generate_exception_end(ctx
, EXCP_RI
);
18627 case NM_POOL32AXF_1_3
:
18629 imm
= extract32(ctx
->opcode
, 14, 7);
18630 switch (extract32(ctx
->opcode
, 12, 2)) {
18632 tcg_gen_movi_tl(t0
, imm
);
18633 gen_helper_rddsp(t0
, t0
, cpu_env
);
18634 gen_store_gpr(t0
, ret
);
18637 gen_load_gpr(t0
, ret
);
18638 tcg_gen_movi_tl(t1
, imm
);
18639 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18642 tcg_gen_movi_tl(t0
, v2
>> 3);
18643 tcg_gen_movi_tl(t1
, v1
);
18644 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18645 gen_store_gpr(t0
, ret
);
18648 tcg_gen_movi_tl(t0
, v2
>> 3);
18649 tcg_gen_movi_tl(t1
, v1
);
18650 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18651 gen_store_gpr(t0
, ret
);
18655 case NM_POOL32AXF_1_4
:
18657 tcg_gen_movi_tl(t0
, v2
>> 2);
18658 switch (extract32(ctx
->opcode
, 12, 1)) {
18660 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18661 gen_store_gpr(t0
, ret
);
18664 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18665 gen_store_gpr(t0
, ret
);
18669 case NM_POOL32AXF_1_5
:
18670 opc
= extract32(ctx
->opcode
, 12, 2);
18671 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18673 case NM_POOL32AXF_1_7
:
18675 tcg_gen_movi_tl(t0
, v2
>> 3);
18676 tcg_gen_movi_tl(t1
, v1
);
18677 switch (extract32(ctx
->opcode
, 12, 2)) {
18679 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18680 gen_store_gpr(t0
, ret
);
18683 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18684 gen_store_gpr(t0
, ret
);
18687 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18688 gen_store_gpr(t0
, ret
);
18691 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18692 gen_store_gpr(t0
, ret
);
18697 generate_exception_end(ctx
, EXCP_RI
);
18703 tcg_temp_free(v0_t
);
18706 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18707 TCGv v0
, TCGv v1
, int rd
)
18711 t0
= tcg_temp_new_i32();
18713 tcg_gen_movi_i32(t0
, rd
>> 3);
18716 case NM_POOL32AXF_2_0_7
:
18717 switch (extract32(ctx
->opcode
, 9, 3)) {
18720 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18722 case NM_DPAQ_S_W_PH
:
18724 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18728 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18730 case NM_DPSQ_S_W_PH
:
18732 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18735 generate_exception_end(ctx
, EXCP_RI
);
18739 case NM_POOL32AXF_2_8_15
:
18740 switch (extract32(ctx
->opcode
, 9, 3)) {
18743 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18745 case NM_DPAQ_SA_L_W
:
18747 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18751 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18753 case NM_DPSQ_SA_L_W
:
18755 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18758 generate_exception_end(ctx
, EXCP_RI
);
18762 case NM_POOL32AXF_2_16_23
:
18763 switch (extract32(ctx
->opcode
, 9, 3)) {
18764 case NM_DPAU_H_QBL
:
18766 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18768 case NM_DPAQX_S_W_PH
:
18770 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18772 case NM_DPSU_H_QBL
:
18774 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18776 case NM_DPSQX_S_W_PH
:
18778 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18780 case NM_MULSA_W_PH
:
18782 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18785 generate_exception_end(ctx
, EXCP_RI
);
18789 case NM_POOL32AXF_2_24_31
:
18790 switch (extract32(ctx
->opcode
, 9, 3)) {
18791 case NM_DPAU_H_QBR
:
18793 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18795 case NM_DPAQX_SA_W_PH
:
18797 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18799 case NM_DPSU_H_QBR
:
18801 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18803 case NM_DPSQX_SA_W_PH
:
18805 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18807 case NM_MULSAQ_S_W_PH
:
18809 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18812 generate_exception_end(ctx
, EXCP_RI
);
18817 generate_exception_end(ctx
, EXCP_RI
);
18821 tcg_temp_free_i32(t0
);
18824 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18825 int rt
, int rs
, int rd
)
18828 TCGv t0
= tcg_temp_new();
18829 TCGv t1
= tcg_temp_new();
18830 TCGv v0_t
= tcg_temp_new();
18831 TCGv v1_t
= tcg_temp_new();
18833 gen_load_gpr(v0_t
, rt
);
18834 gen_load_gpr(v1_t
, rs
);
18837 case NM_POOL32AXF_2_0_7
:
18838 switch (extract32(ctx
->opcode
, 9, 3)) {
18840 case NM_DPAQ_S_W_PH
:
18842 case NM_DPSQ_S_W_PH
:
18843 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18848 gen_load_gpr(t0
, rs
);
18850 if (rd
!= 0 && rd
!= 2) {
18851 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18852 tcg_gen_ext32u_tl(t0
, t0
);
18853 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18854 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18856 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18862 int acc
= extract32(ctx
->opcode
, 14, 2);
18863 TCGv_i64 t2
= tcg_temp_new_i64();
18864 TCGv_i64 t3
= tcg_temp_new_i64();
18866 gen_load_gpr(t0
, rt
);
18867 gen_load_gpr(t1
, rs
);
18868 tcg_gen_ext_tl_i64(t2
, t0
);
18869 tcg_gen_ext_tl_i64(t3
, t1
);
18870 tcg_gen_mul_i64(t2
, t2
, t3
);
18871 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18872 tcg_gen_add_i64(t2
, t2
, t3
);
18873 tcg_temp_free_i64(t3
);
18874 gen_move_low32(cpu_LO
[acc
], t2
);
18875 gen_move_high32(cpu_HI
[acc
], t2
);
18876 tcg_temp_free_i64(t2
);
18882 int acc
= extract32(ctx
->opcode
, 14, 2);
18883 TCGv_i32 t2
= tcg_temp_new_i32();
18884 TCGv_i32 t3
= tcg_temp_new_i32();
18886 gen_load_gpr(t0
, rs
);
18887 gen_load_gpr(t1
, rt
);
18888 tcg_gen_trunc_tl_i32(t2
, t0
);
18889 tcg_gen_trunc_tl_i32(t3
, t1
);
18890 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18891 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18892 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18893 tcg_temp_free_i32(t2
);
18894 tcg_temp_free_i32(t3
);
18899 gen_load_gpr(v1_t
, rs
);
18900 tcg_gen_movi_tl(t0
, rd
>> 3);
18901 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18902 gen_store_gpr(t0
, ret
);
18906 case NM_POOL32AXF_2_8_15
:
18907 switch (extract32(ctx
->opcode
, 9, 3)) {
18909 case NM_DPAQ_SA_L_W
:
18911 case NM_DPSQ_SA_L_W
:
18912 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18917 int acc
= extract32(ctx
->opcode
, 14, 2);
18918 TCGv_i64 t2
= tcg_temp_new_i64();
18919 TCGv_i64 t3
= tcg_temp_new_i64();
18921 gen_load_gpr(t0
, rs
);
18922 gen_load_gpr(t1
, rt
);
18923 tcg_gen_ext32u_tl(t0
, t0
);
18924 tcg_gen_ext32u_tl(t1
, t1
);
18925 tcg_gen_extu_tl_i64(t2
, t0
);
18926 tcg_gen_extu_tl_i64(t3
, t1
);
18927 tcg_gen_mul_i64(t2
, t2
, t3
);
18928 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18929 tcg_gen_add_i64(t2
, t2
, t3
);
18930 tcg_temp_free_i64(t3
);
18931 gen_move_low32(cpu_LO
[acc
], t2
);
18932 gen_move_high32(cpu_HI
[acc
], t2
);
18933 tcg_temp_free_i64(t2
);
18939 int acc
= extract32(ctx
->opcode
, 14, 2);
18940 TCGv_i32 t2
= tcg_temp_new_i32();
18941 TCGv_i32 t3
= tcg_temp_new_i32();
18943 gen_load_gpr(t0
, rs
);
18944 gen_load_gpr(t1
, rt
);
18945 tcg_gen_trunc_tl_i32(t2
, t0
);
18946 tcg_gen_trunc_tl_i32(t3
, t1
);
18947 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18948 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18949 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18950 tcg_temp_free_i32(t2
);
18951 tcg_temp_free_i32(t3
);
18956 tcg_gen_movi_tl(t0
, rd
>> 3);
18957 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18958 gen_store_gpr(t0
, ret
);
18961 generate_exception_end(ctx
, EXCP_RI
);
18965 case NM_POOL32AXF_2_16_23
:
18966 switch (extract32(ctx
->opcode
, 9, 3)) {
18967 case NM_DPAU_H_QBL
:
18968 case NM_DPAQX_S_W_PH
:
18969 case NM_DPSU_H_QBL
:
18970 case NM_DPSQX_S_W_PH
:
18971 case NM_MULSA_W_PH
:
18972 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18976 tcg_gen_movi_tl(t0
, rd
>> 3);
18977 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18978 gen_store_gpr(t0
, ret
);
18983 int acc
= extract32(ctx
->opcode
, 14, 2);
18984 TCGv_i64 t2
= tcg_temp_new_i64();
18985 TCGv_i64 t3
= tcg_temp_new_i64();
18987 gen_load_gpr(t0
, rs
);
18988 gen_load_gpr(t1
, rt
);
18989 tcg_gen_ext_tl_i64(t2
, t0
);
18990 tcg_gen_ext_tl_i64(t3
, t1
);
18991 tcg_gen_mul_i64(t2
, t2
, t3
);
18992 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18993 tcg_gen_sub_i64(t2
, t3
, t2
);
18994 tcg_temp_free_i64(t3
);
18995 gen_move_low32(cpu_LO
[acc
], t2
);
18996 gen_move_high32(cpu_HI
[acc
], t2
);
18997 tcg_temp_free_i64(t2
);
19000 case NM_EXTRV_RS_W
:
19002 tcg_gen_movi_tl(t0
, rd
>> 3);
19003 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19004 gen_store_gpr(t0
, ret
);
19008 case NM_POOL32AXF_2_24_31
:
19009 switch (extract32(ctx
->opcode
, 9, 3)) {
19010 case NM_DPAU_H_QBR
:
19011 case NM_DPAQX_SA_W_PH
:
19012 case NM_DPSU_H_QBR
:
19013 case NM_DPSQX_SA_W_PH
:
19014 case NM_MULSAQ_S_W_PH
:
19015 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19019 tcg_gen_movi_tl(t0
, rd
>> 3);
19020 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19021 gen_store_gpr(t0
, ret
);
19026 int acc
= extract32(ctx
->opcode
, 14, 2);
19027 TCGv_i64 t2
= tcg_temp_new_i64();
19028 TCGv_i64 t3
= tcg_temp_new_i64();
19030 gen_load_gpr(t0
, rs
);
19031 gen_load_gpr(t1
, rt
);
19032 tcg_gen_ext32u_tl(t0
, t0
);
19033 tcg_gen_ext32u_tl(t1
, t1
);
19034 tcg_gen_extu_tl_i64(t2
, t0
);
19035 tcg_gen_extu_tl_i64(t3
, t1
);
19036 tcg_gen_mul_i64(t2
, t2
, t3
);
19037 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19038 tcg_gen_sub_i64(t2
, t3
, t2
);
19039 tcg_temp_free_i64(t3
);
19040 gen_move_low32(cpu_LO
[acc
], t2
);
19041 gen_move_high32(cpu_HI
[acc
], t2
);
19042 tcg_temp_free_i64(t2
);
19047 tcg_gen_movi_tl(t0
, rd
>> 3);
19048 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19049 gen_store_gpr(t0
, ret
);
19054 generate_exception_end(ctx
, EXCP_RI
);
19061 tcg_temp_free(v0_t
);
19062 tcg_temp_free(v1_t
);
19065 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19069 TCGv t0
= tcg_temp_new();
19070 TCGv v0_t
= tcg_temp_new();
19072 gen_load_gpr(v0_t
, rs
);
19077 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19078 gen_store_gpr(v0_t
, ret
);
19082 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19083 gen_store_gpr(v0_t
, ret
);
19087 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19088 gen_store_gpr(v0_t
, ret
);
19090 case NM_PRECEQ_W_PHL
:
19092 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19093 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19094 gen_store_gpr(v0_t
, ret
);
19096 case NM_PRECEQ_W_PHR
:
19098 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19099 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19100 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19101 gen_store_gpr(v0_t
, ret
);
19103 case NM_PRECEQU_PH_QBL
:
19105 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19106 gen_store_gpr(v0_t
, ret
);
19108 case NM_PRECEQU_PH_QBR
:
19110 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19111 gen_store_gpr(v0_t
, ret
);
19113 case NM_PRECEQU_PH_QBLA
:
19115 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19116 gen_store_gpr(v0_t
, ret
);
19118 case NM_PRECEQU_PH_QBRA
:
19120 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19121 gen_store_gpr(v0_t
, ret
);
19123 case NM_PRECEU_PH_QBL
:
19125 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19126 gen_store_gpr(v0_t
, ret
);
19128 case NM_PRECEU_PH_QBR
:
19130 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19131 gen_store_gpr(v0_t
, ret
);
19133 case NM_PRECEU_PH_QBLA
:
19135 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19136 gen_store_gpr(v0_t
, ret
);
19138 case NM_PRECEU_PH_QBRA
:
19140 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19141 gen_store_gpr(v0_t
, ret
);
19145 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19146 tcg_gen_shli_tl(t0
, v0_t
, 16);
19147 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19148 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19149 gen_store_gpr(v0_t
, ret
);
19153 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19154 tcg_gen_shli_tl(t0
, v0_t
, 8);
19155 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19156 tcg_gen_shli_tl(t0
, v0_t
, 16);
19157 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19158 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19159 gen_store_gpr(v0_t
, ret
);
19163 gen_helper_bitrev(v0_t
, v0_t
);
19164 gen_store_gpr(v0_t
, ret
);
19169 TCGv tv0
= tcg_temp_new();
19171 gen_load_gpr(tv0
, rt
);
19172 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19173 gen_store_gpr(v0_t
, ret
);
19174 tcg_temp_free(tv0
);
19177 case NM_RADDU_W_QB
:
19179 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19180 gen_store_gpr(v0_t
, ret
);
19183 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19187 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19191 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19194 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19197 generate_exception_end(ctx
, EXCP_RI
);
19201 tcg_temp_free(v0_t
);
19205 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19206 int rt
, int rs
, int rd
)
19208 TCGv t0
= tcg_temp_new();
19209 TCGv rs_t
= tcg_temp_new();
19211 gen_load_gpr(rs_t
, rs
);
19216 tcg_gen_movi_tl(t0
, rd
>> 2);
19217 switch (extract32(ctx
->opcode
, 12, 1)) {
19220 gen_helper_shra_qb(t0
, t0
, rs_t
);
19221 gen_store_gpr(t0
, rt
);
19225 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19226 gen_store_gpr(t0
, rt
);
19232 tcg_gen_movi_tl(t0
, rd
>> 1);
19233 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19234 gen_store_gpr(t0
, rt
);
19240 target_long result
;
19241 imm
= extract32(ctx
->opcode
, 13, 8);
19242 result
= (uint32_t)imm
<< 24 |
19243 (uint32_t)imm
<< 16 |
19244 (uint32_t)imm
<< 8 |
19246 result
= (int32_t)result
;
19247 tcg_gen_movi_tl(t0
, result
);
19248 gen_store_gpr(t0
, rt
);
19252 generate_exception_end(ctx
, EXCP_RI
);
19256 tcg_temp_free(rs_t
);
19260 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19262 int rt
= extract32(ctx
->opcode
, 21, 5);
19263 int rs
= extract32(ctx
->opcode
, 16, 5);
19264 int rd
= extract32(ctx
->opcode
, 11, 5);
19266 switch (extract32(ctx
->opcode
, 6, 3)) {
19267 case NM_POOL32AXF_1
:
19269 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19270 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19273 case NM_POOL32AXF_2
:
19275 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19276 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19279 case NM_POOL32AXF_4
:
19281 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19282 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19285 case NM_POOL32AXF_5
:
19286 switch (extract32(ctx
->opcode
, 9, 7)) {
19287 #ifndef CONFIG_USER_ONLY
19289 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19292 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19295 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19298 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19301 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19304 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19307 check_cp0_enabled(ctx
);
19309 TCGv t0
= tcg_temp_new();
19311 save_cpu_state(ctx
, 1);
19312 gen_helper_di(t0
, cpu_env
);
19313 gen_store_gpr(t0
, rt
);
19314 /* Stop translation as we may have switched the execution mode */
19315 ctx
->base
.is_jmp
= DISAS_STOP
;
19320 check_cp0_enabled(ctx
);
19322 TCGv t0
= tcg_temp_new();
19324 save_cpu_state(ctx
, 1);
19325 gen_helper_ei(t0
, cpu_env
);
19326 gen_store_gpr(t0
, rt
);
19327 /* Stop translation as we may have switched the execution mode */
19328 ctx
->base
.is_jmp
= DISAS_STOP
;
19333 gen_load_srsgpr(rs
, rt
);
19336 gen_store_srsgpr(rs
, rt
);
19339 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19342 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19345 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19349 generate_exception_end(ctx
, EXCP_RI
);
19353 case NM_POOL32AXF_7
:
19355 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19356 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19360 generate_exception_end(ctx
, EXCP_RI
);
19365 /* Immediate Value Compact Branches */
19366 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19367 int rt
, int32_t imm
, int32_t offset
)
19370 int bcond_compute
= 0;
19371 TCGv t0
= tcg_temp_new();
19372 TCGv t1
= tcg_temp_new();
19374 gen_load_gpr(t0
, rt
);
19375 tcg_gen_movi_tl(t1
, imm
);
19376 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19378 /* Load needed operands and calculate btarget */
19381 if (rt
== 0 && imm
== 0) {
19382 /* Unconditional branch */
19383 } else if (rt
== 0 && imm
!= 0) {
19388 cond
= TCG_COND_EQ
;
19394 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19395 generate_exception_end(ctx
, EXCP_RI
);
19397 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19398 /* Unconditional branch */
19399 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19403 tcg_gen_shri_tl(t0
, t0
, imm
);
19404 tcg_gen_andi_tl(t0
, t0
, 1);
19405 tcg_gen_movi_tl(t1
, 0);
19407 if (opc
== NM_BBEQZC
) {
19408 cond
= TCG_COND_EQ
;
19410 cond
= TCG_COND_NE
;
19415 if (rt
== 0 && imm
== 0) {
19418 } else if (rt
== 0 && imm
!= 0) {
19419 /* Unconditional branch */
19422 cond
= TCG_COND_NE
;
19426 if (rt
== 0 && imm
== 0) {
19427 /* Unconditional branch */
19430 cond
= TCG_COND_GE
;
19435 cond
= TCG_COND_LT
;
19438 if (rt
== 0 && imm
== 0) {
19439 /* Unconditional branch */
19442 cond
= TCG_COND_GEU
;
19447 cond
= TCG_COND_LTU
;
19450 MIPS_INVAL("Immediate Value Compact branch");
19451 generate_exception_end(ctx
, EXCP_RI
);
19455 if (bcond_compute
== 0) {
19456 /* Uncoditional compact branch */
19457 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19459 /* Conditional compact branch */
19460 TCGLabel
*fs
= gen_new_label();
19462 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19464 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19467 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19475 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19476 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19479 TCGv t0
= tcg_temp_new();
19480 TCGv t1
= tcg_temp_new();
19483 gen_load_gpr(t0
, rs
);
19487 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19490 /* calculate btarget */
19491 tcg_gen_shli_tl(t0
, t0
, 1);
19492 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19493 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19495 /* unconditional branch to register */
19496 tcg_gen_mov_tl(cpu_PC
, btarget
);
19497 tcg_gen_lookup_and_goto_ptr();
19503 /* nanoMIPS Branches */
19504 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19505 int rs
, int rt
, int32_t offset
)
19507 int bcond_compute
= 0;
19508 TCGv t0
= tcg_temp_new();
19509 TCGv t1
= tcg_temp_new();
19511 /* Load needed operands and calculate btarget */
19513 /* compact branch */
19516 gen_load_gpr(t0
, rs
);
19517 gen_load_gpr(t1
, rt
);
19519 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19523 if (rs
== 0 || rs
== rt
) {
19524 /* OPC_BLEZALC, OPC_BGEZALC */
19525 /* OPC_BGTZALC, OPC_BLTZALC */
19526 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19528 gen_load_gpr(t0
, rs
);
19529 gen_load_gpr(t1
, rt
);
19531 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19534 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19538 /* OPC_BEQZC, OPC_BNEZC */
19539 gen_load_gpr(t0
, rs
);
19541 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19543 /* OPC_JIC, OPC_JIALC */
19544 TCGv tbase
= tcg_temp_new();
19545 TCGv toffset
= tcg_temp_new();
19547 gen_load_gpr(tbase
, rt
);
19548 tcg_gen_movi_tl(toffset
, offset
);
19549 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19550 tcg_temp_free(tbase
);
19551 tcg_temp_free(toffset
);
19555 MIPS_INVAL("Compact branch/jump");
19556 generate_exception_end(ctx
, EXCP_RI
);
19560 if (bcond_compute
== 0) {
19561 /* Uncoditional compact branch */
19564 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19567 MIPS_INVAL("Compact branch/jump");
19568 generate_exception_end(ctx
, EXCP_RI
);
19572 /* Conditional compact branch */
19573 TCGLabel
*fs
= gen_new_label();
19577 if (rs
== 0 && rt
!= 0) {
19579 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19580 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19582 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19585 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19589 if (rs
== 0 && rt
!= 0) {
19591 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19592 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19594 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19597 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19601 if (rs
== 0 && rt
!= 0) {
19603 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19604 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19606 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19609 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19613 if (rs
== 0 && rt
!= 0) {
19615 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19616 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19618 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19621 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19625 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19628 MIPS_INVAL("Compact conditional branch/jump");
19629 generate_exception_end(ctx
, EXCP_RI
);
19633 /* Generating branch here as compact branches don't have delay slot */
19634 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19637 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19646 /* nanoMIPS CP1 Branches */
19647 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19648 int32_t ft
, int32_t offset
)
19650 target_ulong btarget
;
19651 TCGv_i64 t0
= tcg_temp_new_i64();
19653 gen_load_fpr64(ctx
, t0
, ft
);
19654 tcg_gen_andi_i64(t0
, t0
, 1);
19656 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19660 tcg_gen_xori_i64(t0
, t0
, 1);
19661 ctx
->hflags
|= MIPS_HFLAG_BC
;
19664 /* t0 already set */
19665 ctx
->hflags
|= MIPS_HFLAG_BC
;
19668 MIPS_INVAL("cp1 cond branch");
19669 generate_exception_end(ctx
, EXCP_RI
);
19673 tcg_gen_trunc_i64_tl(bcond
, t0
);
19675 ctx
->btarget
= btarget
;
19678 tcg_temp_free_i64(t0
);
19682 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19685 t0
= tcg_temp_new();
19686 t1
= tcg_temp_new();
19688 gen_load_gpr(t0
, rs
);
19689 gen_load_gpr(t1
, rt
);
19691 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19692 /* PP.LSXS instructions require shifting */
19693 switch (extract32(ctx
->opcode
, 7, 4)) {
19698 tcg_gen_shli_tl(t0
, t0
, 1);
19705 tcg_gen_shli_tl(t0
, t0
, 2);
19709 tcg_gen_shli_tl(t0
, t0
, 3);
19713 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19715 switch (extract32(ctx
->opcode
, 7, 4)) {
19717 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19719 gen_store_gpr(t0
, rd
);
19723 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19725 gen_store_gpr(t0
, rd
);
19729 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19731 gen_store_gpr(t0
, rd
);
19734 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19736 gen_store_gpr(t0
, rd
);
19740 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19742 gen_store_gpr(t0
, rd
);
19746 gen_load_gpr(t1
, rd
);
19747 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19753 gen_load_gpr(t1
, rd
);
19754 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19760 gen_load_gpr(t1
, rd
);
19761 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19765 /*case NM_LWC1XS:*/
19767 /*case NM_LDC1XS:*/
19769 /*case NM_SWC1XS:*/
19771 /*case NM_SDC1XS:*/
19772 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19773 check_cp1_enabled(ctx
);
19774 switch (extract32(ctx
->opcode
, 7, 4)) {
19776 /*case NM_LWC1XS:*/
19777 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19780 /*case NM_LDC1XS:*/
19781 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19784 /*case NM_SWC1XS:*/
19785 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19788 /*case NM_SDC1XS:*/
19789 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19793 generate_exception_err(ctx
, EXCP_CpU
, 1);
19797 generate_exception_end(ctx
, EXCP_RI
);
19805 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19809 rt
= extract32(ctx
->opcode
, 21, 5);
19810 rs
= extract32(ctx
->opcode
, 16, 5);
19811 rd
= extract32(ctx
->opcode
, 11, 5);
19813 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19814 generate_exception_end(ctx
, EXCP_RI
);
19817 check_cp1_enabled(ctx
);
19818 switch (extract32(ctx
->opcode
, 0, 3)) {
19820 switch (extract32(ctx
->opcode
, 3, 7)) {
19822 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19825 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19828 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19831 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19834 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19837 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19840 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19843 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19846 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19849 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19852 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19855 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19858 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19861 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19864 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19867 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19870 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19873 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19876 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19879 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19882 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19885 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19888 generate_exception_end(ctx
, EXCP_RI
);
19893 switch (extract32(ctx
->opcode
, 3, 3)) {
19895 switch (extract32(ctx
->opcode
, 9, 1)) {
19897 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19900 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19905 switch (extract32(ctx
->opcode
, 9, 1)) {
19907 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19910 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19915 switch (extract32(ctx
->opcode
, 9, 1)) {
19917 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19920 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19925 switch (extract32(ctx
->opcode
, 9, 1)) {
19927 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19930 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19935 switch (extract32(ctx
->opcode
, 6, 8)) {
19937 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19940 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19943 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19946 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19949 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19952 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19955 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19958 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19961 switch (extract32(ctx
->opcode
, 6, 9)) {
19963 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19966 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19969 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19972 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19975 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19978 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19981 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19984 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19987 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19990 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19993 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19996 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19999 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20002 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20005 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20008 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20011 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20014 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20017 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20020 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20023 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20026 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20029 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20032 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20035 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20038 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20041 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20044 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20047 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20050 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20053 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20056 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20059 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20062 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20065 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20068 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20071 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20074 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20077 generate_exception_end(ctx
, EXCP_RI
);
20086 switch (extract32(ctx
->opcode
, 3, 3)) {
20087 case NM_CMP_CONDN_S
:
20088 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20090 case NM_CMP_CONDN_D
:
20091 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20094 generate_exception_end(ctx
, EXCP_RI
);
20099 generate_exception_end(ctx
, EXCP_RI
);
20104 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20105 int rd
, int rs
, int rt
)
20108 TCGv t0
= tcg_temp_new();
20109 TCGv v1_t
= tcg_temp_new();
20110 TCGv v2_t
= tcg_temp_new();
20112 gen_load_gpr(v1_t
, rs
);
20113 gen_load_gpr(v2_t
, rt
);
20118 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20122 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20126 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20128 case NM_CMPU_EQ_QB
:
20130 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20132 case NM_CMPU_LT_QB
:
20134 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20136 case NM_CMPU_LE_QB
:
20138 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20140 case NM_CMPGU_EQ_QB
:
20142 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20143 gen_store_gpr(v1_t
, ret
);
20145 case NM_CMPGU_LT_QB
:
20147 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20148 gen_store_gpr(v1_t
, ret
);
20150 case NM_CMPGU_LE_QB
:
20152 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20153 gen_store_gpr(v1_t
, ret
);
20155 case NM_CMPGDU_EQ_QB
:
20157 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20158 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20159 gen_store_gpr(v1_t
, ret
);
20161 case NM_CMPGDU_LT_QB
:
20163 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20164 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20165 gen_store_gpr(v1_t
, ret
);
20167 case NM_CMPGDU_LE_QB
:
20169 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20170 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20171 gen_store_gpr(v1_t
, ret
);
20175 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20176 gen_store_gpr(v1_t
, ret
);
20180 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20181 gen_store_gpr(v1_t
, ret
);
20185 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20186 gen_store_gpr(v1_t
, ret
);
20190 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20191 gen_store_gpr(v1_t
, ret
);
20195 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20196 gen_store_gpr(v1_t
, ret
);
20200 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20201 gen_store_gpr(v1_t
, ret
);
20205 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20206 gen_store_gpr(v1_t
, ret
);
20210 switch (extract32(ctx
->opcode
, 10, 1)) {
20213 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20214 gen_store_gpr(v1_t
, ret
);
20218 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20219 gen_store_gpr(v1_t
, ret
);
20223 case NM_ADDQH_R_PH
:
20225 switch (extract32(ctx
->opcode
, 10, 1)) {
20228 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20229 gen_store_gpr(v1_t
, ret
);
20233 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20234 gen_store_gpr(v1_t
, ret
);
20240 switch (extract32(ctx
->opcode
, 10, 1)) {
20243 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20244 gen_store_gpr(v1_t
, ret
);
20248 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20249 gen_store_gpr(v1_t
, ret
);
20255 switch (extract32(ctx
->opcode
, 10, 1)) {
20258 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20259 gen_store_gpr(v1_t
, ret
);
20263 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20264 gen_store_gpr(v1_t
, ret
);
20270 switch (extract32(ctx
->opcode
, 10, 1)) {
20273 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20274 gen_store_gpr(v1_t
, ret
);
20278 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20279 gen_store_gpr(v1_t
, ret
);
20283 case NM_ADDUH_R_QB
:
20285 switch (extract32(ctx
->opcode
, 10, 1)) {
20288 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20289 gen_store_gpr(v1_t
, ret
);
20293 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20294 gen_store_gpr(v1_t
, ret
);
20298 case NM_SHRAV_R_PH
:
20300 switch (extract32(ctx
->opcode
, 10, 1)) {
20303 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20304 gen_store_gpr(v1_t
, ret
);
20308 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20309 gen_store_gpr(v1_t
, ret
);
20313 case NM_SHRAV_R_QB
:
20315 switch (extract32(ctx
->opcode
, 10, 1)) {
20318 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20319 gen_store_gpr(v1_t
, ret
);
20323 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20324 gen_store_gpr(v1_t
, ret
);
20330 switch (extract32(ctx
->opcode
, 10, 1)) {
20333 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20334 gen_store_gpr(v1_t
, ret
);
20338 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20339 gen_store_gpr(v1_t
, ret
);
20343 case NM_SUBQH_R_PH
:
20345 switch (extract32(ctx
->opcode
, 10, 1)) {
20348 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20349 gen_store_gpr(v1_t
, ret
);
20353 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20354 gen_store_gpr(v1_t
, ret
);
20360 switch (extract32(ctx
->opcode
, 10, 1)) {
20363 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20364 gen_store_gpr(v1_t
, ret
);
20368 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20369 gen_store_gpr(v1_t
, ret
);
20375 switch (extract32(ctx
->opcode
, 10, 1)) {
20378 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20379 gen_store_gpr(v1_t
, ret
);
20383 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20384 gen_store_gpr(v1_t
, ret
);
20390 switch (extract32(ctx
->opcode
, 10, 1)) {
20393 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20394 gen_store_gpr(v1_t
, ret
);
20398 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20399 gen_store_gpr(v1_t
, ret
);
20403 case NM_SUBUH_R_QB
:
20405 switch (extract32(ctx
->opcode
, 10, 1)) {
20408 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20409 gen_store_gpr(v1_t
, ret
);
20413 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20414 gen_store_gpr(v1_t
, ret
);
20418 case NM_SHLLV_S_PH
:
20420 switch (extract32(ctx
->opcode
, 10, 1)) {
20423 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20424 gen_store_gpr(v1_t
, ret
);
20428 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20429 gen_store_gpr(v1_t
, ret
);
20433 case NM_PRECR_SRA_R_PH_W
:
20435 switch (extract32(ctx
->opcode
, 10, 1)) {
20437 /* PRECR_SRA_PH_W */
20439 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20440 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20442 gen_store_gpr(v1_t
, rt
);
20443 tcg_temp_free_i32(sa_t
);
20447 /* PRECR_SRA_R_PH_W */
20449 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20450 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20452 gen_store_gpr(v1_t
, rt
);
20453 tcg_temp_free_i32(sa_t
);
20458 case NM_MULEU_S_PH_QBL
:
20460 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20461 gen_store_gpr(v1_t
, ret
);
20463 case NM_MULEU_S_PH_QBR
:
20465 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20466 gen_store_gpr(v1_t
, ret
);
20468 case NM_MULQ_RS_PH
:
20470 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20471 gen_store_gpr(v1_t
, ret
);
20475 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20476 gen_store_gpr(v1_t
, ret
);
20480 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20481 gen_store_gpr(v1_t
, ret
);
20485 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20486 gen_store_gpr(v1_t
, ret
);
20490 gen_load_gpr(t0
, rs
);
20492 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20494 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20498 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20499 gen_store_gpr(v1_t
, ret
);
20503 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20504 gen_store_gpr(v1_t
, ret
);
20508 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20509 gen_store_gpr(v1_t
, ret
);
20513 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20514 gen_store_gpr(v1_t
, ret
);
20518 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20519 gen_store_gpr(v1_t
, ret
);
20523 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20524 gen_store_gpr(v1_t
, ret
);
20529 TCGv tv0
= tcg_temp_new();
20530 TCGv tv1
= tcg_temp_new();
20531 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20533 tcg_gen_movi_tl(tv0
, rd
>> 3);
20534 tcg_gen_movi_tl(tv1
, imm
);
20535 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20538 case NM_MULEQ_S_W_PHL
:
20540 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20541 gen_store_gpr(v1_t
, ret
);
20543 case NM_MULEQ_S_W_PHR
:
20545 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20546 gen_store_gpr(v1_t
, ret
);
20550 switch (extract32(ctx
->opcode
, 10, 1)) {
20553 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20554 gen_store_gpr(v1_t
, ret
);
20558 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20559 gen_store_gpr(v1_t
, ret
);
20563 case NM_PRECR_QB_PH
:
20565 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20566 gen_store_gpr(v1_t
, ret
);
20568 case NM_PRECRQ_QB_PH
:
20570 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20571 gen_store_gpr(v1_t
, ret
);
20573 case NM_PRECRQ_PH_W
:
20575 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20576 gen_store_gpr(v1_t
, ret
);
20578 case NM_PRECRQ_RS_PH_W
:
20580 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20581 gen_store_gpr(v1_t
, ret
);
20583 case NM_PRECRQU_S_QB_PH
:
20585 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20586 gen_store_gpr(v1_t
, ret
);
20590 tcg_gen_movi_tl(t0
, rd
);
20591 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20592 gen_store_gpr(v1_t
, rt
);
20596 tcg_gen_movi_tl(t0
, rd
>> 1);
20597 switch (extract32(ctx
->opcode
, 10, 1)) {
20600 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20601 gen_store_gpr(v1_t
, rt
);
20605 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20606 gen_store_gpr(v1_t
, rt
);
20612 tcg_gen_movi_tl(t0
, rd
>> 1);
20613 switch (extract32(ctx
->opcode
, 10, 2)) {
20616 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20617 gen_store_gpr(v1_t
, rt
);
20621 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20622 gen_store_gpr(v1_t
, rt
);
20625 generate_exception_end(ctx
, EXCP_RI
);
20631 tcg_gen_movi_tl(t0
, rd
);
20632 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20633 gen_store_gpr(v1_t
, rt
);
20639 imm
= sextract32(ctx
->opcode
, 11, 11);
20640 imm
= (int16_t)(imm
<< 6) >> 6;
20642 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20647 generate_exception_end(ctx
, EXCP_RI
);
20652 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20660 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20661 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20663 rt
= extract32(ctx
->opcode
, 21, 5);
20664 rs
= extract32(ctx
->opcode
, 16, 5);
20665 rd
= extract32(ctx
->opcode
, 11, 5);
20667 op
= extract32(ctx
->opcode
, 26, 6);
20672 switch (extract32(ctx
->opcode
, 19, 2)) {
20675 generate_exception_end(ctx
, EXCP_RI
);
20678 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20679 generate_exception_end(ctx
, EXCP_SYSCALL
);
20681 generate_exception_end(ctx
, EXCP_RI
);
20685 generate_exception_end(ctx
, EXCP_BREAK
);
20688 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20689 gen_helper_do_semihosting(cpu_env
);
20691 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20692 generate_exception_end(ctx
, EXCP_RI
);
20694 generate_exception_end(ctx
, EXCP_DBp
);
20701 imm
= extract32(ctx
->opcode
, 0, 16);
20703 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20705 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20707 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20712 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20713 extract32(ctx
->opcode
, 1, 20) << 1;
20714 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20715 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20719 switch (ctx
->opcode
& 0x07) {
20721 gen_pool32a0_nanomips_insn(env
, ctx
);
20725 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20726 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20730 switch (extract32(ctx
->opcode
, 3, 3)) {
20732 gen_p_lsx(ctx
, rd
, rs
, rt
);
20735 /* In nanoMIPS, the shift field directly encodes the shift
20736 * amount, meaning that the supported shift values are in
20737 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20738 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20739 extract32(ctx
->opcode
, 9, 2) - 1);
20742 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20745 gen_pool32axf_nanomips_insn(env
, ctx
);
20748 generate_exception_end(ctx
, EXCP_RI
);
20753 generate_exception_end(ctx
, EXCP_RI
);
20758 switch (ctx
->opcode
& 0x03) {
20761 offset
= extract32(ctx
->opcode
, 0, 21);
20762 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20766 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20769 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20772 generate_exception_end(ctx
, EXCP_RI
);
20778 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20779 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20780 switch (extract32(ctx
->opcode
, 16, 5)) {
20784 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20790 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20791 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20797 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20803 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20806 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20813 t0
= tcg_temp_new();
20815 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20818 tcg_gen_movi_tl(t0
, addr
);
20819 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20827 t0
= tcg_temp_new();
20828 t1
= tcg_temp_new();
20830 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20833 tcg_gen_movi_tl(t0
, addr
);
20834 gen_load_gpr(t1
, rt
);
20836 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20843 generate_exception_end(ctx
, EXCP_RI
);
20849 switch (extract32(ctx
->opcode
, 12, 4)) {
20851 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20854 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20857 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20860 switch (extract32(ctx
->opcode
, 20, 1)) {
20862 switch (ctx
->opcode
& 3) {
20864 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20865 extract32(ctx
->opcode
, 2, 1),
20866 extract32(ctx
->opcode
, 3, 9) << 3);
20869 case NM_RESTORE_JRC
:
20870 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20871 extract32(ctx
->opcode
, 2, 1),
20872 extract32(ctx
->opcode
, 3, 9) << 3);
20873 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20874 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20878 generate_exception_end(ctx
, EXCP_RI
);
20883 generate_exception_end(ctx
, EXCP_RI
);
20888 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20891 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20895 TCGv t0
= tcg_temp_new();
20897 imm
= extract32(ctx
->opcode
, 0, 12);
20898 gen_load_gpr(t0
, rs
);
20899 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20900 gen_store_gpr(t0
, rt
);
20906 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20907 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20911 int shift
= extract32(ctx
->opcode
, 0, 5);
20912 switch (extract32(ctx
->opcode
, 5, 4)) {
20914 if (rt
== 0 && shift
== 0) {
20916 } else if (rt
== 0 && shift
== 3) {
20917 /* EHB - treat as NOP */
20918 } else if (rt
== 0 && shift
== 5) {
20919 /* PAUSE - treat as NOP */
20920 } else if (rt
== 0 && shift
== 6) {
20922 gen_sync(extract32(ctx
->opcode
, 16, 5));
20925 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20926 extract32(ctx
->opcode
, 0, 5));
20930 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20931 extract32(ctx
->opcode
, 0, 5));
20934 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20935 extract32(ctx
->opcode
, 0, 5));
20938 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20939 extract32(ctx
->opcode
, 0, 5));
20947 TCGv t0
= tcg_temp_new();
20948 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20949 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20951 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20953 gen_load_gpr(t0
, rs
);
20954 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20957 tcg_temp_free_i32(shift
);
20958 tcg_temp_free_i32(shiftx
);
20959 tcg_temp_free_i32(stripe
);
20963 switch (((ctx
->opcode
>> 10) & 2) |
20964 (extract32(ctx
->opcode
, 5, 1))) {
20967 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20968 extract32(ctx
->opcode
, 6, 5));
20971 generate_exception_end(ctx
, EXCP_RI
);
20976 switch (((ctx
->opcode
>> 10) & 2) |
20977 (extract32(ctx
->opcode
, 5, 1))) {
20980 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20981 extract32(ctx
->opcode
, 6, 5));
20984 generate_exception_end(ctx
, EXCP_RI
);
20989 generate_exception_end(ctx
, EXCP_RI
);
20994 gen_pool32f_nanomips_insn(ctx
);
20999 switch (extract32(ctx
->opcode
, 1, 1)) {
21002 tcg_gen_movi_tl(cpu_gpr
[rt
],
21003 sextract32(ctx
->opcode
, 0, 1) << 31 |
21004 extract32(ctx
->opcode
, 2, 10) << 21 |
21005 extract32(ctx
->opcode
, 12, 9) << 12);
21010 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21011 extract32(ctx
->opcode
, 2, 10) << 21 |
21012 extract32(ctx
->opcode
, 12, 9) << 12;
21014 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21015 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21022 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21024 switch (extract32(ctx
->opcode
, 18, 3)) {
21026 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21029 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21032 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21036 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21041 switch (ctx
->opcode
& 1) {
21043 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21046 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21052 switch (ctx
->opcode
& 1) {
21054 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21057 generate_exception_end(ctx
, EXCP_RI
);
21063 switch (ctx
->opcode
& 0x3) {
21065 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21068 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21071 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21074 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21079 generate_exception_end(ctx
, EXCP_RI
);
21086 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21088 switch (extract32(ctx
->opcode
, 12, 4)) {
21092 /* Break the TB to be able to sync copied instructions
21094 ctx
->base
.is_jmp
= DISAS_STOP
;
21097 /* Treat as NOP. */
21101 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21104 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21107 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21110 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21113 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21116 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21119 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21122 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21125 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21128 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21131 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21134 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21137 generate_exception_end(ctx
, EXCP_RI
);
21144 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21145 extract32(ctx
->opcode
, 0, 8);
21147 switch (extract32(ctx
->opcode
, 8, 3)) {
21149 switch (extract32(ctx
->opcode
, 11, 4)) {
21151 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21154 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21157 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21160 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21163 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21166 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21169 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21172 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21175 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21178 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21181 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21184 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21189 /* Break the TB to be able to sync copied instructions
21191 ctx
->base
.is_jmp
= DISAS_STOP
;
21194 /* Treat as NOP. */
21198 generate_exception_end(ctx
, EXCP_RI
);
21203 switch (extract32(ctx
->opcode
, 11, 4)) {
21208 TCGv t0
= tcg_temp_new();
21209 TCGv t1
= tcg_temp_new();
21211 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21213 switch (extract32(ctx
->opcode
, 11, 4)) {
21215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21217 gen_store_gpr(t0
, rt
);
21220 gen_load_gpr(t1
, rt
);
21221 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21230 switch (ctx
->opcode
& 0x03) {
21232 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21236 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21241 switch (ctx
->opcode
& 0x03) {
21243 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21247 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21252 check_cp0_enabled(ctx
);
21253 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21254 gen_cache_operation(ctx
, rt
, rs
, s
);
21260 switch (extract32(ctx
->opcode
, 11, 4)) {
21263 check_cp0_enabled(ctx
);
21264 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21268 check_cp0_enabled(ctx
);
21269 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21273 check_cp0_enabled(ctx
);
21274 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21278 /* case NM_SYNCIE */
21280 check_cp0_enabled(ctx
);
21281 /* Break the TB to be able to sync copied instructions
21283 ctx
->base
.is_jmp
= DISAS_STOP
;
21285 /* case NM_PREFE */
21287 check_cp0_enabled(ctx
);
21288 /* Treat as NOP. */
21293 check_cp0_enabled(ctx
);
21294 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21298 check_cp0_enabled(ctx
);
21299 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21303 check_cp0_enabled(ctx
);
21304 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21307 check_nms_dl_il_sl_tl_l2c(ctx
);
21308 gen_cache_operation(ctx
, rt
, rs
, s
);
21312 check_cp0_enabled(ctx
);
21313 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21317 check_cp0_enabled(ctx
);
21318 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21321 switch (extract32(ctx
->opcode
, 2, 2)) {
21325 check_cp0_enabled(ctx
);
21326 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21331 check_cp0_enabled(ctx
);
21332 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21335 generate_exception_end(ctx
, EXCP_RI
);
21340 switch (extract32(ctx
->opcode
, 2, 2)) {
21344 check_cp0_enabled(ctx
);
21345 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21350 check_cp0_enabled(ctx
);
21351 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21354 generate_exception_end(ctx
, EXCP_RI
);
21364 int count
= extract32(ctx
->opcode
, 12, 3);
21367 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21368 extract32(ctx
->opcode
, 0, 8);
21369 TCGv va
= tcg_temp_new();
21370 TCGv t1
= tcg_temp_new();
21371 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21372 NM_P_LS_UAWM
? MO_UNALN
: 0;
21374 count
= (count
== 0) ? 8 : count
;
21375 while (counter
!= count
) {
21376 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21377 int this_offset
= offset
+ (counter
<< 2);
21379 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21381 switch (extract32(ctx
->opcode
, 11, 1)) {
21383 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21385 gen_store_gpr(t1
, this_rt
);
21386 if ((this_rt
== rs
) &&
21387 (counter
!= (count
- 1))) {
21388 /* UNPREDICTABLE */
21392 this_rt
= (rt
== 0) ? 0 : this_rt
;
21393 gen_load_gpr(t1
, this_rt
);
21394 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21405 generate_exception_end(ctx
, EXCP_RI
);
21413 TCGv t0
= tcg_temp_new();
21414 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21415 extract32(ctx
->opcode
, 1, 20) << 1;
21416 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21417 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21418 extract32(ctx
->opcode
, 21, 3));
21419 gen_load_gpr(t0
, rt
);
21420 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21421 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21427 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21428 extract32(ctx
->opcode
, 1, 24) << 1;
21430 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21432 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21435 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21440 switch (extract32(ctx
->opcode
, 12, 4)) {
21443 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21446 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21449 generate_exception_end(ctx
, EXCP_RI
);
21455 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21456 extract32(ctx
->opcode
, 1, 13) << 1;
21457 switch (extract32(ctx
->opcode
, 14, 2)) {
21460 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21463 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21464 extract32(ctx
->opcode
, 1, 13) << 1;
21465 check_cp1_enabled(ctx
);
21466 switch (extract32(ctx
->opcode
, 16, 5)) {
21468 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21471 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21476 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21477 extract32(ctx
->opcode
, 0, 1) << 13;
21479 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21484 generate_exception_end(ctx
, EXCP_RI
);
21490 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21492 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21496 if (rs
== rt
|| rt
== 0) {
21497 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21498 } else if (rs
== 0) {
21499 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21501 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21509 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21510 extract32(ctx
->opcode
, 1, 13) << 1;
21511 switch (extract32(ctx
->opcode
, 14, 2)) {
21514 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21517 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21519 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21521 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21525 if (rs
== 0 || rs
== rt
) {
21527 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21529 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21533 generate_exception_end(ctx
, EXCP_RI
);
21540 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21541 extract32(ctx
->opcode
, 1, 10) << 1;
21542 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21544 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21549 generate_exception_end(ctx
, EXCP_RI
);
21555 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21558 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21559 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21560 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21564 /* make sure instructions are on a halfword boundary */
21565 if (ctx
->base
.pc_next
& 0x1) {
21566 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21567 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21568 tcg_temp_free(tmp
);
21569 generate_exception_end(ctx
, EXCP_AdEL
);
21573 op
= extract32(ctx
->opcode
, 10, 6);
21576 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21579 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21580 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21583 switch (extract32(ctx
->opcode
, 3, 2)) {
21584 case NM_P16_SYSCALL
:
21585 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21586 generate_exception_end(ctx
, EXCP_SYSCALL
);
21588 generate_exception_end(ctx
, EXCP_RI
);
21592 generate_exception_end(ctx
, EXCP_BREAK
);
21595 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21596 gen_helper_do_semihosting(cpu_env
);
21598 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21599 generate_exception_end(ctx
, EXCP_RI
);
21601 generate_exception_end(ctx
, EXCP_DBp
);
21606 generate_exception_end(ctx
, EXCP_RI
);
21613 int shift
= extract32(ctx
->opcode
, 0, 3);
21615 shift
= (shift
== 0) ? 8 : shift
;
21617 switch (extract32(ctx
->opcode
, 3, 1)) {
21625 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21629 switch (ctx
->opcode
& 1) {
21631 gen_pool16c_nanomips_insn(ctx
);
21634 gen_ldxs(ctx
, rt
, rs
, rd
);
21639 switch (extract32(ctx
->opcode
, 6, 1)) {
21641 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21642 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21645 generate_exception_end(ctx
, EXCP_RI
);
21650 switch (extract32(ctx
->opcode
, 3, 1)) {
21652 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21653 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21655 case NM_P_ADDIURS5
:
21656 rt
= extract32(ctx
->opcode
, 5, 5);
21658 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21659 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21660 (extract32(ctx
->opcode
, 0, 3));
21661 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21667 switch (ctx
->opcode
& 0x1) {
21669 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21672 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21677 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21678 extract32(ctx
->opcode
, 5, 3);
21679 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21680 extract32(ctx
->opcode
, 0, 3);
21681 rt
= decode_gpr_gpr4(rt
);
21682 rs
= decode_gpr_gpr4(rs
);
21683 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21684 (extract32(ctx
->opcode
, 3, 1))) {
21687 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21691 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21694 generate_exception_end(ctx
, EXCP_RI
);
21700 int imm
= extract32(ctx
->opcode
, 0, 7);
21701 imm
= (imm
== 0x7f ? -1 : imm
);
21703 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21709 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21710 u
= (u
== 12) ? 0xff :
21711 (u
== 13) ? 0xffff : u
;
21712 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21716 offset
= extract32(ctx
->opcode
, 0, 2);
21717 switch (extract32(ctx
->opcode
, 2, 2)) {
21719 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21722 rt
= decode_gpr_gpr3_src_store(
21723 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21724 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21727 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21730 generate_exception_end(ctx
, EXCP_RI
);
21735 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21736 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21738 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21741 rt
= decode_gpr_gpr3_src_store(
21742 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21743 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21746 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21749 generate_exception_end(ctx
, EXCP_RI
);
21754 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21755 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21758 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21759 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21760 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21764 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21765 extract32(ctx
->opcode
, 5, 3);
21766 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21767 extract32(ctx
->opcode
, 0, 3);
21768 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21769 (extract32(ctx
->opcode
, 8, 1) << 2);
21770 rt
= decode_gpr_gpr4(rt
);
21771 rs
= decode_gpr_gpr4(rs
);
21772 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21776 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21777 extract32(ctx
->opcode
, 5, 3);
21778 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21779 extract32(ctx
->opcode
, 0, 3);
21780 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21781 (extract32(ctx
->opcode
, 8, 1) << 2);
21782 rt
= decode_gpr_gpr4_zero(rt
);
21783 rs
= decode_gpr_gpr4(rs
);
21784 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21787 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21788 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21791 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21792 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21793 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21796 rt
= decode_gpr_gpr3_src_store(
21797 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21798 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21799 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21800 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21803 rt
= decode_gpr_gpr3_src_store(
21804 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21805 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21806 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21809 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21810 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21811 (extract32(ctx
->opcode
, 1, 9) << 1));
21814 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21815 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21816 (extract32(ctx
->opcode
, 1, 9) << 1));
21819 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21820 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21821 (extract32(ctx
->opcode
, 1, 6) << 1));
21824 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21825 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21826 (extract32(ctx
->opcode
, 1, 6) << 1));
21829 switch (ctx
->opcode
& 0xf) {
21832 switch (extract32(ctx
->opcode
, 4, 1)) {
21834 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21835 extract32(ctx
->opcode
, 5, 5), 0, 0);
21838 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21839 extract32(ctx
->opcode
, 5, 5), 31, 0);
21846 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21847 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21848 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21849 extract32(ctx
->opcode
, 0, 4) << 1);
21856 int count
= extract32(ctx
->opcode
, 0, 4);
21857 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21859 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21860 switch (extract32(ctx
->opcode
, 8, 1)) {
21862 gen_save(ctx
, rt
, count
, 0, u
);
21864 case NM_RESTORE_JRC16
:
21865 gen_restore(ctx
, rt
, count
, 0, u
);
21866 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21875 static const int gpr2reg1
[] = {4, 5, 6, 7};
21876 static const int gpr2reg2
[] = {5, 6, 7, 8};
21878 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21879 extract32(ctx
->opcode
, 8, 1);
21880 int r1
= gpr2reg1
[rd2
];
21881 int r2
= gpr2reg2
[rd2
];
21882 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21883 extract32(ctx
->opcode
, 0, 3);
21884 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21885 extract32(ctx
->opcode
, 5, 3);
21886 TCGv t0
= tcg_temp_new();
21887 TCGv t1
= tcg_temp_new();
21888 if (op
== NM_MOVEP
) {
21891 rs
= decode_gpr_gpr4_zero(r3
);
21892 rt
= decode_gpr_gpr4_zero(r4
);
21894 rd
= decode_gpr_gpr4(r3
);
21895 re
= decode_gpr_gpr4(r4
);
21899 gen_load_gpr(t0
, rs
);
21900 gen_load_gpr(t1
, rt
);
21901 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21902 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21908 return decode_nanomips_32_48_opc(env
, ctx
);
21915 /* SmartMIPS extension to MIPS32 */
21917 #if defined(TARGET_MIPS64)
21919 /* MDMX extension to MIPS64 */
21923 /* MIPSDSP functions. */
21924 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21925 int rd
, int base
, int offset
)
21930 t0
= tcg_temp_new();
21933 gen_load_gpr(t0
, offset
);
21934 } else if (offset
== 0) {
21935 gen_load_gpr(t0
, base
);
21937 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21942 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21943 gen_store_gpr(t0
, rd
);
21946 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21947 gen_store_gpr(t0
, rd
);
21950 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21951 gen_store_gpr(t0
, rd
);
21953 #if defined(TARGET_MIPS64)
21955 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21956 gen_store_gpr(t0
, rd
);
21963 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21964 int ret
, int v1
, int v2
)
21970 /* Treat as NOP. */
21974 v1_t
= tcg_temp_new();
21975 v2_t
= tcg_temp_new();
21977 gen_load_gpr(v1_t
, v1
);
21978 gen_load_gpr(v2_t
, v2
);
21981 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21982 case OPC_MULT_G_2E
:
21986 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21988 case OPC_ADDUH_R_QB
:
21989 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21992 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21994 case OPC_ADDQH_R_PH
:
21995 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21998 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22000 case OPC_ADDQH_R_W
:
22001 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22004 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22006 case OPC_SUBUH_R_QB
:
22007 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22010 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22012 case OPC_SUBQH_R_PH
:
22013 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22016 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22018 case OPC_SUBQH_R_W
:
22019 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22023 case OPC_ABSQ_S_PH_DSP
:
22025 case OPC_ABSQ_S_QB
:
22027 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22029 case OPC_ABSQ_S_PH
:
22031 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22035 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22037 case OPC_PRECEQ_W_PHL
:
22039 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22040 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22042 case OPC_PRECEQ_W_PHR
:
22044 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22045 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22046 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22048 case OPC_PRECEQU_PH_QBL
:
22050 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22052 case OPC_PRECEQU_PH_QBR
:
22054 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22056 case OPC_PRECEQU_PH_QBLA
:
22058 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22060 case OPC_PRECEQU_PH_QBRA
:
22062 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22064 case OPC_PRECEU_PH_QBL
:
22066 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22068 case OPC_PRECEU_PH_QBR
:
22070 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22072 case OPC_PRECEU_PH_QBLA
:
22074 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22076 case OPC_PRECEU_PH_QBRA
:
22078 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22082 case OPC_ADDU_QB_DSP
:
22086 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22088 case OPC_ADDQ_S_PH
:
22090 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22094 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22098 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22100 case OPC_ADDU_S_QB
:
22102 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22106 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22108 case OPC_ADDU_S_PH
:
22110 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22114 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22116 case OPC_SUBQ_S_PH
:
22118 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22122 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22126 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22128 case OPC_SUBU_S_QB
:
22130 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22134 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22136 case OPC_SUBU_S_PH
:
22138 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22142 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22146 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22150 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22152 case OPC_RADDU_W_QB
:
22154 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22158 case OPC_CMPU_EQ_QB_DSP
:
22160 case OPC_PRECR_QB_PH
:
22162 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22164 case OPC_PRECRQ_QB_PH
:
22166 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22168 case OPC_PRECR_SRA_PH_W
:
22171 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22172 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22174 tcg_temp_free_i32(sa_t
);
22177 case OPC_PRECR_SRA_R_PH_W
:
22180 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22181 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22183 tcg_temp_free_i32(sa_t
);
22186 case OPC_PRECRQ_PH_W
:
22188 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22190 case OPC_PRECRQ_RS_PH_W
:
22192 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22194 case OPC_PRECRQU_S_QB_PH
:
22196 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22200 #ifdef TARGET_MIPS64
22201 case OPC_ABSQ_S_QH_DSP
:
22203 case OPC_PRECEQ_L_PWL
:
22205 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22207 case OPC_PRECEQ_L_PWR
:
22209 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22211 case OPC_PRECEQ_PW_QHL
:
22213 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22215 case OPC_PRECEQ_PW_QHR
:
22217 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22219 case OPC_PRECEQ_PW_QHLA
:
22221 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22223 case OPC_PRECEQ_PW_QHRA
:
22225 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22227 case OPC_PRECEQU_QH_OBL
:
22229 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22231 case OPC_PRECEQU_QH_OBR
:
22233 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22235 case OPC_PRECEQU_QH_OBLA
:
22237 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22239 case OPC_PRECEQU_QH_OBRA
:
22241 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22243 case OPC_PRECEU_QH_OBL
:
22245 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22247 case OPC_PRECEU_QH_OBR
:
22249 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22251 case OPC_PRECEU_QH_OBLA
:
22253 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22255 case OPC_PRECEU_QH_OBRA
:
22257 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22259 case OPC_ABSQ_S_OB
:
22261 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22263 case OPC_ABSQ_S_PW
:
22265 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22267 case OPC_ABSQ_S_QH
:
22269 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22273 case OPC_ADDU_OB_DSP
:
22275 case OPC_RADDU_L_OB
:
22277 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22281 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22283 case OPC_SUBQ_S_PW
:
22285 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22289 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22291 case OPC_SUBQ_S_QH
:
22293 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22297 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22299 case OPC_SUBU_S_OB
:
22301 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22305 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22307 case OPC_SUBU_S_QH
:
22309 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22313 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22315 case OPC_SUBUH_R_OB
:
22317 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22321 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22323 case OPC_ADDQ_S_PW
:
22325 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22329 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22331 case OPC_ADDQ_S_QH
:
22333 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22337 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22339 case OPC_ADDU_S_OB
:
22341 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22345 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22347 case OPC_ADDU_S_QH
:
22349 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22353 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22355 case OPC_ADDUH_R_OB
:
22357 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22361 case OPC_CMPU_EQ_OB_DSP
:
22363 case OPC_PRECR_OB_QH
:
22365 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22367 case OPC_PRECR_SRA_QH_PW
:
22370 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22371 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22372 tcg_temp_free_i32(ret_t
);
22375 case OPC_PRECR_SRA_R_QH_PW
:
22378 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22379 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22380 tcg_temp_free_i32(sa_v
);
22383 case OPC_PRECRQ_OB_QH
:
22385 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22387 case OPC_PRECRQ_PW_L
:
22389 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22391 case OPC_PRECRQ_QH_PW
:
22393 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22395 case OPC_PRECRQ_RS_QH_PW
:
22397 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22399 case OPC_PRECRQU_S_OB_QH
:
22401 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22408 tcg_temp_free(v1_t
);
22409 tcg_temp_free(v2_t
);
22412 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22413 int ret
, int v1
, int v2
)
22421 /* Treat as NOP. */
22425 t0
= tcg_temp_new();
22426 v1_t
= tcg_temp_new();
22427 v2_t
= tcg_temp_new();
22429 tcg_gen_movi_tl(t0
, v1
);
22430 gen_load_gpr(v1_t
, v1
);
22431 gen_load_gpr(v2_t
, v2
);
22434 case OPC_SHLL_QB_DSP
:
22436 op2
= MASK_SHLL_QB(ctx
->opcode
);
22440 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22444 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22448 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22452 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22454 case OPC_SHLL_S_PH
:
22456 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22458 case OPC_SHLLV_S_PH
:
22460 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22464 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22466 case OPC_SHLLV_S_W
:
22468 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22472 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22476 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22480 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22484 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22488 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22490 case OPC_SHRA_R_QB
:
22492 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22496 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22498 case OPC_SHRAV_R_QB
:
22500 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22504 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22506 case OPC_SHRA_R_PH
:
22508 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22512 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22514 case OPC_SHRAV_R_PH
:
22516 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22520 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22522 case OPC_SHRAV_R_W
:
22524 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22526 default: /* Invalid */
22527 MIPS_INVAL("MASK SHLL.QB");
22528 generate_exception_end(ctx
, EXCP_RI
);
22533 #ifdef TARGET_MIPS64
22534 case OPC_SHLL_OB_DSP
:
22535 op2
= MASK_SHLL_OB(ctx
->opcode
);
22539 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22543 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22545 case OPC_SHLL_S_PW
:
22547 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22549 case OPC_SHLLV_S_PW
:
22551 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22555 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22559 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22563 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22567 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22569 case OPC_SHLL_S_QH
:
22571 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22573 case OPC_SHLLV_S_QH
:
22575 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22579 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22583 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22585 case OPC_SHRA_R_OB
:
22587 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22589 case OPC_SHRAV_R_OB
:
22591 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22595 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22599 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22601 case OPC_SHRA_R_PW
:
22603 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22605 case OPC_SHRAV_R_PW
:
22607 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22611 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22615 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22617 case OPC_SHRA_R_QH
:
22619 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22621 case OPC_SHRAV_R_QH
:
22623 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22627 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22631 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22635 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22639 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22641 default: /* Invalid */
22642 MIPS_INVAL("MASK SHLL.OB");
22643 generate_exception_end(ctx
, EXCP_RI
);
22651 tcg_temp_free(v1_t
);
22652 tcg_temp_free(v2_t
);
22655 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22656 int ret
, int v1
, int v2
, int check_ret
)
22662 if ((ret
== 0) && (check_ret
== 1)) {
22663 /* Treat as NOP. */
22667 t0
= tcg_temp_new_i32();
22668 v1_t
= tcg_temp_new();
22669 v2_t
= tcg_temp_new();
22671 tcg_gen_movi_i32(t0
, ret
);
22672 gen_load_gpr(v1_t
, v1
);
22673 gen_load_gpr(v2_t
, v2
);
22676 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22677 * the same mask and op1. */
22678 case OPC_MULT_G_2E
:
22682 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22685 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22688 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22690 case OPC_MULQ_RS_W
:
22691 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22695 case OPC_DPA_W_PH_DSP
:
22697 case OPC_DPAU_H_QBL
:
22699 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22701 case OPC_DPAU_H_QBR
:
22703 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22705 case OPC_DPSU_H_QBL
:
22707 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22709 case OPC_DPSU_H_QBR
:
22711 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22715 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22717 case OPC_DPAX_W_PH
:
22719 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22721 case OPC_DPAQ_S_W_PH
:
22723 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22725 case OPC_DPAQX_S_W_PH
:
22727 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22729 case OPC_DPAQX_SA_W_PH
:
22731 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22735 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22737 case OPC_DPSX_W_PH
:
22739 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22741 case OPC_DPSQ_S_W_PH
:
22743 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22745 case OPC_DPSQX_S_W_PH
:
22747 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22749 case OPC_DPSQX_SA_W_PH
:
22751 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22753 case OPC_MULSAQ_S_W_PH
:
22755 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22757 case OPC_DPAQ_SA_L_W
:
22759 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22761 case OPC_DPSQ_SA_L_W
:
22763 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22765 case OPC_MAQ_S_W_PHL
:
22767 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22769 case OPC_MAQ_S_W_PHR
:
22771 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22773 case OPC_MAQ_SA_W_PHL
:
22775 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22777 case OPC_MAQ_SA_W_PHR
:
22779 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22781 case OPC_MULSA_W_PH
:
22783 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22787 #ifdef TARGET_MIPS64
22788 case OPC_DPAQ_W_QH_DSP
:
22790 int ac
= ret
& 0x03;
22791 tcg_gen_movi_i32(t0
, ac
);
22796 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22800 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22804 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22808 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22812 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22814 case OPC_DPAQ_S_W_QH
:
22816 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22818 case OPC_DPAQ_SA_L_PW
:
22820 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22822 case OPC_DPAU_H_OBL
:
22824 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22826 case OPC_DPAU_H_OBR
:
22828 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22832 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22834 case OPC_DPSQ_S_W_QH
:
22836 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22838 case OPC_DPSQ_SA_L_PW
:
22840 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22842 case OPC_DPSU_H_OBL
:
22844 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22846 case OPC_DPSU_H_OBR
:
22848 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22850 case OPC_MAQ_S_L_PWL
:
22852 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22854 case OPC_MAQ_S_L_PWR
:
22856 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22858 case OPC_MAQ_S_W_QHLL
:
22860 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22862 case OPC_MAQ_SA_W_QHLL
:
22864 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22866 case OPC_MAQ_S_W_QHLR
:
22868 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22870 case OPC_MAQ_SA_W_QHLR
:
22872 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22874 case OPC_MAQ_S_W_QHRL
:
22876 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22878 case OPC_MAQ_SA_W_QHRL
:
22880 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22882 case OPC_MAQ_S_W_QHRR
:
22884 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22886 case OPC_MAQ_SA_W_QHRR
:
22888 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22890 case OPC_MULSAQ_S_L_PW
:
22892 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22894 case OPC_MULSAQ_S_W_QH
:
22896 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22902 case OPC_ADDU_QB_DSP
:
22904 case OPC_MULEU_S_PH_QBL
:
22906 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22908 case OPC_MULEU_S_PH_QBR
:
22910 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22912 case OPC_MULQ_RS_PH
:
22914 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22916 case OPC_MULEQ_S_W_PHL
:
22918 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22920 case OPC_MULEQ_S_W_PHR
:
22922 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22924 case OPC_MULQ_S_PH
:
22926 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22930 #ifdef TARGET_MIPS64
22931 case OPC_ADDU_OB_DSP
:
22933 case OPC_MULEQ_S_PW_QHL
:
22935 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22937 case OPC_MULEQ_S_PW_QHR
:
22939 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22941 case OPC_MULEU_S_QH_OBL
:
22943 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22945 case OPC_MULEU_S_QH_OBR
:
22947 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22949 case OPC_MULQ_RS_QH
:
22951 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22958 tcg_temp_free_i32(t0
);
22959 tcg_temp_free(v1_t
);
22960 tcg_temp_free(v2_t
);
22963 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22971 /* Treat as NOP. */
22975 t0
= tcg_temp_new();
22976 val_t
= tcg_temp_new();
22977 gen_load_gpr(val_t
, val
);
22980 case OPC_ABSQ_S_PH_DSP
:
22984 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22989 target_long result
;
22990 imm
= (ctx
->opcode
>> 16) & 0xFF;
22991 result
= (uint32_t)imm
<< 24 |
22992 (uint32_t)imm
<< 16 |
22993 (uint32_t)imm
<< 8 |
22995 result
= (int32_t)result
;
22996 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23001 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23002 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23003 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23004 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23005 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23006 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23011 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23012 imm
= (int16_t)(imm
<< 6) >> 6;
23013 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23014 (target_long
)((int32_t)imm
<< 16 | \
23020 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23021 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23022 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23023 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23027 #ifdef TARGET_MIPS64
23028 case OPC_ABSQ_S_QH_DSP
:
23035 imm
= (ctx
->opcode
>> 16) & 0xFF;
23036 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23037 temp
= (temp
<< 16) | temp
;
23038 temp
= (temp
<< 32) | temp
;
23039 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23047 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23048 imm
= (int16_t)(imm
<< 6) >> 6;
23049 temp
= ((target_long
)imm
<< 32) \
23050 | ((target_long
)imm
& 0xFFFFFFFF);
23051 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23059 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23060 imm
= (int16_t)(imm
<< 6) >> 6;
23062 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23063 ((uint64_t)(uint16_t)imm
<< 32) |
23064 ((uint64_t)(uint16_t)imm
<< 16) |
23065 (uint64_t)(uint16_t)imm
;
23066 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23071 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23072 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23073 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23074 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23075 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23076 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23077 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23081 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23082 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23083 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23087 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23088 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23089 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23090 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23091 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23098 tcg_temp_free(val_t
);
23101 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23102 uint32_t op1
, uint32_t op2
,
23103 int ret
, int v1
, int v2
, int check_ret
)
23109 if ((ret
== 0) && (check_ret
== 1)) {
23110 /* Treat as NOP. */
23114 t1
= tcg_temp_new();
23115 v1_t
= tcg_temp_new();
23116 v2_t
= tcg_temp_new();
23118 gen_load_gpr(v1_t
, v1
);
23119 gen_load_gpr(v2_t
, v2
);
23122 case OPC_CMPU_EQ_QB_DSP
:
23124 case OPC_CMPU_EQ_QB
:
23126 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23128 case OPC_CMPU_LT_QB
:
23130 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23132 case OPC_CMPU_LE_QB
:
23134 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23136 case OPC_CMPGU_EQ_QB
:
23138 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23140 case OPC_CMPGU_LT_QB
:
23142 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23144 case OPC_CMPGU_LE_QB
:
23146 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23148 case OPC_CMPGDU_EQ_QB
:
23150 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23151 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23152 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23153 tcg_gen_shli_tl(t1
, t1
, 24);
23154 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23156 case OPC_CMPGDU_LT_QB
:
23158 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23159 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23160 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23161 tcg_gen_shli_tl(t1
, t1
, 24);
23162 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23164 case OPC_CMPGDU_LE_QB
:
23166 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23167 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23168 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23169 tcg_gen_shli_tl(t1
, t1
, 24);
23170 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23172 case OPC_CMP_EQ_PH
:
23174 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23176 case OPC_CMP_LT_PH
:
23178 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23180 case OPC_CMP_LE_PH
:
23182 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23186 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23190 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23192 case OPC_PACKRL_PH
:
23194 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23198 #ifdef TARGET_MIPS64
23199 case OPC_CMPU_EQ_OB_DSP
:
23201 case OPC_CMP_EQ_PW
:
23203 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23205 case OPC_CMP_LT_PW
:
23207 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23209 case OPC_CMP_LE_PW
:
23211 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23213 case OPC_CMP_EQ_QH
:
23215 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23217 case OPC_CMP_LT_QH
:
23219 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23221 case OPC_CMP_LE_QH
:
23223 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23225 case OPC_CMPGDU_EQ_OB
:
23227 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23229 case OPC_CMPGDU_LT_OB
:
23231 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23233 case OPC_CMPGDU_LE_OB
:
23235 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23237 case OPC_CMPGU_EQ_OB
:
23239 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23241 case OPC_CMPGU_LT_OB
:
23243 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23245 case OPC_CMPGU_LE_OB
:
23247 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23249 case OPC_CMPU_EQ_OB
:
23251 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23253 case OPC_CMPU_LT_OB
:
23255 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23257 case OPC_CMPU_LE_OB
:
23259 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23261 case OPC_PACKRL_PW
:
23263 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23267 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23271 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23275 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23283 tcg_temp_free(v1_t
);
23284 tcg_temp_free(v2_t
);
23287 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23288 uint32_t op1
, int rt
, int rs
, int sa
)
23295 /* Treat as NOP. */
23299 t0
= tcg_temp_new();
23300 gen_load_gpr(t0
, rs
);
23303 case OPC_APPEND_DSP
:
23304 switch (MASK_APPEND(ctx
->opcode
)) {
23307 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23309 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23313 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23314 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23315 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23316 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23318 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23322 if (sa
!= 0 && sa
!= 2) {
23323 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23324 tcg_gen_ext32u_tl(t0
, t0
);
23325 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23326 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23328 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23330 default: /* Invalid */
23331 MIPS_INVAL("MASK APPEND");
23332 generate_exception_end(ctx
, EXCP_RI
);
23336 #ifdef TARGET_MIPS64
23337 case OPC_DAPPEND_DSP
:
23338 switch (MASK_DAPPEND(ctx
->opcode
)) {
23341 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23345 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23346 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23347 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23351 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23352 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23353 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23358 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23359 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23360 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23361 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23364 default: /* Invalid */
23365 MIPS_INVAL("MASK DAPPEND");
23366 generate_exception_end(ctx
, EXCP_RI
);
23375 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23376 int ret
, int v1
, int v2
, int check_ret
)
23385 if ((ret
== 0) && (check_ret
== 1)) {
23386 /* Treat as NOP. */
23390 t0
= tcg_temp_new();
23391 t1
= tcg_temp_new();
23392 v1_t
= tcg_temp_new();
23393 v2_t
= tcg_temp_new();
23395 gen_load_gpr(v1_t
, v1
);
23396 gen_load_gpr(v2_t
, v2
);
23399 case OPC_EXTR_W_DSP
:
23403 tcg_gen_movi_tl(t0
, v2
);
23404 tcg_gen_movi_tl(t1
, v1
);
23405 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23408 tcg_gen_movi_tl(t0
, v2
);
23409 tcg_gen_movi_tl(t1
, v1
);
23410 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23412 case OPC_EXTR_RS_W
:
23413 tcg_gen_movi_tl(t0
, v2
);
23414 tcg_gen_movi_tl(t1
, v1
);
23415 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23418 tcg_gen_movi_tl(t0
, v2
);
23419 tcg_gen_movi_tl(t1
, v1
);
23420 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23422 case OPC_EXTRV_S_H
:
23423 tcg_gen_movi_tl(t0
, v2
);
23424 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23427 tcg_gen_movi_tl(t0
, v2
);
23428 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23430 case OPC_EXTRV_R_W
:
23431 tcg_gen_movi_tl(t0
, v2
);
23432 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23434 case OPC_EXTRV_RS_W
:
23435 tcg_gen_movi_tl(t0
, v2
);
23436 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23439 tcg_gen_movi_tl(t0
, v2
);
23440 tcg_gen_movi_tl(t1
, v1
);
23441 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23444 tcg_gen_movi_tl(t0
, v2
);
23445 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23448 tcg_gen_movi_tl(t0
, v2
);
23449 tcg_gen_movi_tl(t1
, v1
);
23450 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23453 tcg_gen_movi_tl(t0
, v2
);
23454 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23457 imm
= (ctx
->opcode
>> 20) & 0x3F;
23458 tcg_gen_movi_tl(t0
, ret
);
23459 tcg_gen_movi_tl(t1
, imm
);
23460 gen_helper_shilo(t0
, t1
, cpu_env
);
23463 tcg_gen_movi_tl(t0
, ret
);
23464 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23467 tcg_gen_movi_tl(t0
, ret
);
23468 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23471 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23472 tcg_gen_movi_tl(t0
, imm
);
23473 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23476 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23477 tcg_gen_movi_tl(t0
, imm
);
23478 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23482 #ifdef TARGET_MIPS64
23483 case OPC_DEXTR_W_DSP
:
23487 tcg_gen_movi_tl(t0
, ret
);
23488 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23492 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23493 int ac
= (ctx
->opcode
>> 11) & 0x03;
23494 tcg_gen_movi_tl(t0
, shift
);
23495 tcg_gen_movi_tl(t1
, ac
);
23496 gen_helper_dshilo(t0
, t1
, cpu_env
);
23501 int ac
= (ctx
->opcode
>> 11) & 0x03;
23502 tcg_gen_movi_tl(t0
, ac
);
23503 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23507 tcg_gen_movi_tl(t0
, v2
);
23508 tcg_gen_movi_tl(t1
, v1
);
23510 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23513 tcg_gen_movi_tl(t0
, v2
);
23514 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23517 tcg_gen_movi_tl(t0
, v2
);
23518 tcg_gen_movi_tl(t1
, v1
);
23519 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23522 tcg_gen_movi_tl(t0
, v2
);
23523 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23526 tcg_gen_movi_tl(t0
, v2
);
23527 tcg_gen_movi_tl(t1
, v1
);
23528 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23530 case OPC_DEXTR_R_L
:
23531 tcg_gen_movi_tl(t0
, v2
);
23532 tcg_gen_movi_tl(t1
, v1
);
23533 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23535 case OPC_DEXTR_RS_L
:
23536 tcg_gen_movi_tl(t0
, v2
);
23537 tcg_gen_movi_tl(t1
, v1
);
23538 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23541 tcg_gen_movi_tl(t0
, v2
);
23542 tcg_gen_movi_tl(t1
, v1
);
23543 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23545 case OPC_DEXTR_R_W
:
23546 tcg_gen_movi_tl(t0
, v2
);
23547 tcg_gen_movi_tl(t1
, v1
);
23548 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23550 case OPC_DEXTR_RS_W
:
23551 tcg_gen_movi_tl(t0
, v2
);
23552 tcg_gen_movi_tl(t1
, v1
);
23553 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23555 case OPC_DEXTR_S_H
:
23556 tcg_gen_movi_tl(t0
, v2
);
23557 tcg_gen_movi_tl(t1
, v1
);
23558 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23560 case OPC_DEXTRV_S_H
:
23561 tcg_gen_movi_tl(t0
, v2
);
23562 tcg_gen_movi_tl(t1
, v1
);
23563 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23566 tcg_gen_movi_tl(t0
, v2
);
23567 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23569 case OPC_DEXTRV_R_L
:
23570 tcg_gen_movi_tl(t0
, v2
);
23571 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23573 case OPC_DEXTRV_RS_L
:
23574 tcg_gen_movi_tl(t0
, v2
);
23575 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23578 tcg_gen_movi_tl(t0
, v2
);
23579 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23581 case OPC_DEXTRV_R_W
:
23582 tcg_gen_movi_tl(t0
, v2
);
23583 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23585 case OPC_DEXTRV_RS_W
:
23586 tcg_gen_movi_tl(t0
, v2
);
23587 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23596 tcg_temp_free(v1_t
);
23597 tcg_temp_free(v2_t
);
23600 /* End MIPSDSP functions. */
23602 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23604 int rs
, rt
, rd
, sa
;
23607 rs
= (ctx
->opcode
>> 21) & 0x1f;
23608 rt
= (ctx
->opcode
>> 16) & 0x1f;
23609 rd
= (ctx
->opcode
>> 11) & 0x1f;
23610 sa
= (ctx
->opcode
>> 6) & 0x1f;
23612 op1
= MASK_SPECIAL(ctx
->opcode
);
23615 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23621 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23631 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23634 MIPS_INVAL("special_r6 muldiv");
23635 generate_exception_end(ctx
, EXCP_RI
);
23641 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23645 if (rt
== 0 && sa
== 1) {
23646 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23647 We need additionally to check other fields */
23648 gen_cl(ctx
, op1
, rd
, rs
);
23650 generate_exception_end(ctx
, EXCP_RI
);
23654 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23655 gen_helper_do_semihosting(cpu_env
);
23657 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23658 generate_exception_end(ctx
, EXCP_RI
);
23660 generate_exception_end(ctx
, EXCP_DBp
);
23664 #if defined(TARGET_MIPS64)
23666 check_mips_64(ctx
);
23667 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23671 if (rt
== 0 && sa
== 1) {
23672 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23673 We need additionally to check other fields */
23674 check_mips_64(ctx
);
23675 gen_cl(ctx
, op1
, rd
, rs
);
23677 generate_exception_end(ctx
, EXCP_RI
);
23685 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23695 check_mips_64(ctx
);
23696 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23699 MIPS_INVAL("special_r6 muldiv");
23700 generate_exception_end(ctx
, EXCP_RI
);
23705 default: /* Invalid */
23706 MIPS_INVAL("special_r6");
23707 generate_exception_end(ctx
, EXCP_RI
);
23712 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23714 int rs
, rt
, rd
, sa
;
23717 rs
= (ctx
->opcode
>> 21) & 0x1f;
23718 rt
= (ctx
->opcode
>> 16) & 0x1f;
23719 rd
= (ctx
->opcode
>> 11) & 0x1f;
23720 sa
= (ctx
->opcode
>> 6) & 0x1f;
23722 op1
= MASK_SPECIAL(ctx
->opcode
);
23724 case OPC_MOVN
: /* Conditional move */
23726 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23727 INSN_LOONGSON2E
| INSN_LOONGSON2F
| INSN_R5900
);
23728 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23730 case OPC_MFHI
: /* Move from HI/LO */
23732 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23735 case OPC_MTLO
: /* Move to HI/LO */
23736 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23739 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23740 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23741 check_cp1_enabled(ctx
);
23742 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23743 (ctx
->opcode
>> 16) & 1);
23745 generate_exception_err(ctx
, EXCP_CpU
, 1);
23751 check_insn(ctx
, INSN_VR54XX
);
23752 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23753 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23754 } else if (ctx
->insn_flags
& INSN_R5900
) {
23755 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23757 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23762 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23764 #if defined(TARGET_MIPS64)
23769 check_insn(ctx
, ISA_MIPS3
);
23770 check_insn_opc_user_only(ctx
, INSN_R5900
);
23771 check_mips_64(ctx
);
23772 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23776 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23779 #ifdef MIPS_STRICT_STANDARD
23780 MIPS_INVAL("SPIM");
23781 generate_exception_end(ctx
, EXCP_RI
);
23783 /* Implemented as RI exception for now. */
23784 MIPS_INVAL("spim (unofficial)");
23785 generate_exception_end(ctx
, EXCP_RI
);
23788 default: /* Invalid */
23789 MIPS_INVAL("special_legacy");
23790 generate_exception_end(ctx
, EXCP_RI
);
23795 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23797 int rs
, rt
, rd
, sa
;
23800 rs
= (ctx
->opcode
>> 21) & 0x1f;
23801 rt
= (ctx
->opcode
>> 16) & 0x1f;
23802 rd
= (ctx
->opcode
>> 11) & 0x1f;
23803 sa
= (ctx
->opcode
>> 6) & 0x1f;
23805 op1
= MASK_SPECIAL(ctx
->opcode
);
23807 case OPC_SLL
: /* Shift with immediate */
23808 if (sa
== 5 && rd
== 0 &&
23809 rs
== 0 && rt
== 0) { /* PAUSE */
23810 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23811 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23812 generate_exception_end(ctx
, EXCP_RI
);
23818 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23821 switch ((ctx
->opcode
>> 21) & 0x1f) {
23823 /* rotr is decoded as srl on non-R2 CPUs */
23824 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23829 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23832 generate_exception_end(ctx
, EXCP_RI
);
23840 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23842 case OPC_SLLV
: /* Shifts */
23844 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23847 switch ((ctx
->opcode
>> 6) & 0x1f) {
23849 /* rotrv is decoded as srlv on non-R2 CPUs */
23850 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23855 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23858 generate_exception_end(ctx
, EXCP_RI
);
23862 case OPC_SLT
: /* Set on less than */
23864 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23866 case OPC_AND
: /* Logic*/
23870 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23873 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23875 case OPC_TGE
: /* Traps */
23881 check_insn(ctx
, ISA_MIPS2
);
23882 gen_trap(ctx
, op1
, rs
, rt
, -1);
23884 case OPC_LSA
: /* OPC_PMON */
23885 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23886 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23887 decode_opc_special_r6(env
, ctx
);
23889 /* Pmon entry point, also R4010 selsl */
23890 #ifdef MIPS_STRICT_STANDARD
23891 MIPS_INVAL("PMON / selsl");
23892 generate_exception_end(ctx
, EXCP_RI
);
23894 gen_helper_0e0i(pmon
, sa
);
23899 generate_exception_end(ctx
, EXCP_SYSCALL
);
23902 generate_exception_end(ctx
, EXCP_BREAK
);
23905 check_insn(ctx
, ISA_MIPS2
);
23906 gen_sync(extract32(ctx
->opcode
, 6, 5));
23909 #if defined(TARGET_MIPS64)
23910 /* MIPS64 specific opcodes */
23915 check_insn(ctx
, ISA_MIPS3
);
23916 check_mips_64(ctx
);
23917 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23920 switch ((ctx
->opcode
>> 21) & 0x1f) {
23922 /* drotr is decoded as dsrl on non-R2 CPUs */
23923 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23928 check_insn(ctx
, ISA_MIPS3
);
23929 check_mips_64(ctx
);
23930 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23933 generate_exception_end(ctx
, EXCP_RI
);
23938 switch ((ctx
->opcode
>> 21) & 0x1f) {
23940 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23941 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23946 check_insn(ctx
, ISA_MIPS3
);
23947 check_mips_64(ctx
);
23948 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23951 generate_exception_end(ctx
, EXCP_RI
);
23959 check_insn(ctx
, ISA_MIPS3
);
23960 check_mips_64(ctx
);
23961 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23965 check_insn(ctx
, ISA_MIPS3
);
23966 check_mips_64(ctx
);
23967 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23970 switch ((ctx
->opcode
>> 6) & 0x1f) {
23972 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23973 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23978 check_insn(ctx
, ISA_MIPS3
);
23979 check_mips_64(ctx
);
23980 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23983 generate_exception_end(ctx
, EXCP_RI
);
23988 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23989 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23990 decode_opc_special_r6(env
, ctx
);
23995 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23996 decode_opc_special_r6(env
, ctx
);
23998 decode_opc_special_legacy(env
, ctx
);
24003 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24008 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24010 rs
= (ctx
->opcode
>> 21) & 0x1f;
24011 rt
= (ctx
->opcode
>> 16) & 0x1f;
24012 rd
= (ctx
->opcode
>> 11) & 0x1f;
24014 op1
= MASK_SPECIAL2(ctx
->opcode
);
24016 case OPC_MADD
: /* Multiply and add/sub */
24020 check_insn(ctx
, ISA_MIPS32
);
24021 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24024 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24027 case OPC_DIVU_G_2F
:
24028 case OPC_MULT_G_2F
:
24029 case OPC_MULTU_G_2F
:
24031 case OPC_MODU_G_2F
:
24032 check_insn(ctx
, INSN_LOONGSON2F
);
24033 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24037 check_insn(ctx
, ISA_MIPS32
);
24038 gen_cl(ctx
, op1
, rd
, rs
);
24041 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24042 gen_helper_do_semihosting(cpu_env
);
24044 /* XXX: not clear which exception should be raised
24045 * when in debug mode...
24047 check_insn(ctx
, ISA_MIPS32
);
24048 generate_exception_end(ctx
, EXCP_DBp
);
24051 #if defined(TARGET_MIPS64)
24054 check_insn(ctx
, ISA_MIPS64
);
24055 check_mips_64(ctx
);
24056 gen_cl(ctx
, op1
, rd
, rs
);
24058 case OPC_DMULT_G_2F
:
24059 case OPC_DMULTU_G_2F
:
24060 case OPC_DDIV_G_2F
:
24061 case OPC_DDIVU_G_2F
:
24062 case OPC_DMOD_G_2F
:
24063 case OPC_DMODU_G_2F
:
24064 check_insn(ctx
, INSN_LOONGSON2F
);
24065 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24068 default: /* Invalid */
24069 MIPS_INVAL("special2_legacy");
24070 generate_exception_end(ctx
, EXCP_RI
);
24075 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
24077 int rs
, rt
, rd
, sa
;
24081 rs
= (ctx
->opcode
>> 21) & 0x1f;
24082 rt
= (ctx
->opcode
>> 16) & 0x1f;
24083 rd
= (ctx
->opcode
>> 11) & 0x1f;
24084 sa
= (ctx
->opcode
>> 6) & 0x1f;
24085 imm
= (int16_t)ctx
->opcode
>> 7;
24087 op1
= MASK_SPECIAL3(ctx
->opcode
);
24091 /* hint codes 24-31 are reserved and signal RI */
24092 generate_exception_end(ctx
, EXCP_RI
);
24094 /* Treat as NOP. */
24097 check_cp0_enabled(ctx
);
24098 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24099 gen_cache_operation(ctx
, rt
, rs
, imm
);
24103 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24106 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24111 /* Treat as NOP. */
24114 op2
= MASK_BSHFL(ctx
->opcode
);
24120 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
24123 gen_bitswap(ctx
, op2
, rd
, rt
);
24128 #if defined(TARGET_MIPS64)
24130 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24133 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24136 check_mips_64(ctx
);
24139 /* Treat as NOP. */
24142 op2
= MASK_DBSHFL(ctx
->opcode
);
24152 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
24155 gen_bitswap(ctx
, op2
, rd
, rt
);
24162 default: /* Invalid */
24163 MIPS_INVAL("special3_r6");
24164 generate_exception_end(ctx
, EXCP_RI
);
24169 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24174 rs
= (ctx
->opcode
>> 21) & 0x1f;
24175 rt
= (ctx
->opcode
>> 16) & 0x1f;
24176 rd
= (ctx
->opcode
>> 11) & 0x1f;
24178 op1
= MASK_SPECIAL3(ctx
->opcode
);
24181 case OPC_DIVU_G_2E
:
24183 case OPC_MODU_G_2E
:
24184 case OPC_MULT_G_2E
:
24185 case OPC_MULTU_G_2E
:
24186 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
24187 * the same mask and op1. */
24188 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
24189 op2
= MASK_ADDUH_QB(ctx
->opcode
);
24192 case OPC_ADDUH_R_QB
:
24194 case OPC_ADDQH_R_PH
:
24196 case OPC_ADDQH_R_W
:
24198 case OPC_SUBUH_R_QB
:
24200 case OPC_SUBQH_R_PH
:
24202 case OPC_SUBQH_R_W
:
24203 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24208 case OPC_MULQ_RS_W
:
24209 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24212 MIPS_INVAL("MASK ADDUH.QB");
24213 generate_exception_end(ctx
, EXCP_RI
);
24216 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
24217 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24219 generate_exception_end(ctx
, EXCP_RI
);
24223 op2
= MASK_LX(ctx
->opcode
);
24225 #if defined(TARGET_MIPS64)
24231 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
24233 default: /* Invalid */
24234 MIPS_INVAL("MASK LX");
24235 generate_exception_end(ctx
, EXCP_RI
);
24239 case OPC_ABSQ_S_PH_DSP
:
24240 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
24242 case OPC_ABSQ_S_QB
:
24243 case OPC_ABSQ_S_PH
:
24245 case OPC_PRECEQ_W_PHL
:
24246 case OPC_PRECEQ_W_PHR
:
24247 case OPC_PRECEQU_PH_QBL
:
24248 case OPC_PRECEQU_PH_QBR
:
24249 case OPC_PRECEQU_PH_QBLA
:
24250 case OPC_PRECEQU_PH_QBRA
:
24251 case OPC_PRECEU_PH_QBL
:
24252 case OPC_PRECEU_PH_QBR
:
24253 case OPC_PRECEU_PH_QBLA
:
24254 case OPC_PRECEU_PH_QBRA
:
24255 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24262 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24265 MIPS_INVAL("MASK ABSQ_S.PH");
24266 generate_exception_end(ctx
, EXCP_RI
);
24270 case OPC_ADDU_QB_DSP
:
24271 op2
= MASK_ADDU_QB(ctx
->opcode
);
24274 case OPC_ADDQ_S_PH
:
24277 case OPC_ADDU_S_QB
:
24279 case OPC_ADDU_S_PH
:
24281 case OPC_SUBQ_S_PH
:
24284 case OPC_SUBU_S_QB
:
24286 case OPC_SUBU_S_PH
:
24290 case OPC_RADDU_W_QB
:
24291 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24293 case OPC_MULEU_S_PH_QBL
:
24294 case OPC_MULEU_S_PH_QBR
:
24295 case OPC_MULQ_RS_PH
:
24296 case OPC_MULEQ_S_W_PHL
:
24297 case OPC_MULEQ_S_W_PHR
:
24298 case OPC_MULQ_S_PH
:
24299 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24301 default: /* Invalid */
24302 MIPS_INVAL("MASK ADDU.QB");
24303 generate_exception_end(ctx
, EXCP_RI
);
24308 case OPC_CMPU_EQ_QB_DSP
:
24309 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
24311 case OPC_PRECR_SRA_PH_W
:
24312 case OPC_PRECR_SRA_R_PH_W
:
24313 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24315 case OPC_PRECR_QB_PH
:
24316 case OPC_PRECRQ_QB_PH
:
24317 case OPC_PRECRQ_PH_W
:
24318 case OPC_PRECRQ_RS_PH_W
:
24319 case OPC_PRECRQU_S_QB_PH
:
24320 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24322 case OPC_CMPU_EQ_QB
:
24323 case OPC_CMPU_LT_QB
:
24324 case OPC_CMPU_LE_QB
:
24325 case OPC_CMP_EQ_PH
:
24326 case OPC_CMP_LT_PH
:
24327 case OPC_CMP_LE_PH
:
24328 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24330 case OPC_CMPGU_EQ_QB
:
24331 case OPC_CMPGU_LT_QB
:
24332 case OPC_CMPGU_LE_QB
:
24333 case OPC_CMPGDU_EQ_QB
:
24334 case OPC_CMPGDU_LT_QB
:
24335 case OPC_CMPGDU_LE_QB
:
24338 case OPC_PACKRL_PH
:
24339 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24341 default: /* Invalid */
24342 MIPS_INVAL("MASK CMPU.EQ.QB");
24343 generate_exception_end(ctx
, EXCP_RI
);
24347 case OPC_SHLL_QB_DSP
:
24348 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24350 case OPC_DPA_W_PH_DSP
:
24351 op2
= MASK_DPA_W_PH(ctx
->opcode
);
24353 case OPC_DPAU_H_QBL
:
24354 case OPC_DPAU_H_QBR
:
24355 case OPC_DPSU_H_QBL
:
24356 case OPC_DPSU_H_QBR
:
24358 case OPC_DPAX_W_PH
:
24359 case OPC_DPAQ_S_W_PH
:
24360 case OPC_DPAQX_S_W_PH
:
24361 case OPC_DPAQX_SA_W_PH
:
24363 case OPC_DPSX_W_PH
:
24364 case OPC_DPSQ_S_W_PH
:
24365 case OPC_DPSQX_S_W_PH
:
24366 case OPC_DPSQX_SA_W_PH
:
24367 case OPC_MULSAQ_S_W_PH
:
24368 case OPC_DPAQ_SA_L_W
:
24369 case OPC_DPSQ_SA_L_W
:
24370 case OPC_MAQ_S_W_PHL
:
24371 case OPC_MAQ_S_W_PHR
:
24372 case OPC_MAQ_SA_W_PHL
:
24373 case OPC_MAQ_SA_W_PHR
:
24374 case OPC_MULSA_W_PH
:
24375 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24377 default: /* Invalid */
24378 MIPS_INVAL("MASK DPAW.PH");
24379 generate_exception_end(ctx
, EXCP_RI
);
24384 op2
= MASK_INSV(ctx
->opcode
);
24395 t0
= tcg_temp_new();
24396 t1
= tcg_temp_new();
24398 gen_load_gpr(t0
, rt
);
24399 gen_load_gpr(t1
, rs
);
24401 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24407 default: /* Invalid */
24408 MIPS_INVAL("MASK INSV");
24409 generate_exception_end(ctx
, EXCP_RI
);
24413 case OPC_APPEND_DSP
:
24414 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24416 case OPC_EXTR_W_DSP
:
24417 op2
= MASK_EXTR_W(ctx
->opcode
);
24421 case OPC_EXTR_RS_W
:
24423 case OPC_EXTRV_S_H
:
24425 case OPC_EXTRV_R_W
:
24426 case OPC_EXTRV_RS_W
:
24431 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24434 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24440 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24442 default: /* Invalid */
24443 MIPS_INVAL("MASK EXTR.W");
24444 generate_exception_end(ctx
, EXCP_RI
);
24448 #if defined(TARGET_MIPS64)
24449 case OPC_DDIV_G_2E
:
24450 case OPC_DDIVU_G_2E
:
24451 case OPC_DMULT_G_2E
:
24452 case OPC_DMULTU_G_2E
:
24453 case OPC_DMOD_G_2E
:
24454 case OPC_DMODU_G_2E
:
24455 check_insn(ctx
, INSN_LOONGSON2E
);
24456 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24458 case OPC_ABSQ_S_QH_DSP
:
24459 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
24461 case OPC_PRECEQ_L_PWL
:
24462 case OPC_PRECEQ_L_PWR
:
24463 case OPC_PRECEQ_PW_QHL
:
24464 case OPC_PRECEQ_PW_QHR
:
24465 case OPC_PRECEQ_PW_QHLA
:
24466 case OPC_PRECEQ_PW_QHRA
:
24467 case OPC_PRECEQU_QH_OBL
:
24468 case OPC_PRECEQU_QH_OBR
:
24469 case OPC_PRECEQU_QH_OBLA
:
24470 case OPC_PRECEQU_QH_OBRA
:
24471 case OPC_PRECEU_QH_OBL
:
24472 case OPC_PRECEU_QH_OBR
:
24473 case OPC_PRECEU_QH_OBLA
:
24474 case OPC_PRECEU_QH_OBRA
:
24475 case OPC_ABSQ_S_OB
:
24476 case OPC_ABSQ_S_PW
:
24477 case OPC_ABSQ_S_QH
:
24478 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24486 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24488 default: /* Invalid */
24489 MIPS_INVAL("MASK ABSQ_S.QH");
24490 generate_exception_end(ctx
, EXCP_RI
);
24494 case OPC_ADDU_OB_DSP
:
24495 op2
= MASK_ADDU_OB(ctx
->opcode
);
24497 case OPC_RADDU_L_OB
:
24499 case OPC_SUBQ_S_PW
:
24501 case OPC_SUBQ_S_QH
:
24503 case OPC_SUBU_S_OB
:
24505 case OPC_SUBU_S_QH
:
24507 case OPC_SUBUH_R_OB
:
24509 case OPC_ADDQ_S_PW
:
24511 case OPC_ADDQ_S_QH
:
24513 case OPC_ADDU_S_OB
:
24515 case OPC_ADDU_S_QH
:
24517 case OPC_ADDUH_R_OB
:
24518 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24520 case OPC_MULEQ_S_PW_QHL
:
24521 case OPC_MULEQ_S_PW_QHR
:
24522 case OPC_MULEU_S_QH_OBL
:
24523 case OPC_MULEU_S_QH_OBR
:
24524 case OPC_MULQ_RS_QH
:
24525 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24527 default: /* Invalid */
24528 MIPS_INVAL("MASK ADDU.OB");
24529 generate_exception_end(ctx
, EXCP_RI
);
24533 case OPC_CMPU_EQ_OB_DSP
:
24534 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
24536 case OPC_PRECR_SRA_QH_PW
:
24537 case OPC_PRECR_SRA_R_QH_PW
:
24538 /* Return value is rt. */
24539 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24541 case OPC_PRECR_OB_QH
:
24542 case OPC_PRECRQ_OB_QH
:
24543 case OPC_PRECRQ_PW_L
:
24544 case OPC_PRECRQ_QH_PW
:
24545 case OPC_PRECRQ_RS_QH_PW
:
24546 case OPC_PRECRQU_S_OB_QH
:
24547 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24549 case OPC_CMPU_EQ_OB
:
24550 case OPC_CMPU_LT_OB
:
24551 case OPC_CMPU_LE_OB
:
24552 case OPC_CMP_EQ_QH
:
24553 case OPC_CMP_LT_QH
:
24554 case OPC_CMP_LE_QH
:
24555 case OPC_CMP_EQ_PW
:
24556 case OPC_CMP_LT_PW
:
24557 case OPC_CMP_LE_PW
:
24558 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24560 case OPC_CMPGDU_EQ_OB
:
24561 case OPC_CMPGDU_LT_OB
:
24562 case OPC_CMPGDU_LE_OB
:
24563 case OPC_CMPGU_EQ_OB
:
24564 case OPC_CMPGU_LT_OB
:
24565 case OPC_CMPGU_LE_OB
:
24566 case OPC_PACKRL_PW
:
24570 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24572 default: /* Invalid */
24573 MIPS_INVAL("MASK CMPU_EQ.OB");
24574 generate_exception_end(ctx
, EXCP_RI
);
24578 case OPC_DAPPEND_DSP
:
24579 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24581 case OPC_DEXTR_W_DSP
:
24582 op2
= MASK_DEXTR_W(ctx
->opcode
);
24589 case OPC_DEXTR_R_L
:
24590 case OPC_DEXTR_RS_L
:
24592 case OPC_DEXTR_R_W
:
24593 case OPC_DEXTR_RS_W
:
24594 case OPC_DEXTR_S_H
:
24596 case OPC_DEXTRV_R_L
:
24597 case OPC_DEXTRV_RS_L
:
24598 case OPC_DEXTRV_S_H
:
24600 case OPC_DEXTRV_R_W
:
24601 case OPC_DEXTRV_RS_W
:
24602 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24607 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24609 default: /* Invalid */
24610 MIPS_INVAL("MASK EXTR.W");
24611 generate_exception_end(ctx
, EXCP_RI
);
24615 case OPC_DPAQ_W_QH_DSP
:
24616 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
24618 case OPC_DPAU_H_OBL
:
24619 case OPC_DPAU_H_OBR
:
24620 case OPC_DPSU_H_OBL
:
24621 case OPC_DPSU_H_OBR
:
24623 case OPC_DPAQ_S_W_QH
:
24625 case OPC_DPSQ_S_W_QH
:
24626 case OPC_MULSAQ_S_W_QH
:
24627 case OPC_DPAQ_SA_L_PW
:
24628 case OPC_DPSQ_SA_L_PW
:
24629 case OPC_MULSAQ_S_L_PW
:
24630 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24632 case OPC_MAQ_S_W_QHLL
:
24633 case OPC_MAQ_S_W_QHLR
:
24634 case OPC_MAQ_S_W_QHRL
:
24635 case OPC_MAQ_S_W_QHRR
:
24636 case OPC_MAQ_SA_W_QHLL
:
24637 case OPC_MAQ_SA_W_QHLR
:
24638 case OPC_MAQ_SA_W_QHRL
:
24639 case OPC_MAQ_SA_W_QHRR
:
24640 case OPC_MAQ_S_L_PWL
:
24641 case OPC_MAQ_S_L_PWR
:
24646 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24648 default: /* Invalid */
24649 MIPS_INVAL("MASK DPAQ.W.QH");
24650 generate_exception_end(ctx
, EXCP_RI
);
24654 case OPC_DINSV_DSP
:
24655 op2
= MASK_INSV(ctx
->opcode
);
24666 t0
= tcg_temp_new();
24667 t1
= tcg_temp_new();
24669 gen_load_gpr(t0
, rt
);
24670 gen_load_gpr(t1
, rs
);
24672 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24678 default: /* Invalid */
24679 MIPS_INVAL("MASK DINSV");
24680 generate_exception_end(ctx
, EXCP_RI
);
24684 case OPC_SHLL_OB_DSP
:
24685 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24688 default: /* Invalid */
24689 MIPS_INVAL("special3_legacy");
24690 generate_exception_end(ctx
, EXCP_RI
);
24695 static void decode_tx79_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
24697 uint32_t opc
= MASK_TX79_MMI0(ctx
->opcode
);
24700 case TX79_MMI0_PADDW
: /* TODO: TX79_MMI0_PADDW */
24701 case TX79_MMI0_PSUBW
: /* TODO: TX79_MMI0_PSUBW */
24702 case TX79_MMI0_PCGTW
: /* TODO: TX79_MMI0_PCGTW */
24703 case TX79_MMI0_PMAXW
: /* TODO: TX79_MMI0_PMAXW */
24704 case TX79_MMI0_PADDH
: /* TODO: TX79_MMI0_PADDH */
24705 case TX79_MMI0_PSUBH
: /* TODO: TX79_MMI0_PSUBH */
24706 case TX79_MMI0_PCGTH
: /* TODO: TX79_MMI0_PCGTH */
24707 case TX79_MMI0_PMAXH
: /* TODO: TX79_MMI0_PMAXH */
24708 case TX79_MMI0_PADDB
: /* TODO: TX79_MMI0_PADDB */
24709 case TX79_MMI0_PSUBB
: /* TODO: TX79_MMI0_PSUBB */
24710 case TX79_MMI0_PCGTB
: /* TODO: TX79_MMI0_PCGTB */
24711 case TX79_MMI0_PADDSW
: /* TODO: TX79_MMI0_PADDSW */
24712 case TX79_MMI0_PSUBSW
: /* TODO: TX79_MMI0_PSUBSW */
24713 case TX79_MMI0_PEXTLW
: /* TODO: TX79_MMI0_PEXTLW */
24714 case TX79_MMI0_PPACW
: /* TODO: TX79_MMI0_PPACW */
24715 case TX79_MMI0_PADDSH
: /* TODO: TX79_MMI0_PADDSH */
24716 case TX79_MMI0_PSUBSH
: /* TODO: TX79_MMI0_PSUBSH */
24717 case TX79_MMI0_PEXTLH
: /* TODO: TX79_MMI0_PEXTLH */
24718 case TX79_MMI0_PPACH
: /* TODO: TX79_MMI0_PPACH */
24719 case TX79_MMI0_PADDSB
: /* TODO: TX79_MMI0_PADDSB */
24720 case TX79_MMI0_PSUBSB
: /* TODO: TX79_MMI0_PSUBSB */
24721 case TX79_MMI0_PEXTLB
: /* TODO: TX79_MMI0_PEXTLB */
24722 case TX79_MMI0_PPACB
: /* TODO: TX79_MMI0_PPACB */
24723 case TX79_MMI0_PEXT5
: /* TODO: TX79_MMI0_PEXT5 */
24724 case TX79_MMI0_PPAC5
: /* TODO: TX79_MMI0_PPAC5 */
24725 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI0 */
24728 MIPS_INVAL("TX79 MMI class MMI0");
24729 generate_exception_end(ctx
, EXCP_RI
);
24734 static void decode_tx79_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
24736 uint32_t opc
= MASK_TX79_MMI1(ctx
->opcode
);
24739 case TX79_MMI1_PABSW
: /* TODO: TX79_MMI1_PABSW */
24740 case TX79_MMI1_PCEQW
: /* TODO: TX79_MMI1_PCEQW */
24741 case TX79_MMI1_PMINW
: /* TODO: TX79_MMI1_PMINW */
24742 case TX79_MMI1_PADSBH
: /* TODO: TX79_MMI1_PADSBH */
24743 case TX79_MMI1_PABSH
: /* TODO: TX79_MMI1_PABSH */
24744 case TX79_MMI1_PCEQH
: /* TODO: TX79_MMI1_PCEQH */
24745 case TX79_MMI1_PMINH
: /* TODO: TX79_MMI1_PMINH */
24746 case TX79_MMI1_PCEQB
: /* TODO: TX79_MMI1_PCEQB */
24747 case TX79_MMI1_PADDUW
: /* TODO: TX79_MMI1_PADDUW */
24748 case TX79_MMI1_PSUBUW
: /* TODO: TX79_MMI1_PSUBUW */
24749 case TX79_MMI1_PEXTUW
: /* TODO: TX79_MMI1_PEXTUW */
24750 case TX79_MMI1_PADDUH
: /* TODO: TX79_MMI1_PADDUH */
24751 case TX79_MMI1_PSUBUH
: /* TODO: TX79_MMI1_PSUBUH */
24752 case TX79_MMI1_PEXTUH
: /* TODO: TX79_MMI1_PEXTUH */
24753 case TX79_MMI1_PADDUB
: /* TODO: TX79_MMI1_PADDUB */
24754 case TX79_MMI1_PSUBUB
: /* TODO: TX79_MMI1_PSUBUB */
24755 case TX79_MMI1_PEXTUB
: /* TODO: TX79_MMI1_PEXTUB */
24756 case TX79_MMI1_QFSRV
: /* TODO: TX79_MMI1_QFSRV */
24757 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI1 */
24760 MIPS_INVAL("TX79 MMI class MMI1");
24761 generate_exception_end(ctx
, EXCP_RI
);
24766 static void decode_tx79_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
24768 uint32_t opc
= MASK_TX79_MMI2(ctx
->opcode
);
24771 case TX79_MMI2_PMADDW
: /* TODO: TX79_MMI2_PMADDW */
24772 case TX79_MMI2_PSLLVW
: /* TODO: TX79_MMI2_PSLLVW */
24773 case TX79_MMI2_PSRLVW
: /* TODO: TX79_MMI2_PSRLVW */
24774 case TX79_MMI2_PMSUBW
: /* TODO: TX79_MMI2_PMSUBW */
24775 case TX79_MMI2_PMFHI
: /* TODO: TX79_MMI2_PMFHI */
24776 case TX79_MMI2_PMFLO
: /* TODO: TX79_MMI2_PMFLO */
24777 case TX79_MMI2_PINTH
: /* TODO: TX79_MMI2_PINTH */
24778 case TX79_MMI2_PMULTW
: /* TODO: TX79_MMI2_PMULTW */
24779 case TX79_MMI2_PDIVW
: /* TODO: TX79_MMI2_PDIVW */
24780 case TX79_MMI2_PCPYLD
: /* TODO: TX79_MMI2_PCPYLD */
24781 case TX79_MMI2_PMADDH
: /* TODO: TX79_MMI2_PMADDH */
24782 case TX79_MMI2_PHMADH
: /* TODO: TX79_MMI2_PHMADH */
24783 case TX79_MMI2_PAND
: /* TODO: TX79_MMI2_PAND */
24784 case TX79_MMI2_PXOR
: /* TODO: TX79_MMI2_PXOR */
24785 case TX79_MMI2_PMSUBH
: /* TODO: TX79_MMI2_PMSUBH */
24786 case TX79_MMI2_PHMSBH
: /* TODO: TX79_MMI2_PHMSBH */
24787 case TX79_MMI2_PEXEH
: /* TODO: TX79_MMI2_PEXEH */
24788 case TX79_MMI2_PREVH
: /* TODO: TX79_MMI2_PREVH */
24789 case TX79_MMI2_PMULTH
: /* TODO: TX79_MMI2_PMULTH */
24790 case TX79_MMI2_PDIVBW
: /* TODO: TX79_MMI2_PDIVBW */
24791 case TX79_MMI2_PEXEW
: /* TODO: TX79_MMI2_PEXEW */
24792 case TX79_MMI2_PROT3W
: /* TODO: TX79_MMI2_PROT3W */
24793 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI2 */
24796 MIPS_INVAL("TX79 MMI class MMI2");
24797 generate_exception_end(ctx
, EXCP_RI
);
24802 static void decode_tx79_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
24804 uint32_t opc
= MASK_TX79_MMI3(ctx
->opcode
);
24807 case TX79_MMI3_PMADDUW
: /* TODO: TX79_MMI3_PMADDUW */
24808 case TX79_MMI3_PSRAVW
: /* TODO: TX79_MMI3_PSRAVW */
24809 case TX79_MMI3_PMTHI
: /* TODO: TX79_MMI3_PMTHI */
24810 case TX79_MMI3_PMTLO
: /* TODO: TX79_MMI3_PMTLO */
24811 case TX79_MMI3_PINTEH
: /* TODO: TX79_MMI3_PINTEH */
24812 case TX79_MMI3_PMULTUW
: /* TODO: TX79_MMI3_PMULTUW */
24813 case TX79_MMI3_PDIVUW
: /* TODO: TX79_MMI3_PDIVUW */
24814 case TX79_MMI3_PCPYUD
: /* TODO: TX79_MMI3_PCPYUD */
24815 case TX79_MMI3_POR
: /* TODO: TX79_MMI3_POR */
24816 case TX79_MMI3_PNOR
: /* TODO: TX79_MMI3_PNOR */
24817 case TX79_MMI3_PEXCH
: /* TODO: TX79_MMI3_PEXCH */
24818 case TX79_MMI3_PCPYH
: /* TODO: TX79_MMI3_PCPYH */
24819 case TX79_MMI3_PEXCW
: /* TODO: TX79_MMI3_PEXCW */
24820 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI3 */
24823 MIPS_INVAL("TX79 MMI class MMI3");
24824 generate_exception_end(ctx
, EXCP_RI
);
24829 static void decode_tx79_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
24831 uint32_t opc
= MASK_TX79_MMI(ctx
->opcode
);
24832 int rs
= extract32(ctx
->opcode
, 21, 5);
24833 int rt
= extract32(ctx
->opcode
, 16, 5);
24834 int rd
= extract32(ctx
->opcode
, 11, 5);
24837 case TX79_MMI_CLASS_MMI0
:
24838 decode_tx79_mmi0(env
, ctx
);
24840 case TX79_MMI_CLASS_MMI1
:
24841 decode_tx79_mmi1(env
, ctx
);
24843 case TX79_MMI_CLASS_MMI2
:
24844 decode_tx79_mmi2(env
, ctx
);
24846 case TX79_MMI_CLASS_MMI3
:
24847 decode_tx79_mmi3(env
, ctx
);
24849 case TX79_MMI_MULT1
:
24850 case TX79_MMI_MULTU1
:
24851 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
24853 case TX79_MMI_DIV1
:
24854 case TX79_MMI_DIVU1
:
24855 gen_muldiv(ctx
, opc
, 1, rs
, rt
);
24857 case TX79_MMI_MTLO1
:
24858 case TX79_MMI_MTHI1
:
24859 gen_HILO(ctx
, opc
, 1, rs
);
24861 case TX79_MMI_MFLO1
:
24862 case TX79_MMI_MFHI1
:
24863 gen_HILO(ctx
, opc
, 1, rd
);
24865 case TX79_MMI_MADD
: /* TODO: TX79_MMI_MADD */
24866 case TX79_MMI_MADDU
: /* TODO: TX79_MMI_MADDU */
24867 case TX79_MMI_PLZCW
: /* TODO: TX79_MMI_PLZCW */
24868 case TX79_MMI_MADD1
: /* TODO: TX79_MMI_MADD1 */
24869 case TX79_MMI_MADDU1
: /* TODO: TX79_MMI_MADDU1 */
24870 case TX79_MMI_PMFHL
: /* TODO: TX79_MMI_PMFHL */
24871 case TX79_MMI_PMTHL
: /* TODO: TX79_MMI_PMTHL */
24872 case TX79_MMI_PSLLH
: /* TODO: TX79_MMI_PSLLH */
24873 case TX79_MMI_PSRLH
: /* TODO: TX79_MMI_PSRLH */
24874 case TX79_MMI_PSRAH
: /* TODO: TX79_MMI_PSRAH */
24875 case TX79_MMI_PSLLW
: /* TODO: TX79_MMI_PSLLW */
24876 case TX79_MMI_PSRLW
: /* TODO: TX79_MMI_PSRLW */
24877 case TX79_MMI_PSRAW
: /* TODO: TX79_MMI_PSRAW */
24878 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_CLASS_MMI */
24881 MIPS_INVAL("TX79 MMI class");
24882 generate_exception_end(ctx
, EXCP_RI
);
24887 static void decode_tx79_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
24889 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_LQ */
24892 static void gen_tx79_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
24894 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_SQ */
24898 * The TX79-specific instruction Store Quadword
24900 * +--------+-------+-------+------------------------+
24901 * | 011111 | base | rt | offset | SQ
24902 * +--------+-------+-------+------------------------+
24905 * has the same opcode as the Read Hardware Register instruction
24907 * +--------+-------+-------+-------+-------+--------+
24908 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
24909 * +--------+-------+-------+-------+-------+--------+
24912 * that is required, trapped and emulated by the Linux kernel. However, all
24913 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
24914 * offset is odd. Therefore all valid SQ instructions can execute normally.
24915 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
24916 * between SQ and RDHWR, as the Linux kernel does.
24918 static void decode_tx79_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
24920 int base
= extract32(ctx
->opcode
, 21, 5);
24921 int rt
= extract32(ctx
->opcode
, 16, 5);
24922 int offset
= extract32(ctx
->opcode
, 0, 16);
24924 #ifdef CONFIG_USER_ONLY
24925 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
24926 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
24928 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
24929 int rd
= extract32(ctx
->opcode
, 11, 5);
24931 gen_rdhwr(ctx
, rt
, rd
, 0);
24936 gen_tx79_sq(ctx
, base
, rt
, offset
);
24939 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
24941 int rs
, rt
, rd
, sa
;
24945 rs
= (ctx
->opcode
>> 21) & 0x1f;
24946 rt
= (ctx
->opcode
>> 16) & 0x1f;
24947 rd
= (ctx
->opcode
>> 11) & 0x1f;
24948 sa
= (ctx
->opcode
>> 6) & 0x1f;
24949 imm
= sextract32(ctx
->opcode
, 7, 9);
24951 op1
= MASK_SPECIAL3(ctx
->opcode
);
24954 * EVA loads and stores overlap Loongson 2E instructions decoded by
24955 * decode_opc_special3_legacy(), so be careful to allow their decoding when
24962 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24970 check_cp0_enabled(ctx
);
24971 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24980 check_cp0_enabled(ctx
);
24981 gen_st(ctx
, op1
, rt
, rs
, imm
);
24984 check_cp0_enabled(ctx
);
24985 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24988 check_cp0_enabled(ctx
);
24989 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24990 gen_cache_operation(ctx
, rt
, rs
, imm
);
24992 /* Treat as NOP. */
24995 check_cp0_enabled(ctx
);
24996 /* Treat as NOP. */
25004 check_insn(ctx
, ISA_MIPS32R2
);
25005 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
25008 op2
= MASK_BSHFL(ctx
->opcode
);
25015 check_insn(ctx
, ISA_MIPS32R6
);
25016 decode_opc_special3_r6(env
, ctx
);
25019 check_insn(ctx
, ISA_MIPS32R2
);
25020 gen_bshfl(ctx
, op2
, rt
, rd
);
25024 #if defined(TARGET_MIPS64)
25031 check_insn(ctx
, ISA_MIPS64R2
);
25032 check_mips_64(ctx
);
25033 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
25036 op2
= MASK_DBSHFL(ctx
->opcode
);
25047 check_insn(ctx
, ISA_MIPS32R6
);
25048 decode_opc_special3_r6(env
, ctx
);
25051 check_insn(ctx
, ISA_MIPS64R2
);
25052 check_mips_64(ctx
);
25053 op2
= MASK_DBSHFL(ctx
->opcode
);
25054 gen_bshfl(ctx
, op2
, rt
, rd
);
25060 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
25065 TCGv t0
= tcg_temp_new();
25066 TCGv t1
= tcg_temp_new();
25068 gen_load_gpr(t0
, rt
);
25069 gen_load_gpr(t1
, rs
);
25070 gen_helper_fork(t0
, t1
);
25078 TCGv t0
= tcg_temp_new();
25080 gen_load_gpr(t0
, rs
);
25081 gen_helper_yield(t0
, cpu_env
, t0
);
25082 gen_store_gpr(t0
, rd
);
25087 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25088 decode_opc_special3_r6(env
, ctx
);
25090 decode_opc_special3_legacy(env
, ctx
);
25095 /* MIPS SIMD Architecture (MSA) */
25096 static inline int check_msa_access(DisasContext
*ctx
)
25098 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
25099 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
25100 generate_exception_end(ctx
, EXCP_RI
);
25104 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
25105 if (ctx
->insn_flags
& ASE_MSA
) {
25106 generate_exception_end(ctx
, EXCP_MSADIS
);
25109 generate_exception_end(ctx
, EXCP_RI
);
25116 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
25118 /* generates tcg ops to check if any element is 0 */
25119 /* Note this function only works with MSA_WRLEN = 128 */
25120 uint64_t eval_zero_or_big
= 0;
25121 uint64_t eval_big
= 0;
25122 TCGv_i64 t0
= tcg_temp_new_i64();
25123 TCGv_i64 t1
= tcg_temp_new_i64();
25126 eval_zero_or_big
= 0x0101010101010101ULL
;
25127 eval_big
= 0x8080808080808080ULL
;
25130 eval_zero_or_big
= 0x0001000100010001ULL
;
25131 eval_big
= 0x8000800080008000ULL
;
25134 eval_zero_or_big
= 0x0000000100000001ULL
;
25135 eval_big
= 0x8000000080000000ULL
;
25138 eval_zero_or_big
= 0x0000000000000001ULL
;
25139 eval_big
= 0x8000000000000000ULL
;
25142 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
25143 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
25144 tcg_gen_andi_i64(t0
, t0
, eval_big
);
25145 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
25146 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
25147 tcg_gen_andi_i64(t1
, t1
, eval_big
);
25148 tcg_gen_or_i64(t0
, t0
, t1
);
25149 /* if all bits are zero then all elements are not zero */
25150 /* if some bit is non-zero then some element is zero */
25151 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
25152 tcg_gen_trunc_i64_tl(tresult
, t0
);
25153 tcg_temp_free_i64(t0
);
25154 tcg_temp_free_i64(t1
);
25157 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
25159 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25160 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25161 int64_t s16
= (int16_t)ctx
->opcode
;
25163 check_msa_access(ctx
);
25165 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
25166 generate_exception_end(ctx
, EXCP_RI
);
25173 TCGv_i64 t0
= tcg_temp_new_i64();
25174 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
25175 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
25176 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
25177 tcg_gen_trunc_i64_tl(bcond
, t0
);
25178 tcg_temp_free_i64(t0
);
25185 gen_check_zero_element(bcond
, df
, wt
);
25191 gen_check_zero_element(bcond
, df
, wt
);
25192 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
25196 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
25198 ctx
->hflags
|= MIPS_HFLAG_BC
;
25199 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
25202 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
25204 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
25205 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
25206 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25207 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25209 TCGv_i32 twd
= tcg_const_i32(wd
);
25210 TCGv_i32 tws
= tcg_const_i32(ws
);
25211 TCGv_i32 ti8
= tcg_const_i32(i8
);
25213 switch (MASK_MSA_I8(ctx
->opcode
)) {
25215 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
25218 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
25221 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
25224 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
25227 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
25230 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
25233 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
25239 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
25240 if (df
== DF_DOUBLE
) {
25241 generate_exception_end(ctx
, EXCP_RI
);
25243 TCGv_i32 tdf
= tcg_const_i32(df
);
25244 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
25245 tcg_temp_free_i32(tdf
);
25250 MIPS_INVAL("MSA instruction");
25251 generate_exception_end(ctx
, EXCP_RI
);
25255 tcg_temp_free_i32(twd
);
25256 tcg_temp_free_i32(tws
);
25257 tcg_temp_free_i32(ti8
);
25260 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
25262 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25263 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25264 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
25265 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
25266 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25267 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25269 TCGv_i32 tdf
= tcg_const_i32(df
);
25270 TCGv_i32 twd
= tcg_const_i32(wd
);
25271 TCGv_i32 tws
= tcg_const_i32(ws
);
25272 TCGv_i32 timm
= tcg_temp_new_i32();
25273 tcg_gen_movi_i32(timm
, u5
);
25275 switch (MASK_MSA_I5(ctx
->opcode
)) {
25277 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25280 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25282 case OPC_MAXI_S_df
:
25283 tcg_gen_movi_i32(timm
, s5
);
25284 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25286 case OPC_MAXI_U_df
:
25287 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25289 case OPC_MINI_S_df
:
25290 tcg_gen_movi_i32(timm
, s5
);
25291 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25293 case OPC_MINI_U_df
:
25294 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25297 tcg_gen_movi_i32(timm
, s5
);
25298 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25300 case OPC_CLTI_S_df
:
25301 tcg_gen_movi_i32(timm
, s5
);
25302 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25304 case OPC_CLTI_U_df
:
25305 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25307 case OPC_CLEI_S_df
:
25308 tcg_gen_movi_i32(timm
, s5
);
25309 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25311 case OPC_CLEI_U_df
:
25312 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25316 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
25317 tcg_gen_movi_i32(timm
, s10
);
25318 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
25322 MIPS_INVAL("MSA instruction");
25323 generate_exception_end(ctx
, EXCP_RI
);
25327 tcg_temp_free_i32(tdf
);
25328 tcg_temp_free_i32(twd
);
25329 tcg_temp_free_i32(tws
);
25330 tcg_temp_free_i32(timm
);
25333 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
25335 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25336 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
25337 uint32_t df
= 0, m
= 0;
25338 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25339 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25346 if ((dfm
& 0x40) == 0x00) {
25349 } else if ((dfm
& 0x60) == 0x40) {
25352 } else if ((dfm
& 0x70) == 0x60) {
25355 } else if ((dfm
& 0x78) == 0x70) {
25359 generate_exception_end(ctx
, EXCP_RI
);
25363 tdf
= tcg_const_i32(df
);
25364 tm
= tcg_const_i32(m
);
25365 twd
= tcg_const_i32(wd
);
25366 tws
= tcg_const_i32(ws
);
25368 switch (MASK_MSA_BIT(ctx
->opcode
)) {
25370 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25373 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
25376 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25379 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25382 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
25385 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
25387 case OPC_BINSLI_df
:
25388 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25390 case OPC_BINSRI_df
:
25391 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25394 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
25397 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
25400 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
25403 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25406 MIPS_INVAL("MSA instruction");
25407 generate_exception_end(ctx
, EXCP_RI
);
25411 tcg_temp_free_i32(tdf
);
25412 tcg_temp_free_i32(tm
);
25413 tcg_temp_free_i32(twd
);
25414 tcg_temp_free_i32(tws
);
25417 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
25419 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25420 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25421 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25422 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25423 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25425 TCGv_i32 tdf
= tcg_const_i32(df
);
25426 TCGv_i32 twd
= tcg_const_i32(wd
);
25427 TCGv_i32 tws
= tcg_const_i32(ws
);
25428 TCGv_i32 twt
= tcg_const_i32(wt
);
25430 switch (MASK_MSA_3R(ctx
->opcode
)) {
25432 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
25435 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25438 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25441 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25443 case OPC_SUBS_S_df
:
25444 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25447 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25450 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
25453 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25456 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
25459 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25461 case OPC_ADDS_A_df
:
25462 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25464 case OPC_SUBS_U_df
:
25465 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25468 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25471 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
25474 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
25477 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25480 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25483 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25485 case OPC_ADDS_S_df
:
25486 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25488 case OPC_SUBSUS_U_df
:
25489 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25492 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25495 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25498 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25501 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25504 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25507 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25509 case OPC_ADDS_U_df
:
25510 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25512 case OPC_SUBSUU_S_df
:
25513 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25516 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25519 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
25522 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25525 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25528 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25530 case OPC_ASUB_S_df
:
25531 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25534 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25537 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25540 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
25543 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25546 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25549 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25551 case OPC_ASUB_U_df
:
25552 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25555 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25558 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25561 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25564 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25566 case OPC_AVER_S_df
:
25567 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25570 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25573 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25576 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25579 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25581 case OPC_AVER_U_df
:
25582 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25585 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25588 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25591 case OPC_DOTP_S_df
:
25592 case OPC_DOTP_U_df
:
25593 case OPC_DPADD_S_df
:
25594 case OPC_DPADD_U_df
:
25595 case OPC_DPSUB_S_df
:
25596 case OPC_HADD_S_df
:
25597 case OPC_DPSUB_U_df
:
25598 case OPC_HADD_U_df
:
25599 case OPC_HSUB_S_df
:
25600 case OPC_HSUB_U_df
:
25601 if (df
== DF_BYTE
) {
25602 generate_exception_end(ctx
, EXCP_RI
);
25605 switch (MASK_MSA_3R(ctx
->opcode
)) {
25606 case OPC_DOTP_S_df
:
25607 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25609 case OPC_DOTP_U_df
:
25610 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25612 case OPC_DPADD_S_df
:
25613 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25615 case OPC_DPADD_U_df
:
25616 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25618 case OPC_DPSUB_S_df
:
25619 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25621 case OPC_HADD_S_df
:
25622 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25624 case OPC_DPSUB_U_df
:
25625 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25627 case OPC_HADD_U_df
:
25628 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25630 case OPC_HSUB_S_df
:
25631 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25633 case OPC_HSUB_U_df
:
25634 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25639 MIPS_INVAL("MSA instruction");
25640 generate_exception_end(ctx
, EXCP_RI
);
25643 tcg_temp_free_i32(twd
);
25644 tcg_temp_free_i32(tws
);
25645 tcg_temp_free_i32(twt
);
25646 tcg_temp_free_i32(tdf
);
25649 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
25651 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
25652 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
25653 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
25654 TCGv telm
= tcg_temp_new();
25655 TCGv_i32 tsr
= tcg_const_i32(source
);
25656 TCGv_i32 tdt
= tcg_const_i32(dest
);
25658 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
25660 gen_load_gpr(telm
, source
);
25661 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
25664 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
25665 gen_store_gpr(telm
, dest
);
25668 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
25671 MIPS_INVAL("MSA instruction");
25672 generate_exception_end(ctx
, EXCP_RI
);
25676 tcg_temp_free(telm
);
25677 tcg_temp_free_i32(tdt
);
25678 tcg_temp_free_i32(tsr
);
25681 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
25684 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25685 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25686 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25688 TCGv_i32 tws
= tcg_const_i32(ws
);
25689 TCGv_i32 twd
= tcg_const_i32(wd
);
25690 TCGv_i32 tn
= tcg_const_i32(n
);
25691 TCGv_i32 tdf
= tcg_const_i32(df
);
25693 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25695 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
25697 case OPC_SPLATI_df
:
25698 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
25701 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
25703 case OPC_COPY_S_df
:
25704 case OPC_COPY_U_df
:
25705 case OPC_INSERT_df
:
25706 #if !defined(TARGET_MIPS64)
25707 /* Double format valid only for MIPS64 */
25708 if (df
== DF_DOUBLE
) {
25709 generate_exception_end(ctx
, EXCP_RI
);
25713 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25714 case OPC_COPY_S_df
:
25715 if (likely(wd
!= 0)) {
25716 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
25719 case OPC_COPY_U_df
:
25720 if (likely(wd
!= 0)) {
25721 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
25724 case OPC_INSERT_df
:
25725 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
25730 MIPS_INVAL("MSA instruction");
25731 generate_exception_end(ctx
, EXCP_RI
);
25733 tcg_temp_free_i32(twd
);
25734 tcg_temp_free_i32(tws
);
25735 tcg_temp_free_i32(tn
);
25736 tcg_temp_free_i32(tdf
);
25739 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
25741 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
25742 uint32_t df
= 0, n
= 0;
25744 if ((dfn
& 0x30) == 0x00) {
25747 } else if ((dfn
& 0x38) == 0x20) {
25750 } else if ((dfn
& 0x3c) == 0x30) {
25753 } else if ((dfn
& 0x3e) == 0x38) {
25756 } else if (dfn
== 0x3E) {
25757 /* CTCMSA, CFCMSA, MOVE.V */
25758 gen_msa_elm_3e(env
, ctx
);
25761 generate_exception_end(ctx
, EXCP_RI
);
25765 gen_msa_elm_df(env
, ctx
, df
, n
);
25768 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25770 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25771 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
25772 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25773 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25774 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25776 TCGv_i32 twd
= tcg_const_i32(wd
);
25777 TCGv_i32 tws
= tcg_const_i32(ws
);
25778 TCGv_i32 twt
= tcg_const_i32(wt
);
25779 TCGv_i32 tdf
= tcg_temp_new_i32();
25781 /* adjust df value for floating-point instruction */
25782 tcg_gen_movi_i32(tdf
, df
+ 2);
25784 switch (MASK_MSA_3RF(ctx
->opcode
)) {
25786 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25789 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25792 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25795 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25798 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25801 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25804 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
25807 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25810 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25813 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25816 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25819 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25822 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25825 tcg_gen_movi_i32(tdf
, df
+ 1);
25826 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25829 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25832 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25834 case OPC_MADD_Q_df
:
25835 tcg_gen_movi_i32(tdf
, df
+ 1);
25836 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25839 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25841 case OPC_MSUB_Q_df
:
25842 tcg_gen_movi_i32(tdf
, df
+ 1);
25843 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25846 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25849 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
25852 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25855 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
25858 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25861 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25864 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25867 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25870 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25873 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25876 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25879 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25882 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
25884 case OPC_MULR_Q_df
:
25885 tcg_gen_movi_i32(tdf
, df
+ 1);
25886 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25889 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25891 case OPC_FMIN_A_df
:
25892 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25894 case OPC_MADDR_Q_df
:
25895 tcg_gen_movi_i32(tdf
, df
+ 1);
25896 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25899 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25902 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
25904 case OPC_MSUBR_Q_df
:
25905 tcg_gen_movi_i32(tdf
, df
+ 1);
25906 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25909 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25911 case OPC_FMAX_A_df
:
25912 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25915 MIPS_INVAL("MSA instruction");
25916 generate_exception_end(ctx
, EXCP_RI
);
25920 tcg_temp_free_i32(twd
);
25921 tcg_temp_free_i32(tws
);
25922 tcg_temp_free_i32(twt
);
25923 tcg_temp_free_i32(tdf
);
25926 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
25928 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25929 (op & (0x7 << 18)))
25930 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25931 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25932 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25933 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
25934 TCGv_i32 twd
= tcg_const_i32(wd
);
25935 TCGv_i32 tws
= tcg_const_i32(ws
);
25936 TCGv_i32 twt
= tcg_const_i32(wt
);
25937 TCGv_i32 tdf
= tcg_const_i32(df
);
25939 switch (MASK_MSA_2R(ctx
->opcode
)) {
25941 #if !defined(TARGET_MIPS64)
25942 /* Double format valid only for MIPS64 */
25943 if (df
== DF_DOUBLE
) {
25944 generate_exception_end(ctx
, EXCP_RI
);
25948 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
25951 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
25954 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
25957 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
25960 MIPS_INVAL("MSA instruction");
25961 generate_exception_end(ctx
, EXCP_RI
);
25965 tcg_temp_free_i32(twd
);
25966 tcg_temp_free_i32(tws
);
25967 tcg_temp_free_i32(twt
);
25968 tcg_temp_free_i32(tdf
);
25971 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25973 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25974 (op & (0xf << 17)))
25975 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25976 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25977 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25978 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
25979 TCGv_i32 twd
= tcg_const_i32(wd
);
25980 TCGv_i32 tws
= tcg_const_i32(ws
);
25981 TCGv_i32 twt
= tcg_const_i32(wt
);
25982 /* adjust df value for floating-point instruction */
25983 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
25985 switch (MASK_MSA_2RF(ctx
->opcode
)) {
25986 case OPC_FCLASS_df
:
25987 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
25989 case OPC_FTRUNC_S_df
:
25990 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
25992 case OPC_FTRUNC_U_df
:
25993 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
25996 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
25998 case OPC_FRSQRT_df
:
25999 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
26002 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
26005 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
26008 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
26010 case OPC_FEXUPL_df
:
26011 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
26013 case OPC_FEXUPR_df
:
26014 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
26017 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
26020 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
26022 case OPC_FTINT_S_df
:
26023 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
26025 case OPC_FTINT_U_df
:
26026 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
26028 case OPC_FFINT_S_df
:
26029 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
26031 case OPC_FFINT_U_df
:
26032 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
26036 tcg_temp_free_i32(twd
);
26037 tcg_temp_free_i32(tws
);
26038 tcg_temp_free_i32(twt
);
26039 tcg_temp_free_i32(tdf
);
26042 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
26044 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
26045 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
26046 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
26047 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26048 TCGv_i32 twd
= tcg_const_i32(wd
);
26049 TCGv_i32 tws
= tcg_const_i32(ws
);
26050 TCGv_i32 twt
= tcg_const_i32(wt
);
26052 switch (MASK_MSA_VEC(ctx
->opcode
)) {
26054 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
26057 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
26060 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
26063 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
26066 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
26069 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
26072 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
26075 MIPS_INVAL("MSA instruction");
26076 generate_exception_end(ctx
, EXCP_RI
);
26080 tcg_temp_free_i32(twd
);
26081 tcg_temp_free_i32(tws
);
26082 tcg_temp_free_i32(twt
);
26085 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
26087 switch (MASK_MSA_VEC(ctx
->opcode
)) {
26095 gen_msa_vec_v(env
, ctx
);
26098 gen_msa_2r(env
, ctx
);
26101 gen_msa_2rf(env
, ctx
);
26104 MIPS_INVAL("MSA instruction");
26105 generate_exception_end(ctx
, EXCP_RI
);
26110 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
26112 uint32_t opcode
= ctx
->opcode
;
26113 check_insn(ctx
, ASE_MSA
);
26114 check_msa_access(ctx
);
26116 switch (MASK_MSA_MINOR(opcode
)) {
26117 case OPC_MSA_I8_00
:
26118 case OPC_MSA_I8_01
:
26119 case OPC_MSA_I8_02
:
26120 gen_msa_i8(env
, ctx
);
26122 case OPC_MSA_I5_06
:
26123 case OPC_MSA_I5_07
:
26124 gen_msa_i5(env
, ctx
);
26126 case OPC_MSA_BIT_09
:
26127 case OPC_MSA_BIT_0A
:
26128 gen_msa_bit(env
, ctx
);
26130 case OPC_MSA_3R_0D
:
26131 case OPC_MSA_3R_0E
:
26132 case OPC_MSA_3R_0F
:
26133 case OPC_MSA_3R_10
:
26134 case OPC_MSA_3R_11
:
26135 case OPC_MSA_3R_12
:
26136 case OPC_MSA_3R_13
:
26137 case OPC_MSA_3R_14
:
26138 case OPC_MSA_3R_15
:
26139 gen_msa_3r(env
, ctx
);
26142 gen_msa_elm(env
, ctx
);
26144 case OPC_MSA_3RF_1A
:
26145 case OPC_MSA_3RF_1B
:
26146 case OPC_MSA_3RF_1C
:
26147 gen_msa_3rf(env
, ctx
);
26150 gen_msa_vec(env
, ctx
);
26161 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
26162 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
26163 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26164 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
26166 TCGv_i32 twd
= tcg_const_i32(wd
);
26167 TCGv taddr
= tcg_temp_new();
26168 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
26170 switch (MASK_MSA_MINOR(opcode
)) {
26172 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
26175 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
26178 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
26181 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
26184 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
26187 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
26190 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
26193 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
26197 tcg_temp_free_i32(twd
);
26198 tcg_temp_free(taddr
);
26202 MIPS_INVAL("MSA instruction");
26203 generate_exception_end(ctx
, EXCP_RI
);
26209 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
26212 int rs
, rt
, rd
, sa
;
26216 /* make sure instructions are on a word boundary */
26217 if (ctx
->base
.pc_next
& 0x3) {
26218 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
26219 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
26223 /* Handle blikely not taken case */
26224 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
26225 TCGLabel
*l1
= gen_new_label();
26227 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
26228 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
26229 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
26233 op
= MASK_OP_MAJOR(ctx
->opcode
);
26234 rs
= (ctx
->opcode
>> 21) & 0x1f;
26235 rt
= (ctx
->opcode
>> 16) & 0x1f;
26236 rd
= (ctx
->opcode
>> 11) & 0x1f;
26237 sa
= (ctx
->opcode
>> 6) & 0x1f;
26238 imm
= (int16_t)ctx
->opcode
;
26241 decode_opc_special(env
, ctx
);
26244 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
26245 decode_tx79_mmi(env
, ctx
);
26247 decode_opc_special2_legacy(env
, ctx
);
26251 if (ctx
->insn_flags
& INSN_R5900
) {
26252 decode_tx79_sq(env
, ctx
); /* TX79_SQ */
26254 decode_opc_special3(env
, ctx
);
26258 op1
= MASK_REGIMM(ctx
->opcode
);
26260 case OPC_BLTZL
: /* REGIMM branches */
26264 check_insn(ctx
, ISA_MIPS2
);
26265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26269 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26273 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26275 /* OPC_NAL, OPC_BAL */
26276 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
26278 generate_exception_end(ctx
, EXCP_RI
);
26281 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26284 case OPC_TGEI
: /* REGIMM traps */
26291 check_insn(ctx
, ISA_MIPS2
);
26292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26293 gen_trap(ctx
, op1
, rs
, -1, imm
);
26296 check_insn(ctx
, ISA_MIPS32R6
);
26297 generate_exception_end(ctx
, EXCP_RI
);
26300 check_insn(ctx
, ISA_MIPS32R2
);
26301 /* Break the TB to be able to sync copied instructions
26303 ctx
->base
.is_jmp
= DISAS_STOP
;
26305 case OPC_BPOSGE32
: /* MIPS DSP branch */
26306 #if defined(TARGET_MIPS64)
26310 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
26312 #if defined(TARGET_MIPS64)
26314 check_insn(ctx
, ISA_MIPS32R6
);
26315 check_mips_64(ctx
);
26317 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
26321 check_insn(ctx
, ISA_MIPS32R6
);
26322 check_mips_64(ctx
);
26324 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
26328 default: /* Invalid */
26329 MIPS_INVAL("regimm");
26330 generate_exception_end(ctx
, EXCP_RI
);
26335 check_cp0_enabled(ctx
);
26336 op1
= MASK_CP0(ctx
->opcode
);
26344 #if defined(TARGET_MIPS64)
26348 #ifndef CONFIG_USER_ONLY
26349 gen_cp0(env
, ctx
, op1
, rt
, rd
);
26350 #endif /* !CONFIG_USER_ONLY */
26368 #ifndef CONFIG_USER_ONLY
26369 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
26370 #endif /* !CONFIG_USER_ONLY */
26373 #ifndef CONFIG_USER_ONLY
26376 TCGv t0
= tcg_temp_new();
26378 op2
= MASK_MFMC0(ctx
->opcode
);
26382 gen_helper_dmt(t0
);
26383 gen_store_gpr(t0
, rt
);
26387 gen_helper_emt(t0
);
26388 gen_store_gpr(t0
, rt
);
26392 gen_helper_dvpe(t0
, cpu_env
);
26393 gen_store_gpr(t0
, rt
);
26397 gen_helper_evpe(t0
, cpu_env
);
26398 gen_store_gpr(t0
, rt
);
26401 check_insn(ctx
, ISA_MIPS32R6
);
26403 gen_helper_dvp(t0
, cpu_env
);
26404 gen_store_gpr(t0
, rt
);
26408 check_insn(ctx
, ISA_MIPS32R6
);
26410 gen_helper_evp(t0
, cpu_env
);
26411 gen_store_gpr(t0
, rt
);
26415 check_insn(ctx
, ISA_MIPS32R2
);
26416 save_cpu_state(ctx
, 1);
26417 gen_helper_di(t0
, cpu_env
);
26418 gen_store_gpr(t0
, rt
);
26419 /* Stop translation as we may have switched
26420 the execution mode. */
26421 ctx
->base
.is_jmp
= DISAS_STOP
;
26424 check_insn(ctx
, ISA_MIPS32R2
);
26425 save_cpu_state(ctx
, 1);
26426 gen_helper_ei(t0
, cpu_env
);
26427 gen_store_gpr(t0
, rt
);
26428 /* DISAS_STOP isn't sufficient, we need to ensure we break
26429 out of translated code to check for pending interrupts */
26430 gen_save_pc(ctx
->base
.pc_next
+ 4);
26431 ctx
->base
.is_jmp
= DISAS_EXIT
;
26433 default: /* Invalid */
26434 MIPS_INVAL("mfmc0");
26435 generate_exception_end(ctx
, EXCP_RI
);
26440 #endif /* !CONFIG_USER_ONLY */
26443 check_insn(ctx
, ISA_MIPS32R2
);
26444 gen_load_srsgpr(rt
, rd
);
26447 check_insn(ctx
, ISA_MIPS32R2
);
26448 gen_store_srsgpr(rt
, rd
);
26452 generate_exception_end(ctx
, EXCP_RI
);
26456 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
26457 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26458 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
26459 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26462 /* Arithmetic with immediate opcode */
26463 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26467 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26469 case OPC_SLTI
: /* Set on less than with immediate opcode */
26471 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
26473 case OPC_ANDI
: /* Arithmetic with immediate opcode */
26474 case OPC_LUI
: /* OPC_AUI */
26477 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
26479 case OPC_J
: /* Jump */
26481 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26482 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26485 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
26486 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26488 generate_exception_end(ctx
, EXCP_RI
);
26491 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
26492 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26495 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26498 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
26499 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26501 generate_exception_end(ctx
, EXCP_RI
);
26504 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
26505 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26508 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26511 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
26514 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26516 check_insn(ctx
, ISA_MIPS32R6
);
26517 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
26518 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26521 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
26524 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26526 check_insn(ctx
, ISA_MIPS32R6
);
26527 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
26528 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26533 check_insn(ctx
, ISA_MIPS2
);
26534 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26538 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26540 case OPC_LL
: /* Load and stores */
26541 check_insn(ctx
, ISA_MIPS2
);
26542 check_insn_opc_user_only(ctx
, INSN_R5900
);
26546 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26554 gen_ld(ctx
, op
, rt
, rs
, imm
);
26558 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26563 gen_st(ctx
, op
, rt
, rs
, imm
);
26566 check_insn(ctx
, ISA_MIPS2
);
26567 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26568 check_insn_opc_user_only(ctx
, INSN_R5900
);
26569 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26572 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26573 check_cp0_enabled(ctx
);
26574 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
26575 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26576 gen_cache_operation(ctx
, rt
, rs
, imm
);
26578 /* Treat as NOP. */
26581 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26582 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
26584 /* Treat as NOP. */
26587 /* Floating point (COP1). */
26592 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
26596 op1
= MASK_CP1(ctx
->opcode
);
26601 check_cp1_enabled(ctx
);
26602 check_insn(ctx
, ISA_MIPS32R2
);
26608 check_cp1_enabled(ctx
);
26609 gen_cp1(ctx
, op1
, rt
, rd
);
26611 #if defined(TARGET_MIPS64)
26614 check_cp1_enabled(ctx
);
26615 check_insn(ctx
, ISA_MIPS3
);
26616 check_mips_64(ctx
);
26617 gen_cp1(ctx
, op1
, rt
, rd
);
26620 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
26621 check_cp1_enabled(ctx
);
26622 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26624 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26629 check_insn(ctx
, ASE_MIPS3D
);
26630 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26631 (rt
>> 2) & 0x7, imm
<< 2);
26635 check_cp1_enabled(ctx
);
26636 check_insn(ctx
, ISA_MIPS32R6
);
26637 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26641 check_cp1_enabled(ctx
);
26642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26644 check_insn(ctx
, ASE_MIPS3D
);
26647 check_cp1_enabled(ctx
);
26648 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26649 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26650 (rt
>> 2) & 0x7, imm
<< 2);
26657 check_cp1_enabled(ctx
);
26658 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26664 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
26665 check_cp1_enabled(ctx
);
26666 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26668 case R6_OPC_CMP_AF_S
:
26669 case R6_OPC_CMP_UN_S
:
26670 case R6_OPC_CMP_EQ_S
:
26671 case R6_OPC_CMP_UEQ_S
:
26672 case R6_OPC_CMP_LT_S
:
26673 case R6_OPC_CMP_ULT_S
:
26674 case R6_OPC_CMP_LE_S
:
26675 case R6_OPC_CMP_ULE_S
:
26676 case R6_OPC_CMP_SAF_S
:
26677 case R6_OPC_CMP_SUN_S
:
26678 case R6_OPC_CMP_SEQ_S
:
26679 case R6_OPC_CMP_SEUQ_S
:
26680 case R6_OPC_CMP_SLT_S
:
26681 case R6_OPC_CMP_SULT_S
:
26682 case R6_OPC_CMP_SLE_S
:
26683 case R6_OPC_CMP_SULE_S
:
26684 case R6_OPC_CMP_OR_S
:
26685 case R6_OPC_CMP_UNE_S
:
26686 case R6_OPC_CMP_NE_S
:
26687 case R6_OPC_CMP_SOR_S
:
26688 case R6_OPC_CMP_SUNE_S
:
26689 case R6_OPC_CMP_SNE_S
:
26690 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26692 case R6_OPC_CMP_AF_D
:
26693 case R6_OPC_CMP_UN_D
:
26694 case R6_OPC_CMP_EQ_D
:
26695 case R6_OPC_CMP_UEQ_D
:
26696 case R6_OPC_CMP_LT_D
:
26697 case R6_OPC_CMP_ULT_D
:
26698 case R6_OPC_CMP_LE_D
:
26699 case R6_OPC_CMP_ULE_D
:
26700 case R6_OPC_CMP_SAF_D
:
26701 case R6_OPC_CMP_SUN_D
:
26702 case R6_OPC_CMP_SEQ_D
:
26703 case R6_OPC_CMP_SEUQ_D
:
26704 case R6_OPC_CMP_SLT_D
:
26705 case R6_OPC_CMP_SULT_D
:
26706 case R6_OPC_CMP_SLE_D
:
26707 case R6_OPC_CMP_SULE_D
:
26708 case R6_OPC_CMP_OR_D
:
26709 case R6_OPC_CMP_UNE_D
:
26710 case R6_OPC_CMP_NE_D
:
26711 case R6_OPC_CMP_SOR_D
:
26712 case R6_OPC_CMP_SUNE_D
:
26713 case R6_OPC_CMP_SNE_D
:
26714 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26717 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
26718 rt
, rd
, sa
, (imm
>> 8) & 0x7);
26723 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26738 check_insn(ctx
, ASE_MSA
);
26739 gen_msa_branch(env
, ctx
, op1
);
26743 generate_exception_end(ctx
, EXCP_RI
);
26748 /* Compact branches [R6] and COP2 [non-R6] */
26749 case OPC_BC
: /* OPC_LWC2 */
26750 case OPC_BALC
: /* OPC_SWC2 */
26751 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26752 /* OPC_BC, OPC_BALC */
26753 gen_compute_compact_branch(ctx
, op
, 0, 0,
26754 sextract32(ctx
->opcode
<< 2, 0, 28));
26756 /* OPC_LWC2, OPC_SWC2 */
26757 /* COP2: Not implemented. */
26758 generate_exception_err(ctx
, EXCP_CpU
, 2);
26761 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
26762 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
26763 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26765 /* OPC_BEQZC, OPC_BNEZC */
26766 gen_compute_compact_branch(ctx
, op
, rs
, 0,
26767 sextract32(ctx
->opcode
<< 2, 0, 23));
26769 /* OPC_JIC, OPC_JIALC */
26770 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
26773 /* OPC_LWC2, OPC_SWC2 */
26774 /* COP2: Not implemented. */
26775 generate_exception_err(ctx
, EXCP_CpU
, 2);
26779 check_insn(ctx
, INSN_LOONGSON2F
);
26780 /* Note that these instructions use different fields. */
26781 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
26785 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26786 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
26787 check_cp1_enabled(ctx
);
26788 op1
= MASK_CP3(ctx
->opcode
);
26792 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26798 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26799 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
26802 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26803 /* Treat as NOP. */
26806 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26820 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26821 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
26825 generate_exception_end(ctx
, EXCP_RI
);
26829 generate_exception_err(ctx
, EXCP_CpU
, 1);
26833 #if defined(TARGET_MIPS64)
26834 /* MIPS64 opcodes */
26836 check_insn_opc_user_only(ctx
, INSN_R5900
);
26840 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26844 check_insn(ctx
, ISA_MIPS3
);
26845 check_mips_64(ctx
);
26846 gen_ld(ctx
, op
, rt
, rs
, imm
);
26850 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26853 check_insn(ctx
, ISA_MIPS3
);
26854 check_mips_64(ctx
);
26855 gen_st(ctx
, op
, rt
, rs
, imm
);
26858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26859 check_insn(ctx
, ISA_MIPS3
);
26860 check_insn_opc_user_only(ctx
, INSN_R5900
);
26861 check_mips_64(ctx
);
26862 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26864 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
26865 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26866 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
26867 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26870 check_insn(ctx
, ISA_MIPS3
);
26871 check_mips_64(ctx
);
26872 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26876 check_insn(ctx
, ISA_MIPS3
);
26877 check_mips_64(ctx
);
26878 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26881 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
26882 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26883 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26885 MIPS_INVAL("major opcode");
26886 generate_exception_end(ctx
, EXCP_RI
);
26890 case OPC_DAUI
: /* OPC_JALX */
26891 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26892 #if defined(TARGET_MIPS64)
26894 check_mips_64(ctx
);
26896 generate_exception(ctx
, EXCP_RI
);
26897 } else if (rt
!= 0) {
26898 TCGv t0
= tcg_temp_new();
26899 gen_load_gpr(t0
, rs
);
26900 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
26904 generate_exception_end(ctx
, EXCP_RI
);
26905 MIPS_INVAL("major opcode");
26909 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
26910 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26911 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26914 case OPC_MSA
: /* OPC_MDMX */
26915 if (ctx
->insn_flags
& INSN_R5900
) {
26916 decode_tx79_lq(env
, ctx
); /* TX79_LQ */
26918 /* MDMX: Not implemented. */
26923 check_insn(ctx
, ISA_MIPS32R6
);
26924 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
26926 default: /* Invalid */
26927 MIPS_INVAL("major opcode");
26928 generate_exception_end(ctx
, EXCP_RI
);
26933 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
26935 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26936 CPUMIPSState
*env
= cs
->env_ptr
;
26938 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
26939 ctx
->saved_pc
= -1;
26940 ctx
->insn_flags
= env
->insn_flags
;
26941 ctx
->CP0_Config1
= env
->CP0_Config1
;
26942 ctx
->CP0_Config2
= env
->CP0_Config2
;
26943 ctx
->CP0_Config3
= env
->CP0_Config3
;
26944 ctx
->CP0_Config5
= env
->CP0_Config5
;
26946 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
26947 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
26948 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
26949 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
26950 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
26951 ctx
->PAMask
= env
->PAMask
;
26952 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
26953 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
26954 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
26955 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
26956 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
26957 /* Restore delay slot state from the tb context. */
26958 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
26959 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
26960 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
26961 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
26962 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
26963 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
26964 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
26965 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
26966 restore_cpu_state(env
, ctx
);
26967 #ifdef CONFIG_USER_ONLY
26968 ctx
->mem_idx
= MIPS_HFLAG_UM
;
26970 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
26972 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
26973 MO_UNALN
: MO_ALIGN
;
26975 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
26979 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26983 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26985 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26987 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
26991 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
26992 const CPUBreakpoint
*bp
)
26994 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26996 save_cpu_state(ctx
, 1);
26997 ctx
->base
.is_jmp
= DISAS_NORETURN
;
26998 gen_helper_raise_exception_debug(cpu_env
);
26999 /* The address covered by the breakpoint must be included in
27000 [tb->pc, tb->pc + tb->size) in order to for it to be
27001 properly cleared -- thus we increment the PC here so that
27002 the logic setting tb->size below does the right thing. */
27003 ctx
->base
.pc_next
+= 4;
27007 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
27009 CPUMIPSState
*env
= cs
->env_ptr
;
27010 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27014 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
27015 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
27016 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
27017 insn_bytes
= decode_nanomips_opc(env
, ctx
);
27018 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
27019 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
27021 decode_opc(env
, ctx
);
27022 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
27023 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
27024 insn_bytes
= decode_micromips_opc(env
, ctx
);
27025 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
27026 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
27027 insn_bytes
= decode_mips16_opc(env
, ctx
);
27029 generate_exception_end(ctx
, EXCP_RI
);
27030 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
27034 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27035 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
27036 MIPS_HFLAG_FBNSLOT
))) {
27037 /* force to generate branch as there is neither delay nor
27041 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
27042 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
27043 /* Force to generate branch as microMIPS R6 doesn't restrict
27044 branches in the forbidden slot. */
27049 gen_branch(ctx
, insn_bytes
);
27051 ctx
->base
.pc_next
+= insn_bytes
;
27053 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
27056 /* Execute a branch and its delay slot as a single instruction.
27057 This is what GDB expects and is consistent with what the
27058 hardware does (e.g. if a delay slot instruction faults, the
27059 reported PC is the PC of the branch). */
27060 if (ctx
->base
.singlestep_enabled
&&
27061 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
27062 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
27064 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
27065 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
27069 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
27071 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27073 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
27074 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
27075 gen_helper_raise_exception_debug(cpu_env
);
27077 switch (ctx
->base
.is_jmp
) {
27079 gen_save_pc(ctx
->base
.pc_next
);
27080 tcg_gen_lookup_and_goto_ptr();
27083 case DISAS_TOO_MANY
:
27084 save_cpu_state(ctx
, 0);
27085 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
27088 tcg_gen_exit_tb(NULL
, 0);
27090 case DISAS_NORETURN
:
27093 g_assert_not_reached();
27098 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
27100 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
27101 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
27104 static const TranslatorOps mips_tr_ops
= {
27105 .init_disas_context
= mips_tr_init_disas_context
,
27106 .tb_start
= mips_tr_tb_start
,
27107 .insn_start
= mips_tr_insn_start
,
27108 .breakpoint_check
= mips_tr_breakpoint_check
,
27109 .translate_insn
= mips_tr_translate_insn
,
27110 .tb_stop
= mips_tr_tb_stop
,
27111 .disas_log
= mips_tr_disas_log
,
27114 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
27118 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
27121 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
27125 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
27127 #define printfpr(fp) \
27130 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
27131 " fd:%13g fs:%13g psu: %13g\n", \
27132 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
27133 (double)(fp)->fd, \
27134 (double)(fp)->fs[FP_ENDIAN_IDX], \
27135 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
27138 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
27139 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
27140 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
27141 " fd:%13g fs:%13g psu:%13g\n", \
27142 tmp.w[FP_ENDIAN_IDX], tmp.d, \
27144 (double)tmp.fs[FP_ENDIAN_IDX], \
27145 (double)tmp.fs[!FP_ENDIAN_IDX]); \
27150 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
27151 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
27152 get_float_exception_flags(&env
->active_fpu
.fp_status
));
27153 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
27154 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
27155 printfpr(&env
->active_fpu
.fpr
[i
]);
27161 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
27164 MIPSCPU
*cpu
= MIPS_CPU(cs
);
27165 CPUMIPSState
*env
= &cpu
->env
;
27168 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
27169 " LO=0x" TARGET_FMT_lx
" ds %04x "
27170 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
27171 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
27172 env
->hflags
, env
->btarget
, env
->bcond
);
27173 for (i
= 0; i
< 32; i
++) {
27175 cpu_fprintf(f
, "GPR%02d:", i
);
27176 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
27178 cpu_fprintf(f
, "\n");
27181 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
27182 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
27183 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
27185 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
27186 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
27187 env
->CP0_Config2
, env
->CP0_Config3
);
27188 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
27189 env
->CP0_Config4
, env
->CP0_Config5
);
27190 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
27191 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
27195 void mips_tcg_init(void)
27200 for (i
= 1; i
< 32; i
++)
27201 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27202 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
27205 for (i
= 0; i
< 32; i
++) {
27206 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
27208 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
27209 /* The scalar floating-point unit (FPU) registers are mapped on
27210 * the MSA vector registers. */
27211 fpu_f64
[i
] = msa_wr_d
[i
* 2];
27212 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
27213 msa_wr_d
[i
* 2 + 1] =
27214 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
27217 cpu_PC
= tcg_global_mem_new(cpu_env
,
27218 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
27219 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
27220 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
27221 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
27223 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
27224 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
27227 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
27228 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
27230 bcond
= tcg_global_mem_new(cpu_env
,
27231 offsetof(CPUMIPSState
, bcond
), "bcond");
27232 btarget
= tcg_global_mem_new(cpu_env
,
27233 offsetof(CPUMIPSState
, btarget
), "btarget");
27234 hflags
= tcg_global_mem_new_i32(cpu_env
,
27235 offsetof(CPUMIPSState
, hflags
), "hflags");
27237 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
27238 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
27240 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
27241 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
27244 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
27245 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27246 offsetof(CPUMIPSState
,
27247 active_tc
.mxu_gpr
[i
]),
27251 mxu_CR
= tcg_global_mem_new(cpu_env
,
27252 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
27253 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
27256 #include "translate_init.inc.c"
27258 void cpu_mips_realize_env(CPUMIPSState
*env
)
27260 env
->exception_base
= (int32_t)0xBFC00000;
27262 #ifndef CONFIG_USER_ONLY
27263 mmu_init(env
, env
->cpu_model
);
27265 fpu_init(env
, env
->cpu_model
);
27266 mvp_init(env
, env
->cpu_model
);
27269 bool cpu_supports_cps_smp(const char *cpu_type
)
27271 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27272 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
27275 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
27277 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27278 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
27281 void cpu_set_exception_base(int vp_index
, target_ulong address
)
27283 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
27284 vp
->env
.exception_base
= address
;
27287 void cpu_state_reset(CPUMIPSState
*env
)
27289 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
27290 CPUState
*cs
= CPU(cpu
);
27292 /* Reset registers to their default values */
27293 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
27294 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
27295 #ifdef TARGET_WORDS_BIGENDIAN
27296 env
->CP0_Config0
|= (1 << CP0C0_BE
);
27298 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
27299 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
27300 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
27301 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
27302 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
27303 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
27304 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
27305 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
27306 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
27307 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
27308 << env
->cpu_model
->CP0_LLAddr_shift
;
27309 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
27310 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
27311 env
->CCRes
= env
->cpu_model
->CCRes
;
27312 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
27313 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
27314 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
27315 env
->current_tc
= 0;
27316 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
27317 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
27318 #if defined(TARGET_MIPS64)
27319 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
27320 env
->SEGMask
|= 3ULL << 62;
27323 env
->PABITS
= env
->cpu_model
->PABITS
;
27324 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
27325 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
27326 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
27327 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
27328 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
27329 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
27330 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
27331 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
27332 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
27333 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
27334 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
27335 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
27336 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
27337 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
27338 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
27339 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
27340 env
->msair
= env
->cpu_model
->MSAIR
;
27341 env
->insn_flags
= env
->cpu_model
->insn_flags
;
27343 #if defined(CONFIG_USER_ONLY)
27344 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
27345 # ifdef TARGET_MIPS64
27346 /* Enable 64-bit register mode. */
27347 env
->CP0_Status
|= (1 << CP0St_PX
);
27349 # ifdef TARGET_ABI_MIPSN64
27350 /* Enable 64-bit address mode. */
27351 env
->CP0_Status
|= (1 << CP0St_UX
);
27353 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
27354 hardware registers. */
27355 env
->CP0_HWREna
|= 0x0000000F;
27356 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
27357 env
->CP0_Status
|= (1 << CP0St_CU1
);
27359 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
27360 env
->CP0_Status
|= (1 << CP0St_MX
);
27362 # if defined(TARGET_MIPS64)
27363 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
27364 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
27365 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
27366 env
->CP0_Status
|= (1 << CP0St_FR
);
27370 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
27371 /* If the exception was raised from a delay slot,
27372 come back to the jump. */
27373 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
27374 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
27376 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
27378 env
->active_tc
.PC
= env
->exception_base
;
27379 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
27380 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
27381 env
->CP0_Wired
= 0;
27382 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
27383 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
27384 if (mips_um_ksegs_enabled()) {
27385 env
->CP0_EBase
|= 0x40000000;
27387 env
->CP0_EBase
|= (int32_t)0x80000000;
27389 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
27390 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
27392 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
27394 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
27395 /* vectored interrupts not implemented, timer on int 7,
27396 no performance counters. */
27397 env
->CP0_IntCtl
= 0xe0000000;
27401 for (i
= 0; i
< 7; i
++) {
27402 env
->CP0_WatchLo
[i
] = 0;
27403 env
->CP0_WatchHi
[i
] = 0x80000000;
27405 env
->CP0_WatchLo
[7] = 0;
27406 env
->CP0_WatchHi
[7] = 0;
27408 /* Count register increments in debug mode, EJTAG version 1 */
27409 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
27411 cpu_mips_store_count(env
, 1);
27413 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
27416 /* Only TC0 on VPE 0 starts as active. */
27417 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
27418 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
27419 env
->tcs
[i
].CP0_TCHalt
= 1;
27421 env
->active_tc
.CP0_TCHalt
= 1;
27424 if (cs
->cpu_index
== 0) {
27425 /* VPE0 starts up enabled. */
27426 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
27427 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
27429 /* TC0 starts up unhalted. */
27431 env
->active_tc
.CP0_TCHalt
= 0;
27432 env
->tcs
[0].CP0_TCHalt
= 0;
27433 /* With thread 0 active. */
27434 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
27435 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
27440 * Configure default legacy segmentation control. We use this regardless of
27441 * whether segmentation control is presented to the guest.
27443 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
27444 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
27445 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
27446 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
27447 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
27448 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27450 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
27451 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27452 (3 << CP0SC_C
)) << 16;
27453 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
27454 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27455 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
27456 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
27457 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27458 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
27459 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
27460 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
27462 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
27463 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
27464 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
27465 env
->CP0_Status
|= (1 << CP0St_FR
);
27468 if (env
->insn_flags
& ISA_MIPS32R6
) {
27470 env
->CP0_PWSize
= 0x40;
27476 env
->CP0_PWField
= 0x0C30C302;
27483 env
->CP0_PWField
= 0x02;
27486 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
27487 /* microMIPS on reset when Config3.ISA is 3 */
27488 env
->hflags
|= MIPS_HFLAG_M16
;
27492 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
27496 compute_hflags(env
);
27497 restore_fp_status(env
);
27498 restore_pamask(env
);
27499 cs
->exception_index
= EXCP_NONE
;
27501 if (semihosting_get_argc()) {
27502 /* UHI interface can be used to obtain argc and argv */
27503 env
->active_tc
.gpr
[4] = -1;
27507 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
27508 target_ulong
*data
)
27510 env
->active_tc
.PC
= data
[0];
27511 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
27512 env
->hflags
|= data
[1];
27513 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
27514 case MIPS_HFLAG_BR
:
27516 case MIPS_HFLAG_BC
:
27517 case MIPS_HFLAG_BL
:
27519 env
->btarget
= data
[2];