2 * MIPS emulation for QEMU - main translation routines
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
/* Compile-time debug switch; non-zero presumably enables extra MIPS
 * disassembly/translation debug output — confirm at its use sites. */
#define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
/* Extract the 6-bit major opcode field (bits 31..26) of an instruction
 * word.  The argument is parenthesized so that compound expressions
 * (e.g. "a | b") expand with the intended precedence. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
/* PC-relative opcode sub-decoding: major opcode plus the top 2 bits
 * (bits 20..19) or top 5 bits (bits 20..16) of the minor field.
 * Arguments parenthesized for precedence-safe expansion. */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
/* SPECIAL decoding: major opcode plus the 6-bit function field
 * (bits 5..0).  Outer parentheses added so "MASK_SPECIAL(x) == y"
 * cannot mis-parse; argument parenthesized for macro hygiene
 * (matches the parenthesized style of MASK_LX and later masks). */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
/* R6 Multiply and Divide instructions have the same opcode and
 * function field as legacy OPC_MULT[U]/OPC_DIV[U]; also fold in the
 * low 11 bits to tell them apart.  Argument parenthesized for
 * precedence-safe expansion. */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & 0x7ff))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
/* Multiplication variants of the vr54xx: SPECIAL function field plus
 * the 5-bit sa field (bits 10..6).  Fully parenthesized so the
 * expansion is precedence-safe. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
/* REGIMM decoding: major opcode plus the 5-bit rt field
 * (bits 20..16).  Fully parenthesized for precedence-safe expansion. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
/* SPECIAL2 decoding: major opcode plus the 6-bit function field
 * (bits 5..0).  Fully parenthesized for precedence-safe expansion. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
/* SPECIAL3 decoding: major opcode plus the 6-bit function field
 * (bits 5..0).  Fully parenthesized for precedence-safe expansion. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL decoding: SPECIAL3 function field plus the 5-bit sa field
 * (bits 10..6).  Fully parenthesized for precedence-safe expansion. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL decoding: SPECIAL3 function field plus the 5-bit sa field
 * (bits 10..6).  Fully parenthesized for precedence-safe expansion. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP indexed-load (LX) decoding: SPECIAL3 function field plus the
 * 5-bit sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* ADDU.QB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* The ADDUH.QB DSP opcode space shares its encoding with
 * OPC_MULT_G_2E (0x18 | OPC_SPECIAL3), so it is defined as an alias
 * rather than a distinct enumerator. */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* ADDUH.QB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* ABSQ_S.PH DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* CMPU.EQ.QB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* SHLL.QB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DPA.W.PH DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* INSV DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* APPEND DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* EXTR.W DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* ABSQ_S.QH DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* ADDU.OB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* CMPU.EQ.OB DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DAPPEND DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DEXTR.W DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* DINSV DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* DPAQ.W.QH DSP sub-decoding: SPECIAL3 function field plus the 5-bit
 * sa field (bits 10..6).  Argument parenthesized for hygiene. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* DSP GPR-based shift sub-class: SPECIAL3 function field plus bits 10..6. */
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
/*
 * CP0 decode: major opcode plus the rs field (bits 25..21).
 * Outer parentheses keep the expansion atomic inside larger expressions
 * (without them, "MASK_CP0(op) == x" mis-parses because == binds
 * tighter than |).
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * MFMC0 decode: CP0 class bits plus the low 16 bits of the instruction.
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
/*
 * C0 decode (rs == C0): CP0 class bits plus the function field (bits 5..0).
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
960 /* Coprocessor 1 (rs field) */
/*
 * CP1 decode: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
963 /* Values for the fmt field in FP instructions */
965 /* 0 - 15 are reserved */
966 FMT_S
= 16, /* single fp */
967 FMT_D
= 17, /* double fp */
968 FMT_E
= 18, /* extended fp */
969 FMT_Q
= 19, /* quad fp */
970 FMT_W
= 20, /* 32-bit fixed */
971 FMT_L
= 21, /* 64-bit fixed */
972 FMT_PS
= 22, /* paired single fp */
973 /* 23 - 31 are reserved */
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * CP1 function decode: CP1 class bits plus the function field (bits 5..0).
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * BC1 decode: CP1 class bits plus the nd/tf bits (17..16).
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * CP2 decode: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the expansion is safe inside comparisons.
 */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* Loongson multimedia decode: major opcode, rs field (25..21) and bits 4..0. */
#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1142 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* MSA minor-opcode decode: major opcode plus the low 6 function bits. */
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1402 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1403 * ============================================
1405 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of the MIPS32
1406 * instruction set. It is designed to fit the needs of signal, graphical and
1407 * video processing applications. MXU instruction set is used in Xburst family
1408 * of microprocessors by Ingenic.
1410 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1411 * the control register.
1413 * The notation used in MXU assembler mnemonics:
1415 * XRa, XRb, XRc, XRd - MXU registers
1416 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1417 * s12 - a subfield of an instruction code
1418 * strd2 - a subfield of an instruction code
1419 * eptn2 - a subfield of an instruction code
1420 * eptn3 - a subfield of an instruction code
1421 * optn2 - a subfield of an instruction code
1422 * optn3 - a subfield of an instruction code
1423 * sft4 - a subfield of an instruction code
1425 * Load/Store instructions Multiplication instructions
1426 * ----------------------- ---------------------------
1428 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1429 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1430 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1431 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1432 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1433 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1434 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1435 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1436 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1437 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1438 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1439 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1440 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1441 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1442 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1443 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1444 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1445 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1446 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1447 * S16SDI XRa, Rb, s10, eptn2
1448 * S8LDD XRa, Rb, s8, eptn3
1449 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1450 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1451 * S8SDI XRa, Rb, s8, eptn3
1452 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1453 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1454 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1455 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1456 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1457 * S32CPS XRa, XRb, XRc
1458 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1459 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1460 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1461 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1462 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1463 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1464 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1465 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1466 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1467 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1468 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1469 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1470 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1471 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1472 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1473 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1474 * Q8SLT XRa, XRb, XRc
1475 * Q8SLTU XRa, XRb, XRc
1476 * Q8MOVZ XRa, XRb, XRc Shift instructions
1477 * Q8MOVN XRa, XRb, XRc ------------------
1479 * D32SLL XRa, XRb, XRc, XRd, sft4
1480 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1481 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1482 * D32SARL XRa, XRb, XRc, sft4
1483 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1484 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1485 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1486 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1487 * Q16SLL XRa, XRb, XRc, XRd, sft4
1488 * Q16SLR XRa, XRb, XRc, XRd, sft4
1489 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1490 * ------------------------- Q16SLLV XRa, XRb, Rb
1491 * Q16SLRV XRa, XRb, Rb
1492 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1493 * S32ALN XRa, XRb, XRc, Rb
1494 * S32ALNI XRa, XRb, XRc, s3
1495 * S32LUI XRa, s8, optn3 Move instructions
1496 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1497 * S32EXTRV XRa, XRb, Rs, Rt
1498 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1499 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1505 * ┌─ 000000 ─ OPC_MXU_S32MADD
1506 * ├─ 000001 ─ OPC_MXU_S32MADDU
1507 * ├─ 000010 ─ <not assigned>
1509 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1510 * │ ├─ 001 ─ OPC_MXU_S32MIN
1511 * │ ├─ 010 ─ OPC_MXU_D16MAX
1512 * │ ├─ 011 ─ OPC_MXU_D16MIN
1513 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1514 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1515 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1516 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1517 * ├─ 000100 ─ OPC_MXU_S32MSUB
1518 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1519 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1520 * │ ├─ 001 ─ OPC_MXU_D16SLT
1521 * │ ├─ 010 ─ OPC_MXU_D16AVG
1522 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1523 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1524 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1525 * │ └─ 111 ─ OPC_MXU_Q8ADD
1528 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1529 * │ ├─ 010 ─ OPC_MXU_D16CPS
1530 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1531 * │ └─ 110 ─ OPC_MXU_Q16SAT
1532 * ├─ 001000 ─ OPC_MXU_D16MUL
1534 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1535 * │ └─ 01 ─ OPC_MXU_D16MULE
1536 * ├─ 001010 ─ OPC_MXU_D16MAC
1537 * ├─ 001011 ─ OPC_MXU_D16MACF
1538 * ├─ 001100 ─ OPC_MXU_D16MADL
1540 * ├─ 001101 ─ OPC_MXU__POOL04 ─┬─ 00 ─ OPC_MXU_S16MAD
1541 * │ └─ 01 ─ OPC_MXU_S16MAD_1
1542 * ├─ 001110 ─ OPC_MXU_Q16ADD
1543 * ├─ 001111 ─ OPC_MXU_D16MACE
1545 * ├─ 010000 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32LDD
1546 * │ └─ 1 ─ OPC_MXU_S32LDDR
1549 * ├─ 010001 ─ OPC_MXU__POOL06 ─┬─ 0 ─ OPC_MXU_S32STD
1550 * │ └─ 1 ─ OPC_MXU_S32STDR
1553 * ├─ 010010 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1554 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1557 * ├─ 010011 ─ OPC_MXU__POOL08 ─┬─ 0000 ─ OPC_MXU_S32STDV
1558 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1561 * ├─ 010100 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32LDI
1562 * │ └─ 1 ─ OPC_MXU_S32LDIR
1565 * ├─ 010101 ─ OPC_MXU__POOL10 ─┬─ 0 ─ OPC_MXU_S32SDI
1566 * │ └─ 1 ─ OPC_MXU_S32SDIR
1569 * ├─ 010110 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1570 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1573 * ├─ 010111 ─ OPC_MXU__POOL12 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1574 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1575 * ├─ 011000 ─ OPC_MXU_D32ADD
1577 * MXU ├─ 011001 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_D32ACC
1578 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1579 * │ └─ 10 ─ OPC_MXU_D32ASUM
1580 * ├─ 011010 ─ <not assigned>
1582 * ├─ 011011 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q16ACC
1583 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1584 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1587 * ├─ 011100 ─ OPC_MXU__POOL15 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1588 * │ ├─ 01 ─ OPC_MXU_D8SUM
1589 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1590 * ├─ 011110 ─ <not assigned>
1591 * ├─ 011111 ─ <not assigned>
1592 * ├─ 100000 ─ <not assigned>
1593 * ├─ 100001 ─ <not assigned>
1594 * ├─ 100010 ─ OPC_MXU_S8LDD
1595 * ├─ 100011 ─ OPC_MXU_S8STD
1596 * ├─ 100100 ─ OPC_MXU_S8LDI
1597 * ├─ 100101 ─ OPC_MXU_S8SDI
1599 * ├─ 100110 ─ OPC_MXU__POOL16 ─┬─ 00 ─ OPC_MXU_S32MUL
1600 * │ ├─ 01 ─ OPC_MXU_S32MULU
1601 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1602 * │ └─ 11 ─ OPC_MXU_S32EXTRV
1605 * ├─ 100111 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_D32SARW
1606 * │ ├─ 001 ─ OPC_MXU_S32ALN
1607 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1608 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1609 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1610 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1611 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1612 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1613 * ├─ 101000 ─ <not assigned>
1614 * ├─ 101001 ─ <not assigned>
1615 * ├─ 101010 ─ <not assigned>
1616 * ├─ 101011 ─ <not assigned>
1617 * ├─ 101100 ─ <not assigned>
1618 * ├─ 101101 ─ <not assigned>
1619 * ├─ 101110 ─ OPC_MXU_S32M2I
1620 * ├─ 101111 ─ OPC_MXU_S32I2M
1621 * ├─ 110000 ─ OPC_MXU_D32SLL
1622 * ├─ 110001 ─ OPC_MXU_D32SLR
1623 * ├─ 110010 ─ OPC_MXU_D32SARL
1624 * ├─ 110011 ─ OPC_MXU_D32SAR
1625 * ├─ 110100 ─ OPC_MXU_Q16SLL
1626 * ├─ 110101 ─ OPC_MXU_Q16SLR 20..18
1627 * ├─ 110110 ─ OPC_MXU__POOL18 ─┬─ 000 ─ OPC_MXU_D32SLLV
1628 * │ ├─ 001 ─ OPC_MXU_D32SLRV
1629 * │ ├─ 010 ─ OPC_MXU_D32SARV
1630 * │ ├─ 011 ─ OPC_MXU_Q16SLLV
1631 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1632 * │ └─ 101 ─ OPC_MXU_Q16SARV
1633 * ├─ 110111 ─ OPC_MXU_Q16SAR
1635 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1636 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1639 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1640 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1641 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1642 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1643 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1644 * │ └─ 101 ─ OPC_MXU_S32MOVN
1647 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1648 * │ └─ 01 ─ OPC_MXU_Q8MACSU
1649 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1650 * ├─ 111100 ─ OPC_MXU_Q8MADL
1651 * ├─ 111101 ─ OPC_MXU_S32SFL
1652 * ├─ 111110 ─ OPC_MXU_Q8SAD
1653 * └─ 111111 ─ <not assigned>
1658 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1659 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
1663 OPC_MXU_S32MADD
= 0x00,
1664 OPC_MXU_S32MADDU
= 0x01,
1665 /* not assigned 0x02 */
1666 OPC_MXU__POOL00
= 0x03,
1667 OPC_MXU_S32MSUB
= 0x04,
1668 OPC_MXU_S32MSUBU
= 0x05,
1669 OPC_MXU__POOL01
= 0x06,
1670 OPC_MXU__POOL02
= 0x07,
1671 OPC_MXU_D16MUL
= 0x08,
1672 OPC_MXU__POOL03
= 0x09,
1673 OPC_MXU_D16MAC
= 0x0A,
1674 OPC_MXU_D16MACF
= 0x0B,
1675 OPC_MXU_D16MADL
= 0x0C,
1676 OPC_MXU__POOL04
= 0x0D,
1677 OPC_MXU_Q16ADD
= 0x0E,
1678 OPC_MXU_D16MACE
= 0x0F,
1679 OPC_MXU__POOL05
= 0x10,
1680 OPC_MXU__POOL06
= 0x11,
1681 OPC_MXU__POOL07
= 0x12,
1682 OPC_MXU__POOL08
= 0x13,
1683 OPC_MXU__POOL09
= 0x14,
1684 OPC_MXU__POOL10
= 0x15,
1685 OPC_MXU__POOL11
= 0x16,
1686 OPC_MXU__POOL12
= 0x17,
1687 OPC_MXU_D32ADD
= 0x18,
1688 OPC_MXU__POOL13
= 0x19,
1689 /* not assigned 0x1A */
1690 OPC_MXU__POOL14
= 0x1B,
1691 OPC_MXU__POOL15
= 0x1C,
1692 OPC_MXU_Q8ACCE
= 0x1D,
1693 /* not assigned 0x1E */
1694 /* not assigned 0x1F */
1695 /* not assigned 0x20 */
1696 /* not assigned 0x21 */
1697 OPC_MXU_S8LDD
= 0x22,
1698 OPC_MXU_S8STD
= 0x23,
1699 OPC_MXU_S8LDI
= 0x24,
1700 OPC_MXU_S8SDI
= 0x25,
1701 OPC_MXU__POOL16
= 0x26,
1702 OPC_MXU__POOL17
= 0x27,
1704 /* not assigned 0x29 */
1705 OPC_MXU_S16LDD
= 0x2A,
1706 OPC_MXU_S16STD
= 0x2B,
1707 OPC_MXU_S16LDI
= 0x2C,
1708 OPC_MXU_S16SDI
= 0x2D,
1709 OPC_MXU_S32M2I
= 0x2E,
1710 OPC_MXU_S32I2M
= 0x2F,
1711 OPC_MXU_D32SLL
= 0x30,
1712 OPC_MXU_D32SLR
= 0x31,
1713 OPC_MXU_D32SARL
= 0x32,
1714 OPC_MXU_D32SAR
= 0x33,
1715 OPC_MXU_Q16SLL
= 0x34,
1716 OPC_MXU_Q16SLR
= 0x35,
1717 OPC_MXU__POOL18
= 0x36,
1718 OPC_MXU_Q16SAR
= 0x37,
1719 OPC_MXU__POOL19
= 0x38,
1720 OPC_MXU__POOL20
= 0x39,
1721 OPC_MXU__POOL21
= 0x3A,
1722 OPC_MXU_Q16SCOP
= 0x3B,
1723 OPC_MXU_Q8MADL
= 0x3C,
1724 OPC_MXU_S32SFL
= 0x3D,
1725 OPC_MXU_Q8SAD
= 0x3E,
1726 /* not assigned 0x3F */
1734 OPC_MXU_S32MAX
= 0x00,
1735 OPC_MXU_S32MIN
= 0x01,
1736 OPC_MXU_D16MAX
= 0x02,
1737 OPC_MXU_D16MIN
= 0x03,
1738 OPC_MXU_Q8MAX
= 0x04,
1739 OPC_MXU_Q8MIN
= 0x05,
1740 OPC_MXU_Q8SLT
= 0x06,
1741 OPC_MXU_Q8SLTU
= 0x07,
1748 OPC_MXU_S32SLT
= 0x00,
1749 OPC_MXU_D16SLT
= 0x01,
1750 OPC_MXU_D16AVG
= 0x02,
1751 OPC_MXU_D16AVGR
= 0x03,
1752 OPC_MXU_Q8AVG
= 0x04,
1753 OPC_MXU_Q8AVGR
= 0x05,
1754 OPC_MXU_Q8ADD
= 0x07,
1761 OPC_MXU_S32CPS
= 0x00,
1762 OPC_MXU_D16CPS
= 0x02,
1763 OPC_MXU_Q8ABD
= 0x04,
1764 OPC_MXU_Q16SAT
= 0x06,
1771 OPC_MXU_D16MULF
= 0x00,
1772 OPC_MXU_D16MULE
= 0x01,
1779 OPC_MXU_S16MAD
= 0x00,
1780 OPC_MXU_S16MAD_1
= 0x01,
1787 OPC_MXU_S32LDD
= 0x00,
1788 OPC_MXU_S32LDDR
= 0x01,
1795 OPC_MXU_S32STD
= 0x00,
1796 OPC_MXU_S32STDR
= 0x01,
1803 OPC_MXU_S32LDDV
= 0x00,
1804 OPC_MXU_S32LDDVR
= 0x01,
1811 OPC_MXU_S32STDV
= 0x00,
1812 OPC_MXU_S32STDVR
= 0x01,
1819 OPC_MXU_S32LDI
= 0x00,
1820 OPC_MXU_S32LDIR
= 0x01,
1827 OPC_MXU_S32SDI
= 0x00,
1828 OPC_MXU_S32SDIR
= 0x01,
1835 OPC_MXU_S32LDIV
= 0x00,
1836 OPC_MXU_S32LDIVR
= 0x01,
1843 OPC_MXU_S32SDIV
= 0x00,
1844 OPC_MXU_S32SDIVR
= 0x01,
1851 OPC_MXU_D32ACC
= 0x00,
1852 OPC_MXU_D32ACCM
= 0x01,
1853 OPC_MXU_D32ASUM
= 0x02,
1860 OPC_MXU_Q16ACC
= 0x00,
1861 OPC_MXU_Q16ACCM
= 0x01,
1862 OPC_MXU_Q16ASUM
= 0x02,
1869 OPC_MXU_Q8ADDE
= 0x00,
1870 OPC_MXU_D8SUM
= 0x01,
1871 OPC_MXU_D8SUMC
= 0x02,
1878 OPC_MXU_S32MUL
= 0x00,
1879 OPC_MXU_S32MULU
= 0x01,
1880 OPC_MXU_S32EXTR
= 0x02,
1881 OPC_MXU_S32EXTRV
= 0x03,
1888 OPC_MXU_D32SARW
= 0x00,
1889 OPC_MXU_S32ALN
= 0x01,
1890 OPC_MXU_S32ALNI
= 0x02,
1891 OPC_MXU_S32NOR
= 0x03,
1892 OPC_MXU_S32AND
= 0x04,
1893 OPC_MXU_S32OR
= 0x05,
1894 OPC_MXU_S32XOR
= 0x06,
1895 OPC_MXU_S32LUI
= 0x07,
1902 OPC_MXU_D32SLLV
= 0x00,
1903 OPC_MXU_D32SLRV
= 0x01,
1904 OPC_MXU_D32SARV
= 0x03,
1905 OPC_MXU_Q16SLLV
= 0x04,
1906 OPC_MXU_Q16SLRV
= 0x05,
1907 OPC_MXU_Q16SARV
= 0x07,
1914 OPC_MXU_Q8MUL
= 0x00,
1915 OPC_MXU_Q8MULSU
= 0x01,
1922 OPC_MXU_Q8MOVZ
= 0x00,
1923 OPC_MXU_Q8MOVN
= 0x01,
1924 OPC_MXU_D16MOVZ
= 0x02,
1925 OPC_MXU_D16MOVN
= 0x03,
1926 OPC_MXU_S32MOVZ
= 0x04,
1927 OPC_MXU_S32MOVN
= 0x05,
1934 OPC_MXU_Q8MAC
= 0x00,
1935 OPC_MXU_Q8MACSU
= 0x01,
1939 * Overview of the TX79-specific instruction set
1940 * =============================================
1942 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1943 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1944 * instructions and certain multimedia instructions (MMIs). These MMIs
1945 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1946 * or sixteen 8-bit paths.
1950 * The Toshiba TX System RISC TX79 Core Architecture manual,
1951 * https://wiki.qemu.org/File:C790.pdf
1953 * Three-Operand Multiply and Multiply-Add (4 instructions)
1954 * --------------------------------------------------------
1955 * MADD [rd,] rs, rt Multiply/Add
1956 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1957 * MULT [rd,] rs, rt Multiply (3-operand)
1958 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1960 * Multiply Instructions for Pipeline 1 (10 instructions)
1961 * ------------------------------------------------------
1962 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1963 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1964 * DIV1 rs, rt Divide Pipeline 1
1965 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1966 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1967 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1968 * MFHI1 rd Move From HI1 Register
1969 * MFLO1 rd Move From LO1 Register
1970 * MTHI1 rs Move To HI1 Register
1971 * MTLO1 rs Move To LO1 Register
1973 * Arithmetic (19 instructions)
1974 * ----------------------------
1975 * PADDB rd, rs, rt Parallel Add Byte
1976 * PSUBB rd, rs, rt Parallel Subtract Byte
1977 * PADDH rd, rs, rt Parallel Add Halfword
1978 * PSUBH rd, rs, rt Parallel Subtract Halfword
1979 * PADDW rd, rs, rt Parallel Add Word
1980 * PSUBW rd, rs, rt Parallel Subtract Word
1981 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1982 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1983 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1984 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1985 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1986 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1987 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1988 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1989 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1990 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1991 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1992 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1993 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1995 * Min/Max (4 instructions)
1996 * ------------------------
1997 * PMAXH rd, rs, rt Parallel Maximum Halfword
1998 * PMINH rd, rs, rt Parallel Minimum Halfword
1999 * PMAXW rd, rs, rt Parallel Maximum Word
2000 * PMINW rd, rs, rt Parallel Minimum Word
2002 * Absolute (2 instructions)
2003 * -------------------------
2004 * PABSH rd, rt Parallel Absolute Halfword
2005 * PABSW rd, rt Parallel Absolute Word
2007 * Logical (4 instructions)
2008 * ------------------------
2009 * PAND rd, rs, rt Parallel AND
2010 * POR rd, rs, rt Parallel OR
2011 * PXOR rd, rs, rt Parallel XOR
2012 * PNOR rd, rs, rt Parallel NOR
2014 * Shift (9 instructions)
2015 * ----------------------
2016 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2017 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2018 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2019 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2020 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2021 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2022 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2023 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2024 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2026 * Compare (6 instructions)
2027 * ------------------------
2028 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2029 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2030 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2031 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2032 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2033 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2035 * LZC (1 instruction)
2036 * -------------------
2037 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2039 * Quadword Load and Store (2 instructions)
2040 * ----------------------------------------
2041 * LQ rt, offset(base) Load Quadword
2042 * SQ rt, offset(base) Store Quadword
2044 * Multiply and Divide (19 instructions)
2045 * -------------------------------------
2046 * PMULTW rd, rs, rt Parallel Multiply Word
2047 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2048 * PDIVW rs, rt Parallel Divide Word
2049 * PDIVUW rs, rt Parallel Divide Unsigned Word
2050 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2051 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2052 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2053 * PMULTH rd, rs, rt Parallel Multiply Halfword
2054 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2055 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2056 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2057 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2058 * PDIVBW rs, rt Parallel Divide Broadcast Word
2059 * PMFHI rd Parallel Move From HI Register
2060 * PMFLO rd Parallel Move From LO Register
2061 * PMTHI rs Parallel Move To HI Register
2062 * PMTLO rs Parallel Move To LO Register
2063 * PMFHL rd Parallel Move From HI/LO Register
2064 * PMTHL rs Parallel Move To HI/LO Register
2066 * Pack/Extend (11 instructions)
2067 * -----------------------------
2068 * PPAC5 rd, rt Parallel Pack to 5 bits
2069 * PPACB rd, rs, rt Parallel Pack to Byte
2070 * PPACH rd, rs, rt Parallel Pack to Halfword
2071 * PPACW rd, rs, rt Parallel Pack to Word
2072 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2073 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2074 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2075 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2076 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2077 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2078 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2080 * Others (16 instructions)
2081 * ------------------------
2082 * PCPYH rd, rt Parallel Copy Halfword
2083 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2084 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2085 * PREVH rd, rt Parallel Reverse Halfword
2086 * PINTH rd, rs, rt Parallel Interleave Halfword
2087 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2088 * PEXEH rd, rt Parallel Exchange Even Halfword
2089 * PEXCH rd, rt Parallel Exchange Center Halfword
2090 * PEXEW rd, rt Parallel Exchange Even Word
2091 * PEXCW rd, rt Parallel Exchange Center Word
2092 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2093 * MFSA rd Move from Shift Amount Register
2094 * MTSA rs Move to Shift Amount Register
2095 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2096 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2097 * PROT3W rd, rt Parallel Rotate 3 Words
2099 * The TX79-specific Multimedia Instruction encodings
2100 * ==================================================
2102 * TX79 Multimedia Instruction encoding table keys:
2104 * * This code is reserved for future use. An attempt to execute it
2105 * causes a Reserved Instruction exception.
2106 * % This code indicates an instruction class. The instruction word
2107 * must be further decoded by examining additional tables that show
2108 * the values for other instruction fields.
2109 * # This code is reserved for the unsupported instructions DMULT,
2110 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2111 * to execute it causes a Reserved Instruction exception.
2113 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2116 * +--------+----------------------------------------+
2118 * +--------+----------------------------------------+
2120 * opcode bits 28..26
2121 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2122 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2123 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2124 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2125 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2126 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2127 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2128 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2129 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2130 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2131 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
2135 TX79_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
2136 TX79_LQ
= 0x1E << 26, /* Same as OPC_MSA */
2137 TX79_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
2141 * TX79 Multimedia Instructions with opcode field = MMI:
2144 * +--------+-------------------------------+--------+
2145 * | MMI | |function|
2146 * +--------+-------------------------------+--------+
2148 * function bits 2..0
2149 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2150 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2151 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2152 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2153 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2154 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2155 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2156 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2157 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2158 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2159 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2162 #define MASK_TX79_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2164 TX79_MMI_MADD
= 0x00 | TX79_CLASS_MMI
, /* Same as OPC_MADD */
2165 TX79_MMI_MADDU
= 0x01 | TX79_CLASS_MMI
, /* Same as OPC_MADDU */
2166 TX79_MMI_PLZCW
= 0x04 | TX79_CLASS_MMI
,
2167 TX79_MMI_CLASS_MMI0
= 0x08 | TX79_CLASS_MMI
,
2168 TX79_MMI_CLASS_MMI2
= 0x09 | TX79_CLASS_MMI
,
2169 TX79_MMI_MFHI1
= 0x10 | TX79_CLASS_MMI
, /* Same minor as OPC_MFHI */
2170 TX79_MMI_MTHI1
= 0x11 | TX79_CLASS_MMI
, /* Same minor as OPC_MTHI */
2171 TX79_MMI_MFLO1
= 0x12 | TX79_CLASS_MMI
, /* Same minor as OPC_MFLO */
2172 TX79_MMI_MTLO1
= 0x13 | TX79_CLASS_MMI
, /* Same minor as OPC_MTLO */
2173 TX79_MMI_MULT1
= 0x18 | TX79_CLASS_MMI
, /* Same minor as OPC_MULT */
2174 TX79_MMI_MULTU1
= 0x19 | TX79_CLASS_MMI
, /* Same minor as OPC_MULTU */
2175 TX79_MMI_DIV1
= 0x1A | TX79_CLASS_MMI
, /* Same minor as OPC_DIV */
2176 TX79_MMI_DIVU1
= 0x1B | TX79_CLASS_MMI
, /* Same minor as OPC_DIVU */
2177 TX79_MMI_MADD1
= 0x20 | TX79_CLASS_MMI
,
2178 TX79_MMI_MADDU1
= 0x21 | TX79_CLASS_MMI
,
2179 TX79_MMI_CLASS_MMI1
= 0x28 | TX79_CLASS_MMI
,
2180 TX79_MMI_CLASS_MMI3
= 0x29 | TX79_CLASS_MMI
,
2181 TX79_MMI_PMFHL
= 0x30 | TX79_CLASS_MMI
,
2182 TX79_MMI_PMTHL
= 0x31 | TX79_CLASS_MMI
,
2183 TX79_MMI_PSLLH
= 0x34 | TX79_CLASS_MMI
,
2184 TX79_MMI_PSRLH
= 0x36 | TX79_CLASS_MMI
,
2185 TX79_MMI_PSRAH
= 0x37 | TX79_CLASS_MMI
,
2186 TX79_MMI_PSLLW
= 0x3C | TX79_CLASS_MMI
,
2187 TX79_MMI_PSRLW
= 0x3E | TX79_CLASS_MMI
,
2188 TX79_MMI_PSRAW
= 0x3F | TX79_CLASS_MMI
,
2192 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI0:
2195 * +--------+----------------------+--------+--------+
2196 * | MMI | |function| MMI0 |
2197 * +--------+----------------------+--------+--------+
2199 * function bits 7..6
2200 * bits | 0 | 1 | 2 | 3
2201 * 10..8 | 00 | 01 | 10 | 11
2202 * -------+-------+-------+-------+-------
2203 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2204 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2205 * 2 010 | PADDB | PSUBB | PCGTB | *
2206 * 3 011 | * | * | * | *
2207 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2208 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2209 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2210 * 7 111 | * | * | PEXT5 | PPAC5
2213 #define MASK_TX79_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2215 TX79_MMI0_PADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI0
,
2216 TX79_MMI0_PSUBW
= (0x01 << 6) | TX79_MMI_CLASS_MMI0
,
2217 TX79_MMI0_PCGTW
= (0x02 << 6) | TX79_MMI_CLASS_MMI0
,
2218 TX79_MMI0_PMAXW
= (0x03 << 6) | TX79_MMI_CLASS_MMI0
,
2219 TX79_MMI0_PADDH
= (0x04 << 6) | TX79_MMI_CLASS_MMI0
,
2220 TX79_MMI0_PSUBH
= (0x05 << 6) | TX79_MMI_CLASS_MMI0
,
2221 TX79_MMI0_PCGTH
= (0x06 << 6) | TX79_MMI_CLASS_MMI0
,
2222 TX79_MMI0_PMAXH
= (0x07 << 6) | TX79_MMI_CLASS_MMI0
,
2223 TX79_MMI0_PADDB
= (0x08 << 6) | TX79_MMI_CLASS_MMI0
,
2224 TX79_MMI0_PSUBB
= (0x09 << 6) | TX79_MMI_CLASS_MMI0
,
2225 TX79_MMI0_PCGTB
= (0x0A << 6) | TX79_MMI_CLASS_MMI0
,
2226 TX79_MMI0_PADDSW
= (0x10 << 6) | TX79_MMI_CLASS_MMI0
,
2227 TX79_MMI0_PSUBSW
= (0x11 << 6) | TX79_MMI_CLASS_MMI0
,
2228 TX79_MMI0_PEXTLW
= (0x12 << 6) | TX79_MMI_CLASS_MMI0
,
2229 TX79_MMI0_PPACW
= (0x13 << 6) | TX79_MMI_CLASS_MMI0
,
2230 TX79_MMI0_PADDSH
= (0x14 << 6) | TX79_MMI_CLASS_MMI0
,
2231 TX79_MMI0_PSUBSH
= (0x15 << 6) | TX79_MMI_CLASS_MMI0
,
2232 TX79_MMI0_PEXTLH
= (0x16 << 6) | TX79_MMI_CLASS_MMI0
,
2233 TX79_MMI0_PPACH
= (0x17 << 6) | TX79_MMI_CLASS_MMI0
,
2234 TX79_MMI0_PADDSB
= (0x18 << 6) | TX79_MMI_CLASS_MMI0
,
2235 TX79_MMI0_PSUBSB
= (0x19 << 6) | TX79_MMI_CLASS_MMI0
,
2236 TX79_MMI0_PEXTLB
= (0x1A << 6) | TX79_MMI_CLASS_MMI0
,
2237 TX79_MMI0_PPACB
= (0x1B << 6) | TX79_MMI_CLASS_MMI0
,
2238 TX79_MMI0_PEXT5
= (0x1E << 6) | TX79_MMI_CLASS_MMI0
,
2239 TX79_MMI0_PPAC5
= (0x1F << 6) | TX79_MMI_CLASS_MMI0
,
2243 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI1:
2246 * +--------+----------------------+--------+--------+
2247 * | MMI | |function| MMI1 |
2248 * +--------+----------------------+--------+--------+
2250 * function bits 7..6
2251 * bits | 0 | 1 | 2 | 3
2252 * 10..8 | 00 | 01 | 10 | 11
2253 * -------+-------+-------+-------+-------
2254 * 0 000 | * | PABSW | PCEQW | PMINW
2255 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2256 * 2 010 | * | * | PCEQB | *
2257 * 3 011 | * | * | * | *
2258 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2259 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2260 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2261 * 7 111 | * | * | * | *
2264 #define MASK_TX79_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2266 TX79_MMI1_PABSW
= (0x01 << 6) | TX79_MMI_CLASS_MMI1
,
2267 TX79_MMI1_PCEQW
= (0x02 << 6) | TX79_MMI_CLASS_MMI1
,
2268 TX79_MMI1_PMINW
= (0x03 << 6) | TX79_MMI_CLASS_MMI1
,
2269 TX79_MMI1_PADSBH
= (0x04 << 6) | TX79_MMI_CLASS_MMI1
,
2270 TX79_MMI1_PABSH
= (0x05 << 6) | TX79_MMI_CLASS_MMI1
,
2271 TX79_MMI1_PCEQH
= (0x06 << 6) | TX79_MMI_CLASS_MMI1
,
2272 TX79_MMI1_PMINH
= (0x07 << 6) | TX79_MMI_CLASS_MMI1
,
2273 TX79_MMI1_PCEQB
= (0x0A << 6) | TX79_MMI_CLASS_MMI1
,
2274 TX79_MMI1_PADDUW
= (0x10 << 6) | TX79_MMI_CLASS_MMI1
,
2275 TX79_MMI1_PSUBUW
= (0x11 << 6) | TX79_MMI_CLASS_MMI1
,
2276 TX79_MMI1_PEXTUW
= (0x12 << 6) | TX79_MMI_CLASS_MMI1
,
2277 TX79_MMI1_PADDUH
= (0x14 << 6) | TX79_MMI_CLASS_MMI1
,
2278 TX79_MMI1_PSUBUH
= (0x15 << 6) | TX79_MMI_CLASS_MMI1
,
2279 TX79_MMI1_PEXTUH
= (0x16 << 6) | TX79_MMI_CLASS_MMI1
,
2280 TX79_MMI1_PADDUB
= (0x18 << 6) | TX79_MMI_CLASS_MMI1
,
2281 TX79_MMI1_PSUBUB
= (0x19 << 6) | TX79_MMI_CLASS_MMI1
,
2282 TX79_MMI1_PEXTUB
= (0x1A << 6) | TX79_MMI_CLASS_MMI1
,
2283 TX79_MMI1_QFSRV
= (0x1B << 6) | TX79_MMI_CLASS_MMI1
,
2287 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI2:
2290 * +--------+----------------------+--------+--------+
2291 * | MMI | |function| MMI2 |
2292 * +--------+----------------------+--------+--------+
2294 * function bits 7..6
2295 * bits | 0 | 1 | 2 | 3
2296 * 10..8 | 00 | 01 | 10 | 11
2297 * -------+-------+-------+-------+-------
2298 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2299 * 1 001 | PMSUBW| * | * | *
2300 * 2 010 | PMFHI | PMFLO | PINTH | *
2301 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2302 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2303 * 5 101 | PMSUBH| PHMSBH| * | *
2304 * 6 110 | * | * | PEXEH | PREVH
2305 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2308 #define MASK_TX79_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2310 TX79_MMI2_PMADDW
= (0x00 << 6) | TX79_MMI_CLASS_MMI2
,
2311 TX79_MMI2_PSLLVW
= (0x02 << 6) | TX79_MMI_CLASS_MMI2
,
2312 TX79_MMI2_PSRLVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI2
,
2313 TX79_MMI2_PMSUBW
= (0x04 << 6) | TX79_MMI_CLASS_MMI2
,
2314 TX79_MMI2_PMFHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI2
,
2315 TX79_MMI2_PMFLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI2
,
2316 TX79_MMI2_PINTH
= (0x0A << 6) | TX79_MMI_CLASS_MMI2
,
2317 TX79_MMI2_PMULTW
= (0x0C << 6) | TX79_MMI_CLASS_MMI2
,
2318 TX79_MMI2_PDIVW
= (0x0D << 6) | TX79_MMI_CLASS_MMI2
,
2319 TX79_MMI2_PCPYLD
= (0x0E << 6) | TX79_MMI_CLASS_MMI2
,
2320 TX79_MMI2_PMADDH
= (0x10 << 6) | TX79_MMI_CLASS_MMI2
,
2321 TX79_MMI2_PHMADH
= (0x11 << 6) | TX79_MMI_CLASS_MMI2
,
2322 TX79_MMI2_PAND
= (0x12 << 6) | TX79_MMI_CLASS_MMI2
,
2323 TX79_MMI2_PXOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI2
,
2324 TX79_MMI2_PMSUBH
= (0x14 << 6) | TX79_MMI_CLASS_MMI2
,
2325 TX79_MMI2_PHMSBH
= (0x15 << 6) | TX79_MMI_CLASS_MMI2
,
2326 TX79_MMI2_PEXEH
= (0x1A << 6) | TX79_MMI_CLASS_MMI2
,
2327 TX79_MMI2_PREVH
= (0x1B << 6) | TX79_MMI_CLASS_MMI2
,
2328 TX79_MMI2_PMULTH
= (0x1C << 6) | TX79_MMI_CLASS_MMI2
,
2329 TX79_MMI2_PDIVBW
= (0x1D << 6) | TX79_MMI_CLASS_MMI2
,
2330 TX79_MMI2_PEXEW
= (0x1E << 6) | TX79_MMI_CLASS_MMI2
,
2331 TX79_MMI2_PROT3W
= (0x1F << 6) | TX79_MMI_CLASS_MMI2
,
2335 * TX79 Multimedia Instructions with opcode field = MMI and bits 5..0 = MMI3:
2338 * +--------+----------------------+--------+--------+
2339 * | MMI | |function| MMI3 |
2340 * +--------+----------------------+--------+--------+
2342 * function bits 7..6
2343 * bits | 0 | 1 | 2 | 3
2344 * 10..8 | 00 | 01 | 10 | 11
2345 * -------+-------+-------+-------+-------
2346 * 0 000 |PMADDUW| * | * | PSRAVW
2347 * 1 001 | * | * | * | *
2348 * 2 010 | PMTHI | PMTLO | PINTEH| *
2349 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2350 * 4 100 | * | * | POR | PNOR
2351 * 5 101 | * | * | * | *
2352 * 6 110 | * | * | PEXCH | PCPYH
2353 * 7 111 | * | * | PEXCW | *
2356 #define MASK_TX79_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2358 TX79_MMI3_PMADDUW
= (0x00 << 6) | TX79_MMI_CLASS_MMI3
,
2359 TX79_MMI3_PSRAVW
= (0x03 << 6) | TX79_MMI_CLASS_MMI3
,
2360 TX79_MMI3_PMTHI
= (0x08 << 6) | TX79_MMI_CLASS_MMI3
,
2361 TX79_MMI3_PMTLO
= (0x09 << 6) | TX79_MMI_CLASS_MMI3
,
2362 TX79_MMI3_PINTEH
= (0x0A << 6) | TX79_MMI_CLASS_MMI3
,
2363 TX79_MMI3_PMULTUW
= (0x0C << 6) | TX79_MMI_CLASS_MMI3
,
2364 TX79_MMI3_PDIVUW
= (0x0D << 6) | TX79_MMI_CLASS_MMI3
,
2365 TX79_MMI3_PCPYUD
= (0x0E << 6) | TX79_MMI_CLASS_MMI3
,
2366 TX79_MMI3_POR
= (0x12 << 6) | TX79_MMI_CLASS_MMI3
,
2367 TX79_MMI3_PNOR
= (0x13 << 6) | TX79_MMI_CLASS_MMI3
,
2368 TX79_MMI3_PEXCH
= (0x1A << 6) | TX79_MMI_CLASS_MMI3
,
2369 TX79_MMI3_PCPYH
= (0x1B << 6) | TX79_MMI_CLASS_MMI3
,
2370 TX79_MMI3_PEXCW
= (0x1E << 6) | TX79_MMI_CLASS_MMI3
,
2373 /* global register indices */
2374 static TCGv cpu_gpr
[32], cpu_PC
;
2375 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2376 static TCGv cpu_dspctrl
, btarget
, bcond
;
2377 static TCGv_i32 hflags
;
2378 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2379 static TCGv_i64 fpu_f64
[32];
2380 static TCGv_i64 msa_wr_d
[64];
2382 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers.  Naming: <ret count>e<TCG arg count>i — each wrapper
 * materializes its trailing integer argument as a TCGv_i32 constant, invokes
 * gen_helper_<name> with cpu_env, and frees the temporary.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
/*
 * Per-translation-block decoder state.
 * NOTE(review): this view of the struct is incomplete — fields in the
 * numbered gaps (2430, 2437, 2441..2450, 2452..) are not visible here.
 */
2426 typedef struct DisasContext
{
/* Common translator bookkeeping (pc_next, is_jmp, ...). */
2427 DisasContextBase base
;
/* PC value last written to cpu_PC; used to skip redundant stores. */
2428 target_ulong saved_pc
;
2429 target_ulong page_start
;
/* ISA/ASE feature bits of the CPU being translated. */
2431 uint64_t insn_flags
;
/* Cached CP0 configuration registers (read at translation time). */
2432 int32_t CP0_Config1
;
2433 int32_t CP0_Config2
;
2434 int32_t CP0_Config3
;
2435 int32_t CP0_Config5
;
2436 /* Routine used to access memory */
/* Default memop flags (e.g. alignment) applied to generated loads/stores. */
2438 TCGMemOp default_tcg_memop_mask
;
/* Current hflags and the value last synced to the hflags TCG global. */
2439 uint32_t hflags
, saved_hflags
;
/* Branch target tracked for the instruction in a delay slot. */
2440 target_ulong btarget
;
2451 int CP0_LLAddr_shift
;
/* Translator exit reasons beyond the generic DisasJumpType values. */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1

/* ABI names of the 32 general-purpose registers, indexed by register number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

/* Names of the HI registers, one per DSP accumulator. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

/* Names of the LO registers, one per DSP accumulator. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Names of the MSA registers: two 64-bit halves (d0/d1) per w-register. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Log a disassembly message when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic (PC, raw opcode, decoded fields). */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2522 /* General purpose registers moves. */
2523 static inline void gen_load_gpr (TCGv t
, int reg
)
2526 tcg_gen_movi_tl(t
, 0);
2528 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2531 static inline void gen_store_gpr (TCGv t
, int reg
)
2534 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2537 /* Moves to/from shadow registers. */
2538 static inline void gen_load_srsgpr (int from
, int to
)
2540 TCGv t0
= tcg_temp_new();
2543 tcg_gen_movi_tl(t0
, 0);
2545 TCGv_i32 t2
= tcg_temp_new_i32();
2546 TCGv_ptr addr
= tcg_temp_new_ptr();
2548 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2549 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2550 tcg_gen_andi_i32(t2
, t2
, 0xf);
2551 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2552 tcg_gen_ext_i32_ptr(addr
, t2
);
2553 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2555 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2556 tcg_temp_free_ptr(addr
);
2557 tcg_temp_free_i32(t2
);
2559 gen_store_gpr(t0
, to
);
2563 static inline void gen_store_srsgpr (int from
, int to
)
2566 TCGv t0
= tcg_temp_new();
2567 TCGv_i32 t2
= tcg_temp_new_i32();
2568 TCGv_ptr addr
= tcg_temp_new_ptr();
2570 gen_load_gpr(t0
, from
);
2571 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2572 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2573 tcg_gen_andi_i32(t2
, t2
, 0xf);
2574 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2575 tcg_gen_ext_i32_ptr(addr
, t2
);
2576 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2578 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2579 tcg_temp_free_ptr(addr
);
2580 tcg_temp_free_i32(t2
);
2586 static inline void gen_save_pc(target_ulong pc
)
2588 tcg_gen_movi_tl(cpu_PC
, pc
);
2591 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2593 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2594 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2595 gen_save_pc(ctx
->base
.pc_next
);
2596 ctx
->saved_pc
= ctx
->base
.pc_next
;
2598 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2599 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2600 ctx
->saved_hflags
= ctx
->hflags
;
2601 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2607 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2613 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2615 ctx
->saved_hflags
= ctx
->hflags
;
2616 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2622 ctx
->btarget
= env
->btarget
;
2627 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2629 TCGv_i32 texcp
= tcg_const_i32(excp
);
2630 TCGv_i32 terr
= tcg_const_i32(err
);
2631 save_cpu_state(ctx
, 1);
2632 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2633 tcg_temp_free_i32(terr
);
2634 tcg_temp_free_i32(texcp
);
2635 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2638 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2640 gen_helper_0e0i(raise_exception
, excp
);
2643 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2645 generate_exception_err(ctx
, excp
, 0);
2648 /* Floating point register moves. */
2649 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2651 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2652 generate_exception(ctx
, EXCP_RI
);
2654 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2657 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2660 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2661 generate_exception(ctx
, EXCP_RI
);
2663 t64
= tcg_temp_new_i64();
2664 tcg_gen_extu_i32_i64(t64
, t
);
2665 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2666 tcg_temp_free_i64(t64
);
2669 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2671 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2672 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2674 gen_load_fpr32(ctx
, t
, reg
| 1);
2678 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2680 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2681 TCGv_i64 t64
= tcg_temp_new_i64();
2682 tcg_gen_extu_i32_i64(t64
, t
);
2683 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2684 tcg_temp_free_i64(t64
);
2686 gen_store_fpr32(ctx
, t
, reg
| 1);
2690 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2692 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2693 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2695 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2699 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2701 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2702 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2705 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2706 t0
= tcg_temp_new_i64();
2707 tcg_gen_shri_i64(t0
, t
, 32);
2708 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2709 tcg_temp_free_i64(t0
);
/*
 * Map an FP condition-code number to its FCSR bit position:
 * cc 0 lives at bit 23, cc 1..7 at bits 25..31 (bit 24 is skipped).
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2721 /* Addresses computation */
2722 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2724 tcg_gen_add_tl(ret
, arg0
, arg1
);
2726 #if defined(TARGET_MIPS64)
2727 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2728 tcg_gen_ext32s_i64(ret
, ret
);
2733 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2736 tcg_gen_addi_tl(ret
, base
, ofs
);
2738 #if defined(TARGET_MIPS64)
2739 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2740 tcg_gen_ext32s_i64(ret
, ret
);
2745 /* Addresses computation (translation time) */
2746 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2749 target_long sum
= base
+ offset
;
2751 #if defined(TARGET_MIPS64)
2752 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2759 /* Sign-extract the low 32-bits to a target_long. */
2760 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2762 #if defined(TARGET_MIPS64)
2763 tcg_gen_ext32s_i64(ret
, arg
);
2765 tcg_gen_extrl_i64_i32(ret
, arg
);
2769 /* Sign-extract the high 32-bits to a target_long. */
2770 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2772 #if defined(TARGET_MIPS64)
2773 tcg_gen_sari_i64(ret
, arg
, 32);
2775 tcg_gen_extrh_i64_i32(ret
, arg
);
2779 static inline void check_cp0_enabled(DisasContext
*ctx
)
2781 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2782 generate_exception_err(ctx
, EXCP_CpU
, 0);
2785 static inline void check_cp1_enabled(DisasContext
*ctx
)
2787 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2788 generate_exception_err(ctx
, EXCP_CpU
, 1);
2791 /* Verify that the processor is running with COP1X instructions enabled.
2792 This is associated with the nabla symbol in the MIPS32 and MIPS64
2795 static inline void check_cop1x(DisasContext
*ctx
)
2797 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2798 generate_exception_end(ctx
, EXCP_RI
);
2801 /* Verify that the processor is running with 64-bit floating-point
2802 operations enabled. */
2804 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2806 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2807 generate_exception_end(ctx
, EXCP_RI
);
2811 * Verify if floating point register is valid; an operation is not defined
2812 * if bit 0 of any register specification is set and the FR bit in the
2813 * Status register equals zero, since the register numbers specify an
2814 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2815 * in the Status register equals one, both even and odd register numbers
2816 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2818 * Multiple 64 bit wide registers can be checked by calling
2819 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2821 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2823 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2824 generate_exception_end(ctx
, EXCP_RI
);
2827 /* Verify that the processor is running with DSP instructions enabled.
2828 This is enabled by CP0 Status register MX(24) bit.
2831 static inline void check_dsp(DisasContext
*ctx
)
2833 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2834 if (ctx
->insn_flags
& ASE_DSP
) {
2835 generate_exception_end(ctx
, EXCP_DSPDIS
);
2837 generate_exception_end(ctx
, EXCP_RI
);
2842 static inline void check_dsp_r2(DisasContext
*ctx
)
2844 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2845 if (ctx
->insn_flags
& ASE_DSP
) {
2846 generate_exception_end(ctx
, EXCP_DSPDIS
);
2848 generate_exception_end(ctx
, EXCP_RI
);
2853 static inline void check_dsp_r3(DisasContext
*ctx
)
2855 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2856 if (ctx
->insn_flags
& ASE_DSP
) {
2857 generate_exception_end(ctx
, EXCP_DSPDIS
);
2859 generate_exception_end(ctx
, EXCP_RI
);
2864 /* This code generates a "reserved instruction" exception if the
2865 CPU does not support the instruction set corresponding to flags. */
2866 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2868 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2869 generate_exception_end(ctx
, EXCP_RI
);
2873 /* This code generates a "reserved instruction" exception if the
2874 CPU has corresponding flag set which indicates that the instruction
2875 has been removed. */
2876 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2878 if (unlikely(ctx
->insn_flags
& flags
)) {
2879 generate_exception_end(ctx
, EXCP_RI
);
2884 * The Linux kernel traps certain reserved instruction exceptions to
2885 * emulate the corresponding instructions. QEMU is the kernel in user
2886 * mode, so those traps are emulated by accepting the instructions.
2888 * A reserved instruction exception is generated for flagged CPUs if
2889 * QEMU runs in system mode.
2891 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
2893 #ifndef CONFIG_USER_ONLY
2894 check_insn_opc_removed(ctx
, flags
);
2898 /* This code generates a "reserved instruction" exception if the
2899 CPU does not support 64-bit paired-single (PS) floating point data type */
2900 static inline void check_ps(DisasContext
*ctx
)
2902 if (unlikely(!ctx
->ps
)) {
2903 generate_exception(ctx
, EXCP_RI
);
2905 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/*
 * Raise a Reserved Instruction exception if 64-bit instructions are not
 * enabled.
 */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
2918 #ifndef CONFIG_USER_ONLY
2919 static inline void check_mvh(DisasContext
*ctx
)
2921 if (unlikely(!ctx
->mvh
)) {
2922 generate_exception(ctx
, EXCP_RI
);
2928 * This code generates a "reserved instruction" exception if the
2929 * Config5 XNP bit is set.
2931 static inline void check_xnp(DisasContext
*ctx
)
2933 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2934 generate_exception_end(ctx
, EXCP_RI
);
2938 #ifndef CONFIG_USER_ONLY
2940 * This code generates a "reserved instruction" exception if the
2941 * Config3 PW bit is NOT set.
2943 static inline void check_pw(DisasContext
*ctx
)
2945 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2946 generate_exception_end(ctx
, EXCP_RI
);
2952 * This code generates a "reserved instruction" exception if the
2953 * Config3 MT bit is NOT set.
2955 static inline void check_mt(DisasContext
*ctx
)
2957 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2958 generate_exception_end(ctx
, EXCP_RI
);
2962 #ifndef CONFIG_USER_ONLY
2964 * This code generates a "coprocessor unusable" exception if CP0 is not
2965 * available, and, if that is not the case, generates a "reserved instruction"
2966 * exception if the Config5 MT bit is NOT set. This is needed for availability
2967 * control of some of MT ASE instructions.
2969 static inline void check_cp0_mt(DisasContext
*ctx
)
2971 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2972 generate_exception_err(ctx
, EXCP_CpU
, 0);
2974 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2975 generate_exception_err(ctx
, EXCP_RI
, 0);
2982 * This code generates a "reserved instruction" exception if the
2983 * Config5 NMS bit is set.
2985 static inline void check_nms(DisasContext
*ctx
)
2987 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2988 generate_exception_end(ctx
, EXCP_RI
);
2993 /* Define small wrappers for gen_load_fpr* so that we have a uniform
2994 calling interface for 32 and 64-bit FPRs. No sense in changing
2995 all callers for gen_load_fpr32 when we need the CTX parameter for
2997 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
2998 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
2999 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3000 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3001 int ft, int fs, int cc) \
3003 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3004 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3013 check_cp1_registers(ctx, fs | ft); \
3021 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3022 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3024 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3025 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3026 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3027 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3028 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3029 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3030 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3031 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3032 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3033 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3034 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3035 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3036 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3037 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3038 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3039 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3042 tcg_temp_free_i##bits (fp0); \
3043 tcg_temp_free_i##bits (fp1); \
3046 FOP_CONDS(, 0, d
, FMT_D
, 64)
3047 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3048 FOP_CONDS(, 0, s
, FMT_S
, 32)
3049 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3050 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3051 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3054 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3055 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3056 int ft, int fs, int fd) \
3058 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3059 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3060 if (ifmt == FMT_D) { \
3061 check_cp1_registers(ctx, fs | ft | fd); \
3063 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3064 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3067 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3070 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3073 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3076 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3079 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3082 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3085 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3088 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3091 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3094 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3097 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3100 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3103 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3106 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3109 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3112 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3115 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3118 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3121 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3124 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3127 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3130 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3136 tcg_temp_free_i ## bits (fp0); \
3137 tcg_temp_free_i ## bits (fp1); \
3140 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3141 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3143 #undef gen_ldcmp_fpr32
3144 #undef gen_ldcmp_fpr64
3146 /* load/store instructions. */
3147 #ifdef CONFIG_USER_ONLY
3148 #define OP_LD_ATOMIC(insn,fname) \
3149 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3150 DisasContext *ctx) \
3152 TCGv t0 = tcg_temp_new(); \
3153 tcg_gen_mov_tl(t0, arg1); \
3154 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3155 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3156 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3157 tcg_temp_free(t0); \
3160 #define OP_LD_ATOMIC(insn,fname) \
3161 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3162 DisasContext *ctx) \
3164 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3167 OP_LD_ATOMIC(ll
,ld32s
);
3168 #if defined(TARGET_MIPS64)
3169 OP_LD_ATOMIC(lld
,ld64
);
3173 #ifdef CONFIG_USER_ONLY
3174 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3175 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3176 DisasContext *ctx) \
3178 TCGv t0 = tcg_temp_new(); \
3179 TCGLabel *l1 = gen_new_label(); \
3180 TCGLabel *l2 = gen_new_label(); \
3182 tcg_gen_andi_tl(t0, arg2, almask); \
3183 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3184 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3185 generate_exception(ctx, EXCP_AdES); \
3186 gen_set_label(l1); \
3187 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3188 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3189 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3190 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3191 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3192 generate_exception_end(ctx, EXCP_SC); \
3193 gen_set_label(l2); \
3194 tcg_gen_movi_tl(t0, 0); \
3195 gen_store_gpr(t0, rt); \
3196 tcg_temp_free(t0); \
3199 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3200 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3201 DisasContext *ctx) \
3203 TCGv t0 = tcg_temp_new(); \
3204 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3205 gen_store_gpr(t0, rt); \
3206 tcg_temp_free(t0); \
3209 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3210 #if defined(TARGET_MIPS64)
3211 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3215 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3216 int base
, int offset
)
3219 tcg_gen_movi_tl(addr
, offset
);
3220 } else if (offset
== 0) {
3221 gen_load_gpr(addr
, base
);
3223 tcg_gen_movi_tl(addr
, offset
);
3224 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3228 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3230 target_ulong pc
= ctx
->base
.pc_next
;
3232 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3233 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3238 pc
&= ~(target_ulong
)3;
3243 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3244 int rt
, int base
, int offset
)
3247 int mem_idx
= ctx
->mem_idx
;
3249 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3250 /* Loongson CPU uses a load to zero register for prefetch.
3251 We emulate it as a NOP. On other CPU we must perform the
3252 actual memory access. */
3256 t0
= tcg_temp_new();
3257 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3260 #if defined(TARGET_MIPS64)
3262 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3263 ctx
->default_tcg_memop_mask
);
3264 gen_store_gpr(t0
, rt
);
3267 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3268 ctx
->default_tcg_memop_mask
);
3269 gen_store_gpr(t0
, rt
);
3273 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3274 gen_store_gpr(t0
, rt
);
3277 t1
= tcg_temp_new();
3278 /* Do a byte access to possibly trigger a page
3279 fault with the unaligned address. */
3280 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3281 tcg_gen_andi_tl(t1
, t0
, 7);
3282 #ifndef TARGET_WORDS_BIGENDIAN
3283 tcg_gen_xori_tl(t1
, t1
, 7);
3285 tcg_gen_shli_tl(t1
, t1
, 3);
3286 tcg_gen_andi_tl(t0
, t0
, ~7);
3287 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3288 tcg_gen_shl_tl(t0
, t0
, t1
);
3289 t2
= tcg_const_tl(-1);
3290 tcg_gen_shl_tl(t2
, t2
, t1
);
3291 gen_load_gpr(t1
, rt
);
3292 tcg_gen_andc_tl(t1
, t1
, t2
);
3294 tcg_gen_or_tl(t0
, t0
, t1
);
3296 gen_store_gpr(t0
, rt
);
3299 t1
= tcg_temp_new();
3300 /* Do a byte access to possibly trigger a page
3301 fault with the unaligned address. */
3302 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3303 tcg_gen_andi_tl(t1
, t0
, 7);
3304 #ifdef TARGET_WORDS_BIGENDIAN
3305 tcg_gen_xori_tl(t1
, t1
, 7);
3307 tcg_gen_shli_tl(t1
, t1
, 3);
3308 tcg_gen_andi_tl(t0
, t0
, ~7);
3309 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3310 tcg_gen_shr_tl(t0
, t0
, t1
);
3311 tcg_gen_xori_tl(t1
, t1
, 63);
3312 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3313 tcg_gen_shl_tl(t2
, t2
, t1
);
3314 gen_load_gpr(t1
, rt
);
3315 tcg_gen_and_tl(t1
, t1
, t2
);
3317 tcg_gen_or_tl(t0
, t0
, t1
);
3319 gen_store_gpr(t0
, rt
);
3322 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3323 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3325 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3326 gen_store_gpr(t0
, rt
);
3330 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3331 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3333 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3334 gen_store_gpr(t0
, rt
);
3337 mem_idx
= MIPS_HFLAG_UM
;
3340 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3341 ctx
->default_tcg_memop_mask
);
3342 gen_store_gpr(t0
, rt
);
3345 mem_idx
= MIPS_HFLAG_UM
;
3348 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3349 ctx
->default_tcg_memop_mask
);
3350 gen_store_gpr(t0
, rt
);
3353 mem_idx
= MIPS_HFLAG_UM
;
3356 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3357 ctx
->default_tcg_memop_mask
);
3358 gen_store_gpr(t0
, rt
);
3361 mem_idx
= MIPS_HFLAG_UM
;
3364 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3365 gen_store_gpr(t0
, rt
);
3368 mem_idx
= MIPS_HFLAG_UM
;
3371 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3372 gen_store_gpr(t0
, rt
);
3375 mem_idx
= MIPS_HFLAG_UM
;
3378 t1
= tcg_temp_new();
3379 /* Do a byte access to possibly trigger a page
3380 fault with the unaligned address. */
3381 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3382 tcg_gen_andi_tl(t1
, t0
, 3);
3383 #ifndef TARGET_WORDS_BIGENDIAN
3384 tcg_gen_xori_tl(t1
, t1
, 3);
3386 tcg_gen_shli_tl(t1
, t1
, 3);
3387 tcg_gen_andi_tl(t0
, t0
, ~3);
3388 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3389 tcg_gen_shl_tl(t0
, t0
, t1
);
3390 t2
= tcg_const_tl(-1);
3391 tcg_gen_shl_tl(t2
, t2
, t1
);
3392 gen_load_gpr(t1
, rt
);
3393 tcg_gen_andc_tl(t1
, t1
, t2
);
3395 tcg_gen_or_tl(t0
, t0
, t1
);
3397 tcg_gen_ext32s_tl(t0
, t0
);
3398 gen_store_gpr(t0
, rt
);
3401 mem_idx
= MIPS_HFLAG_UM
;
3404 t1
= tcg_temp_new();
3405 /* Do a byte access to possibly trigger a page
3406 fault with the unaligned address. */
3407 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3408 tcg_gen_andi_tl(t1
, t0
, 3);
3409 #ifdef TARGET_WORDS_BIGENDIAN
3410 tcg_gen_xori_tl(t1
, t1
, 3);
3412 tcg_gen_shli_tl(t1
, t1
, 3);
3413 tcg_gen_andi_tl(t0
, t0
, ~3);
3414 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3415 tcg_gen_shr_tl(t0
, t0
, t1
);
3416 tcg_gen_xori_tl(t1
, t1
, 31);
3417 t2
= tcg_const_tl(0xfffffffeull
);
3418 tcg_gen_shl_tl(t2
, t2
, t1
);
3419 gen_load_gpr(t1
, rt
);
3420 tcg_gen_and_tl(t1
, t1
, t2
);
3422 tcg_gen_or_tl(t0
, t0
, t1
);
3424 tcg_gen_ext32s_tl(t0
, t0
);
3425 gen_store_gpr(t0
, rt
);
3428 mem_idx
= MIPS_HFLAG_UM
;
3432 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3433 gen_store_gpr(t0
, rt
);
3439 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3440 uint32_t reg1
, uint32_t reg2
)
3442 TCGv taddr
= tcg_temp_new();
3443 TCGv_i64 tval
= tcg_temp_new_i64();
3444 TCGv tmp1
= tcg_temp_new();
3445 TCGv tmp2
= tcg_temp_new();
3447 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3448 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3449 #ifdef TARGET_WORDS_BIGENDIAN
3450 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3452 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3454 gen_store_gpr(tmp1
, reg1
);
3455 tcg_temp_free(tmp1
);
3456 gen_store_gpr(tmp2
, reg2
);
3457 tcg_temp_free(tmp2
);
3458 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3459 tcg_temp_free_i64(tval
);
3460 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3461 tcg_temp_free(taddr
);
3465 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3466 int base
, int offset
)
3468 TCGv t0
= tcg_temp_new();
3469 TCGv t1
= tcg_temp_new();
3470 int mem_idx
= ctx
->mem_idx
;
3472 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3473 gen_load_gpr(t1
, rt
);
3475 #if defined(TARGET_MIPS64)
3477 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3478 ctx
->default_tcg_memop_mask
);
3481 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3484 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3488 mem_idx
= MIPS_HFLAG_UM
;
3491 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3492 ctx
->default_tcg_memop_mask
);
3495 mem_idx
= MIPS_HFLAG_UM
;
3498 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3499 ctx
->default_tcg_memop_mask
);
3502 mem_idx
= MIPS_HFLAG_UM
;
3505 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3508 mem_idx
= MIPS_HFLAG_UM
;
3511 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3514 mem_idx
= MIPS_HFLAG_UM
;
3517 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3525 /* Store conditional */
3526 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3527 int base
, int16_t offset
)
3530 int mem_idx
= ctx
->mem_idx
;
3532 #ifdef CONFIG_USER_ONLY
3533 t0
= tcg_temp_local_new();
3534 t1
= tcg_temp_local_new();
3536 t0
= tcg_temp_new();
3537 t1
= tcg_temp_new();
3539 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3540 gen_load_gpr(t1
, rt
);
3542 #if defined(TARGET_MIPS64)
3545 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3549 mem_idx
= MIPS_HFLAG_UM
;
3553 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3560 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3561 uint32_t reg1
, uint32_t reg2
)
3563 TCGv taddr
= tcg_temp_local_new();
3564 TCGv lladdr
= tcg_temp_local_new();
3565 TCGv_i64 tval
= tcg_temp_new_i64();
3566 TCGv_i64 llval
= tcg_temp_new_i64();
3567 TCGv_i64 val
= tcg_temp_new_i64();
3568 TCGv tmp1
= tcg_temp_new();
3569 TCGv tmp2
= tcg_temp_new();
3570 TCGLabel
*lab_fail
= gen_new_label();
3571 TCGLabel
*lab_done
= gen_new_label();
3573 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3575 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3576 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3578 gen_load_gpr(tmp1
, reg1
);
3579 gen_load_gpr(tmp2
, reg2
);
3581 #ifdef TARGET_WORDS_BIGENDIAN
3582 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3584 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3587 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3588 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3589 ctx
->mem_idx
, MO_64
);
3591 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3593 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3595 gen_set_label(lab_fail
);
3598 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3600 gen_set_label(lab_done
);
3601 tcg_gen_movi_tl(lladdr
, -1);
3602 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3605 /* Load and store */
3606 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3609 /* Don't do NOP if destination is zero: we must perform the actual
3614 TCGv_i32 fp0
= tcg_temp_new_i32();
3615 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3616 ctx
->default_tcg_memop_mask
);
3617 gen_store_fpr32(ctx
, fp0
, ft
);
3618 tcg_temp_free_i32(fp0
);
3623 TCGv_i32 fp0
= tcg_temp_new_i32();
3624 gen_load_fpr32(ctx
, fp0
, ft
);
3625 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3626 ctx
->default_tcg_memop_mask
);
3627 tcg_temp_free_i32(fp0
);
3632 TCGv_i64 fp0
= tcg_temp_new_i64();
3633 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3634 ctx
->default_tcg_memop_mask
);
3635 gen_store_fpr64(ctx
, fp0
, ft
);
3636 tcg_temp_free_i64(fp0
);
3641 TCGv_i64 fp0
= tcg_temp_new_i64();
3642 gen_load_fpr64(ctx
, fp0
, ft
);
3643 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3644 ctx
->default_tcg_memop_mask
);
3645 tcg_temp_free_i64(fp0
);
3649 MIPS_INVAL("flt_ldst");
3650 generate_exception_end(ctx
, EXCP_RI
);
3655 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3656 int rs
, int16_t imm
)
3658 TCGv t0
= tcg_temp_new();
3660 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3661 check_cp1_enabled(ctx
);
3665 check_insn(ctx
, ISA_MIPS2
);
3668 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3669 gen_flt_ldst(ctx
, op
, rt
, t0
);
3672 generate_exception_err(ctx
, EXCP_CpU
, 1);
3677 /* Arithmetic with immediate operand */
3678 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3679 int rt
, int rs
, int imm
)
3681 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3683 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3684 /* If no destination, treat it as a NOP.
3685 For addi, we must generate the overflow exception when needed. */
3691 TCGv t0
= tcg_temp_local_new();
3692 TCGv t1
= tcg_temp_new();
3693 TCGv t2
= tcg_temp_new();
3694 TCGLabel
*l1
= gen_new_label();
3696 gen_load_gpr(t1
, rs
);
3697 tcg_gen_addi_tl(t0
, t1
, uimm
);
3698 tcg_gen_ext32s_tl(t0
, t0
);
3700 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3701 tcg_gen_xori_tl(t2
, t0
, uimm
);
3702 tcg_gen_and_tl(t1
, t1
, t2
);
3704 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3706 /* operands of same sign, result different sign */
3707 generate_exception(ctx
, EXCP_OVERFLOW
);
3709 tcg_gen_ext32s_tl(t0
, t0
);
3710 gen_store_gpr(t0
, rt
);
3716 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3717 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3719 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3722 #if defined(TARGET_MIPS64)
3725 TCGv t0
= tcg_temp_local_new();
3726 TCGv t1
= tcg_temp_new();
3727 TCGv t2
= tcg_temp_new();
3728 TCGLabel
*l1
= gen_new_label();
3730 gen_load_gpr(t1
, rs
);
3731 tcg_gen_addi_tl(t0
, t1
, uimm
);
3733 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3734 tcg_gen_xori_tl(t2
, t0
, uimm
);
3735 tcg_gen_and_tl(t1
, t1
, t2
);
3737 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3739 /* operands of same sign, result different sign */
3740 generate_exception(ctx
, EXCP_OVERFLOW
);
3742 gen_store_gpr(t0
, rt
);
3748 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3750 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3757 /* Logic with immediate operand */
3758 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3759 int rt
, int rs
, int16_t imm
)
3764 /* If no destination, treat it as a NOP. */
3767 uimm
= (uint16_t)imm
;
3770 if (likely(rs
!= 0))
3771 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3773 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3777 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3779 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3782 if (likely(rs
!= 0))
3783 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3785 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3788 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3790 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3791 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3793 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3802 /* Set on less than with immediate operand */
3803 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3804 int rt
, int rs
, int16_t imm
)
3806 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3810 /* If no destination, treat it as a NOP. */
3813 t0
= tcg_temp_new();
3814 gen_load_gpr(t0
, rs
);
3817 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3820 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3826 /* Shifts with immediate operand */
3827 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3828 int rt
, int rs
, int16_t imm
)
3830 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3834 /* If no destination, treat it as a NOP. */
3838 t0
= tcg_temp_new();
3839 gen_load_gpr(t0
, rs
);
3842 tcg_gen_shli_tl(t0
, t0
, uimm
);
3843 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3846 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3850 tcg_gen_ext32u_tl(t0
, t0
);
3851 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3853 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3858 TCGv_i32 t1
= tcg_temp_new_i32();
3860 tcg_gen_trunc_tl_i32(t1
, t0
);
3861 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3862 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3863 tcg_temp_free_i32(t1
);
3865 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3868 #if defined(TARGET_MIPS64)
3870 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3873 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3876 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3880 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3882 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3886 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3889 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3892 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3895 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3903 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3904 int rd
, int rs
, int rt
)
3906 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3907 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3908 /* If no destination, treat it as a NOP.
3909 For add & sub, we must generate the overflow exception when needed. */
3916 TCGv t0
= tcg_temp_local_new();
3917 TCGv t1
= tcg_temp_new();
3918 TCGv t2
= tcg_temp_new();
3919 TCGLabel
*l1
= gen_new_label();
3921 gen_load_gpr(t1
, rs
);
3922 gen_load_gpr(t2
, rt
);
3923 tcg_gen_add_tl(t0
, t1
, t2
);
3924 tcg_gen_ext32s_tl(t0
, t0
);
3925 tcg_gen_xor_tl(t1
, t1
, t2
);
3926 tcg_gen_xor_tl(t2
, t0
, t2
);
3927 tcg_gen_andc_tl(t1
, t2
, t1
);
3929 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3931 /* operands of same sign, result different sign */
3932 generate_exception(ctx
, EXCP_OVERFLOW
);
3934 gen_store_gpr(t0
, rd
);
3939 if (rs
!= 0 && rt
!= 0) {
3940 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3941 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3942 } else if (rs
== 0 && rt
!= 0) {
3943 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3944 } else if (rs
!= 0 && rt
== 0) {
3945 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3947 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3952 TCGv t0
= tcg_temp_local_new();
3953 TCGv t1
= tcg_temp_new();
3954 TCGv t2
= tcg_temp_new();
3955 TCGLabel
*l1
= gen_new_label();
3957 gen_load_gpr(t1
, rs
);
3958 gen_load_gpr(t2
, rt
);
3959 tcg_gen_sub_tl(t0
, t1
, t2
);
3960 tcg_gen_ext32s_tl(t0
, t0
);
3961 tcg_gen_xor_tl(t2
, t1
, t2
);
3962 tcg_gen_xor_tl(t1
, t0
, t1
);
3963 tcg_gen_and_tl(t1
, t1
, t2
);
3965 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3967 /* operands of different sign, first operand and result different sign */
3968 generate_exception(ctx
, EXCP_OVERFLOW
);
3970 gen_store_gpr(t0
, rd
);
3975 if (rs
!= 0 && rt
!= 0) {
3976 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3977 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3978 } else if (rs
== 0 && rt
!= 0) {
3979 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3980 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3981 } else if (rs
!= 0 && rt
== 0) {
3982 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3984 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3987 #if defined(TARGET_MIPS64)
3990 TCGv t0
= tcg_temp_local_new();
3991 TCGv t1
= tcg_temp_new();
3992 TCGv t2
= tcg_temp_new();
3993 TCGLabel
*l1
= gen_new_label();
3995 gen_load_gpr(t1
, rs
);
3996 gen_load_gpr(t2
, rt
);
3997 tcg_gen_add_tl(t0
, t1
, t2
);
3998 tcg_gen_xor_tl(t1
, t1
, t2
);
3999 tcg_gen_xor_tl(t2
, t0
, t2
);
4000 tcg_gen_andc_tl(t1
, t2
, t1
);
4002 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4004 /* operands of same sign, result different sign */
4005 generate_exception(ctx
, EXCP_OVERFLOW
);
4007 gen_store_gpr(t0
, rd
);
4012 if (rs
!= 0 && rt
!= 0) {
4013 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4014 } else if (rs
== 0 && rt
!= 0) {
4015 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4016 } else if (rs
!= 0 && rt
== 0) {
4017 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4019 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4024 TCGv t0
= tcg_temp_local_new();
4025 TCGv t1
= tcg_temp_new();
4026 TCGv t2
= tcg_temp_new();
4027 TCGLabel
*l1
= gen_new_label();
4029 gen_load_gpr(t1
, rs
);
4030 gen_load_gpr(t2
, rt
);
4031 tcg_gen_sub_tl(t0
, t1
, t2
);
4032 tcg_gen_xor_tl(t2
, t1
, t2
);
4033 tcg_gen_xor_tl(t1
, t0
, t1
);
4034 tcg_gen_and_tl(t1
, t1
, t2
);
4036 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4038 /* operands of different sign, first operand and result different sign */
4039 generate_exception(ctx
, EXCP_OVERFLOW
);
4041 gen_store_gpr(t0
, rd
);
4046 if (rs
!= 0 && rt
!= 0) {
4047 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4048 } else if (rs
== 0 && rt
!= 0) {
4049 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4050 } else if (rs
!= 0 && rt
== 0) {
4051 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4053 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4058 if (likely(rs
!= 0 && rt
!= 0)) {
4059 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4060 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4062 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4068 /* Conditional move */
4069 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4070 int rd
, int rs
, int rt
)
4075 /* If no destination, treat it as a NOP. */
4079 t0
= tcg_temp_new();
4080 gen_load_gpr(t0
, rt
);
4081 t1
= tcg_const_tl(0);
4082 t2
= tcg_temp_new();
4083 gen_load_gpr(t2
, rs
);
4086 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4089 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4092 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4095 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4104 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4105 int rd
, int rs
, int rt
)
4108 /* If no destination, treat it as a NOP. */
4114 if (likely(rs
!= 0 && rt
!= 0)) {
4115 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4117 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4121 if (rs
!= 0 && rt
!= 0) {
4122 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4123 } else if (rs
== 0 && rt
!= 0) {
4124 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4125 } else if (rs
!= 0 && rt
== 0) {
4126 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4128 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4132 if (likely(rs
!= 0 && rt
!= 0)) {
4133 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4134 } else if (rs
== 0 && rt
!= 0) {
4135 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4136 } else if (rs
!= 0 && rt
== 0) {
4137 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4139 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4143 if (likely(rs
!= 0 && rt
!= 0)) {
4144 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4145 } else if (rs
== 0 && rt
!= 0) {
4146 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4147 } else if (rs
!= 0 && rt
== 0) {
4148 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4150 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4156 /* Set on lower than */
4157 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4158 int rd
, int rs
, int rt
)
4163 /* If no destination, treat it as a NOP. */
4167 t0
= tcg_temp_new();
4168 t1
= tcg_temp_new();
4169 gen_load_gpr(t0
, rs
);
4170 gen_load_gpr(t1
, rt
);
4173 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4176 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4184 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4185 int rd
, int rs
, int rt
)
4190 /* If no destination, treat it as a NOP.
4191 For add & sub, we must generate the overflow exception when needed. */
4195 t0
= tcg_temp_new();
4196 t1
= tcg_temp_new();
4197 gen_load_gpr(t0
, rs
);
4198 gen_load_gpr(t1
, rt
);
4201 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4202 tcg_gen_shl_tl(t0
, t1
, t0
);
4203 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4206 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4207 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4210 tcg_gen_ext32u_tl(t1
, t1
);
4211 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4212 tcg_gen_shr_tl(t0
, t1
, t0
);
4213 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4217 TCGv_i32 t2
= tcg_temp_new_i32();
4218 TCGv_i32 t3
= tcg_temp_new_i32();
4220 tcg_gen_trunc_tl_i32(t2
, t0
);
4221 tcg_gen_trunc_tl_i32(t3
, t1
);
4222 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4223 tcg_gen_rotr_i32(t2
, t3
, t2
);
4224 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4225 tcg_temp_free_i32(t2
);
4226 tcg_temp_free_i32(t3
);
4229 #if defined(TARGET_MIPS64)
4231 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4232 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4235 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4236 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4239 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4240 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4243 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4244 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4252 /* Arithmetic on HI/LO registers */
4253 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4255 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== TX79_MMI_MFHI1
||
4256 opc
== OPC_MFLO
|| opc
== TX79_MMI_MFLO1
)) {
4262 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4269 case TX79_MMI_MFHI1
:
4270 #if defined(TARGET_MIPS64)
4272 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4276 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4280 case TX79_MMI_MFLO1
:
4281 #if defined(TARGET_MIPS64)
4283 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4287 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4291 case TX79_MMI_MTHI1
:
4293 #if defined(TARGET_MIPS64)
4295 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4299 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4302 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4306 case TX79_MMI_MTLO1
:
4308 #if defined(TARGET_MIPS64)
4310 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4314 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4317 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4323 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4326 TCGv t0
= tcg_const_tl(addr
);
4327 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4328 gen_store_gpr(t0
, reg
);
4332 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4338 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4341 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4342 addr
= addr_add(ctx
, pc
, offset
);
4343 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4347 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4348 addr
= addr_add(ctx
, pc
, offset
);
4349 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4351 #if defined(TARGET_MIPS64)
4354 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4355 addr
= addr_add(ctx
, pc
, offset
);
4356 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4360 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4363 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4364 addr
= addr_add(ctx
, pc
, offset
);
4365 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4370 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4371 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4372 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4375 #if defined(TARGET_MIPS64)
4376 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4377 case R6_OPC_LDPC
+ (1 << 16):
4378 case R6_OPC_LDPC
+ (2 << 16):
4379 case R6_OPC_LDPC
+ (3 << 16):
4381 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4382 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4383 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4387 MIPS_INVAL("OPC_PCREL");
4388 generate_exception_end(ctx
, EXCP_RI
);
4395 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4404 t0
= tcg_temp_new();
4405 t1
= tcg_temp_new();
4407 gen_load_gpr(t0
, rs
);
4408 gen_load_gpr(t1
, rt
);
4413 TCGv t2
= tcg_temp_new();
4414 TCGv t3
= tcg_temp_new();
4415 tcg_gen_ext32s_tl(t0
, t0
);
4416 tcg_gen_ext32s_tl(t1
, t1
);
4417 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4418 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4419 tcg_gen_and_tl(t2
, t2
, t3
);
4420 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4421 tcg_gen_or_tl(t2
, t2
, t3
);
4422 tcg_gen_movi_tl(t3
, 0);
4423 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4424 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4425 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4432 TCGv t2
= tcg_temp_new();
4433 TCGv t3
= tcg_temp_new();
4434 tcg_gen_ext32s_tl(t0
, t0
);
4435 tcg_gen_ext32s_tl(t1
, t1
);
4436 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4437 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4438 tcg_gen_and_tl(t2
, t2
, t3
);
4439 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4440 tcg_gen_or_tl(t2
, t2
, t3
);
4441 tcg_gen_movi_tl(t3
, 0);
4442 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4443 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4444 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4451 TCGv t2
= tcg_const_tl(0);
4452 TCGv t3
= tcg_const_tl(1);
4453 tcg_gen_ext32u_tl(t0
, t0
);
4454 tcg_gen_ext32u_tl(t1
, t1
);
4455 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4456 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4457 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4464 TCGv t2
= tcg_const_tl(0);
4465 TCGv t3
= tcg_const_tl(1);
4466 tcg_gen_ext32u_tl(t0
, t0
);
4467 tcg_gen_ext32u_tl(t1
, t1
);
4468 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4469 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4470 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4477 TCGv_i32 t2
= tcg_temp_new_i32();
4478 TCGv_i32 t3
= tcg_temp_new_i32();
4479 tcg_gen_trunc_tl_i32(t2
, t0
);
4480 tcg_gen_trunc_tl_i32(t3
, t1
);
4481 tcg_gen_mul_i32(t2
, t2
, t3
);
4482 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4483 tcg_temp_free_i32(t2
);
4484 tcg_temp_free_i32(t3
);
4489 TCGv_i32 t2
= tcg_temp_new_i32();
4490 TCGv_i32 t3
= tcg_temp_new_i32();
4491 tcg_gen_trunc_tl_i32(t2
, t0
);
4492 tcg_gen_trunc_tl_i32(t3
, t1
);
4493 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4494 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4495 tcg_temp_free_i32(t2
);
4496 tcg_temp_free_i32(t3
);
4501 TCGv_i32 t2
= tcg_temp_new_i32();
4502 TCGv_i32 t3
= tcg_temp_new_i32();
4503 tcg_gen_trunc_tl_i32(t2
, t0
);
4504 tcg_gen_trunc_tl_i32(t3
, t1
);
4505 tcg_gen_mul_i32(t2
, t2
, t3
);
4506 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4507 tcg_temp_free_i32(t2
);
4508 tcg_temp_free_i32(t3
);
4513 TCGv_i32 t2
= tcg_temp_new_i32();
4514 TCGv_i32 t3
= tcg_temp_new_i32();
4515 tcg_gen_trunc_tl_i32(t2
, t0
);
4516 tcg_gen_trunc_tl_i32(t3
, t1
);
4517 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4518 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4519 tcg_temp_free_i32(t2
);
4520 tcg_temp_free_i32(t3
);
4523 #if defined(TARGET_MIPS64)
4526 TCGv t2
= tcg_temp_new();
4527 TCGv t3
= tcg_temp_new();
4528 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4529 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4530 tcg_gen_and_tl(t2
, t2
, t3
);
4531 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4532 tcg_gen_or_tl(t2
, t2
, t3
);
4533 tcg_gen_movi_tl(t3
, 0);
4534 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4535 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4542 TCGv t2
= tcg_temp_new();
4543 TCGv t3
= tcg_temp_new();
4544 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4545 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4546 tcg_gen_and_tl(t2
, t2
, t3
);
4547 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4548 tcg_gen_or_tl(t2
, t2
, t3
);
4549 tcg_gen_movi_tl(t3
, 0);
4550 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4551 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4558 TCGv t2
= tcg_const_tl(0);
4559 TCGv t3
= tcg_const_tl(1);
4560 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4561 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4568 TCGv t2
= tcg_const_tl(0);
4569 TCGv t3
= tcg_const_tl(1);
4570 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4571 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4577 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4581 TCGv t2
= tcg_temp_new();
4582 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4587 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4591 TCGv t2
= tcg_temp_new();
4592 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4598 MIPS_INVAL("r6 mul/div");
4599 generate_exception_end(ctx
, EXCP_RI
);
4607 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4608 int acc
, int rs
, int rt
)
4612 t0
= tcg_temp_new();
4613 t1
= tcg_temp_new();
4615 gen_load_gpr(t0
, rs
);
4616 gen_load_gpr(t1
, rt
);
4619 if (!(ctx
->insn_flags
& INSN_R5900
)) {
4628 TCGv t2
= tcg_temp_new();
4629 TCGv t3
= tcg_temp_new();
4630 tcg_gen_ext32s_tl(t0
, t0
);
4631 tcg_gen_ext32s_tl(t1
, t1
);
4632 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4633 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4634 tcg_gen_and_tl(t2
, t2
, t3
);
4635 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4636 tcg_gen_or_tl(t2
, t2
, t3
);
4637 tcg_gen_movi_tl(t3
, 0);
4638 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4639 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4640 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4641 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4642 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4648 case TX79_MMI_DIVU1
:
4650 TCGv t2
= tcg_const_tl(0);
4651 TCGv t3
= tcg_const_tl(1);
4652 tcg_gen_ext32u_tl(t0
, t0
);
4653 tcg_gen_ext32u_tl(t1
, t1
);
4654 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4655 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4656 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4657 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4658 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4665 TCGv_i32 t2
= tcg_temp_new_i32();
4666 TCGv_i32 t3
= tcg_temp_new_i32();
4667 tcg_gen_trunc_tl_i32(t2
, t0
);
4668 tcg_gen_trunc_tl_i32(t3
, t1
);
4669 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4670 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4671 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4672 tcg_temp_free_i32(t2
);
4673 tcg_temp_free_i32(t3
);
4678 TCGv_i32 t2
= tcg_temp_new_i32();
4679 TCGv_i32 t3
= tcg_temp_new_i32();
4680 tcg_gen_trunc_tl_i32(t2
, t0
);
4681 tcg_gen_trunc_tl_i32(t3
, t1
);
4682 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4683 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4684 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4685 tcg_temp_free_i32(t2
);
4686 tcg_temp_free_i32(t3
);
4689 #if defined(TARGET_MIPS64)
4692 TCGv t2
= tcg_temp_new();
4693 TCGv t3
= tcg_temp_new();
4694 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4695 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4696 tcg_gen_and_tl(t2
, t2
, t3
);
4697 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4698 tcg_gen_or_tl(t2
, t2
, t3
);
4699 tcg_gen_movi_tl(t3
, 0);
4700 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4701 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4702 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4709 TCGv t2
= tcg_const_tl(0);
4710 TCGv t3
= tcg_const_tl(1);
4711 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4712 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4713 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4719 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4722 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4727 TCGv_i64 t2
= tcg_temp_new_i64();
4728 TCGv_i64 t3
= tcg_temp_new_i64();
4730 tcg_gen_ext_tl_i64(t2
, t0
);
4731 tcg_gen_ext_tl_i64(t3
, t1
);
4732 tcg_gen_mul_i64(t2
, t2
, t3
);
4733 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4734 tcg_gen_add_i64(t2
, t2
, t3
);
4735 tcg_temp_free_i64(t3
);
4736 gen_move_low32(cpu_LO
[acc
], t2
);
4737 gen_move_high32(cpu_HI
[acc
], t2
);
4738 tcg_temp_free_i64(t2
);
4743 TCGv_i64 t2
= tcg_temp_new_i64();
4744 TCGv_i64 t3
= tcg_temp_new_i64();
4746 tcg_gen_ext32u_tl(t0
, t0
);
4747 tcg_gen_ext32u_tl(t1
, t1
);
4748 tcg_gen_extu_tl_i64(t2
, t0
);
4749 tcg_gen_extu_tl_i64(t3
, t1
);
4750 tcg_gen_mul_i64(t2
, t2
, t3
);
4751 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4752 tcg_gen_add_i64(t2
, t2
, t3
);
4753 tcg_temp_free_i64(t3
);
4754 gen_move_low32(cpu_LO
[acc
], t2
);
4755 gen_move_high32(cpu_HI
[acc
], t2
);
4756 tcg_temp_free_i64(t2
);
4761 TCGv_i64 t2
= tcg_temp_new_i64();
4762 TCGv_i64 t3
= tcg_temp_new_i64();
4764 tcg_gen_ext_tl_i64(t2
, t0
);
4765 tcg_gen_ext_tl_i64(t3
, t1
);
4766 tcg_gen_mul_i64(t2
, t2
, t3
);
4767 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4768 tcg_gen_sub_i64(t2
, t3
, t2
);
4769 tcg_temp_free_i64(t3
);
4770 gen_move_low32(cpu_LO
[acc
], t2
);
4771 gen_move_high32(cpu_HI
[acc
], t2
);
4772 tcg_temp_free_i64(t2
);
4777 TCGv_i64 t2
= tcg_temp_new_i64();
4778 TCGv_i64 t3
= tcg_temp_new_i64();
4780 tcg_gen_ext32u_tl(t0
, t0
);
4781 tcg_gen_ext32u_tl(t1
, t1
);
4782 tcg_gen_extu_tl_i64(t2
, t0
);
4783 tcg_gen_extu_tl_i64(t3
, t1
);
4784 tcg_gen_mul_i64(t2
, t2
, t3
);
4785 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4786 tcg_gen_sub_i64(t2
, t3
, t2
);
4787 tcg_temp_free_i64(t3
);
4788 gen_move_low32(cpu_LO
[acc
], t2
);
4789 gen_move_high32(cpu_HI
[acc
], t2
);
4790 tcg_temp_free_i64(t2
);
4794 MIPS_INVAL("mul/div");
4795 generate_exception_end(ctx
, EXCP_RI
);
4804 * These MULT and MULTU instructions implemented in for example the
4805 * Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4806 * architectures are special three-operand variants with the syntax
4808 * MULT[U][1] rd, rs, rt
4812 * (rd, LO, HI) <- rs * rt
4814 * where the low-order 32-bits of the result is placed into both the
4815 * GPR rd and the special register LO. The high-order 32-bits of the
4816 * result is placed into the special register HI.
4818 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4819 * which is the zero register that always reads as 0.
4821 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4822 int rd
, int rs
, int rt
)
4824 TCGv t0
= tcg_temp_new();
4825 TCGv t1
= tcg_temp_new();
4828 gen_load_gpr(t0
, rs
);
4829 gen_load_gpr(t1
, rt
);
4832 case TX79_MMI_MULT1
:
4837 TCGv_i32 t2
= tcg_temp_new_i32();
4838 TCGv_i32 t3
= tcg_temp_new_i32();
4839 tcg_gen_trunc_tl_i32(t2
, t0
);
4840 tcg_gen_trunc_tl_i32(t3
, t1
);
4841 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4843 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4845 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4846 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4847 tcg_temp_free_i32(t2
);
4848 tcg_temp_free_i32(t3
);
4851 case TX79_MMI_MULTU1
:
4856 TCGv_i32 t2
= tcg_temp_new_i32();
4857 TCGv_i32 t3
= tcg_temp_new_i32();
4858 tcg_gen_trunc_tl_i32(t2
, t0
);
4859 tcg_gen_trunc_tl_i32(t3
, t1
);
4860 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4862 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4864 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4865 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4866 tcg_temp_free_i32(t2
);
4867 tcg_temp_free_i32(t3
);
4871 MIPS_INVAL("mul TXx9");
4872 generate_exception_end(ctx
, EXCP_RI
);
4881 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4882 int rd
, int rs
, int rt
)
4884 TCGv t0
= tcg_temp_new();
4885 TCGv t1
= tcg_temp_new();
4887 gen_load_gpr(t0
, rs
);
4888 gen_load_gpr(t1
, rt
);
4891 case OPC_VR54XX_MULS
:
4892 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4894 case OPC_VR54XX_MULSU
:
4895 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4897 case OPC_VR54XX_MACC
:
4898 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4900 case OPC_VR54XX_MACCU
:
4901 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4903 case OPC_VR54XX_MSAC
:
4904 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4906 case OPC_VR54XX_MSACU
:
4907 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4909 case OPC_VR54XX_MULHI
:
4910 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4912 case OPC_VR54XX_MULHIU
:
4913 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4915 case OPC_VR54XX_MULSHI
:
4916 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4918 case OPC_VR54XX_MULSHIU
:
4919 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4921 case OPC_VR54XX_MACCHI
:
4922 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4924 case OPC_VR54XX_MACCHIU
:
4925 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4927 case OPC_VR54XX_MSACHI
:
4928 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4930 case OPC_VR54XX_MSACHIU
:
4931 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4934 MIPS_INVAL("mul vr54xx");
4935 generate_exception_end(ctx
, EXCP_RI
);
4938 gen_store_gpr(t0
, rd
);
4945 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4955 gen_load_gpr(t0
, rs
);
4960 #if defined(TARGET_MIPS64)
4964 tcg_gen_not_tl(t0
, t0
);
4973 tcg_gen_ext32u_tl(t0
, t0
);
4974 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4975 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4977 #if defined(TARGET_MIPS64)
4982 tcg_gen_clzi_i64(t0
, t0
, 64);
4988 /* Godson integer instructions */
4989 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
4990 int rd
, int rs
, int rt
)
5002 case OPC_MULTU_G_2E
:
5003 case OPC_MULTU_G_2F
:
5004 #if defined(TARGET_MIPS64)
5005 case OPC_DMULT_G_2E
:
5006 case OPC_DMULT_G_2F
:
5007 case OPC_DMULTU_G_2E
:
5008 case OPC_DMULTU_G_2F
:
5010 t0
= tcg_temp_new();
5011 t1
= tcg_temp_new();
5014 t0
= tcg_temp_local_new();
5015 t1
= tcg_temp_local_new();
5019 gen_load_gpr(t0
, rs
);
5020 gen_load_gpr(t1
, rt
);
5025 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5026 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5028 case OPC_MULTU_G_2E
:
5029 case OPC_MULTU_G_2F
:
5030 tcg_gen_ext32u_tl(t0
, t0
);
5031 tcg_gen_ext32u_tl(t1
, t1
);
5032 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5033 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5038 TCGLabel
*l1
= gen_new_label();
5039 TCGLabel
*l2
= gen_new_label();
5040 TCGLabel
*l3
= gen_new_label();
5041 tcg_gen_ext32s_tl(t0
, t0
);
5042 tcg_gen_ext32s_tl(t1
, t1
);
5043 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5044 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5047 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5048 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5049 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5052 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5053 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5060 TCGLabel
*l1
= gen_new_label();
5061 TCGLabel
*l2
= gen_new_label();
5062 tcg_gen_ext32u_tl(t0
, t0
);
5063 tcg_gen_ext32u_tl(t1
, t1
);
5064 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5065 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5068 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5069 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5076 TCGLabel
*l1
= gen_new_label();
5077 TCGLabel
*l2
= gen_new_label();
5078 TCGLabel
*l3
= gen_new_label();
5079 tcg_gen_ext32u_tl(t0
, t0
);
5080 tcg_gen_ext32u_tl(t1
, t1
);
5081 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5082 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5083 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5085 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5088 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5089 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5096 TCGLabel
*l1
= gen_new_label();
5097 TCGLabel
*l2
= gen_new_label();
5098 tcg_gen_ext32u_tl(t0
, t0
);
5099 tcg_gen_ext32u_tl(t1
, t1
);
5100 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5101 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5104 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5105 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5109 #if defined(TARGET_MIPS64)
5110 case OPC_DMULT_G_2E
:
5111 case OPC_DMULT_G_2F
:
5112 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5114 case OPC_DMULTU_G_2E
:
5115 case OPC_DMULTU_G_2F
:
5116 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5121 TCGLabel
*l1
= gen_new_label();
5122 TCGLabel
*l2
= gen_new_label();
5123 TCGLabel
*l3
= gen_new_label();
5124 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5125 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5128 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5129 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5130 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5133 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5137 case OPC_DDIVU_G_2E
:
5138 case OPC_DDIVU_G_2F
:
5140 TCGLabel
*l1
= gen_new_label();
5141 TCGLabel
*l2
= gen_new_label();
5142 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5143 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5146 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5153 TCGLabel
*l1
= gen_new_label();
5154 TCGLabel
*l2
= gen_new_label();
5155 TCGLabel
*l3
= gen_new_label();
5156 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5157 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5158 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5160 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5163 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5167 case OPC_DMODU_G_2E
:
5168 case OPC_DMODU_G_2F
:
5170 TCGLabel
*l1
= gen_new_label();
5171 TCGLabel
*l2
= gen_new_label();
5172 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5173 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5176 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5187 /* Loongson multimedia instructions */
5188 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5190 uint32_t opc
, shift_max
;
5193 opc
= MASK_LMI(ctx
->opcode
);
5199 t0
= tcg_temp_local_new_i64();
5200 t1
= tcg_temp_local_new_i64();
5203 t0
= tcg_temp_new_i64();
5204 t1
= tcg_temp_new_i64();
5208 check_cp1_enabled(ctx
);
5209 gen_load_fpr64(ctx
, t0
, rs
);
5210 gen_load_fpr64(ctx
, t1
, rt
);
5212 #define LMI_HELPER(UP, LO) \
5213 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5214 #define LMI_HELPER_1(UP, LO) \
5215 case OPC_##UP: gen_helper_##LO(t0, t0); break
5216 #define LMI_DIRECT(UP, LO, OP) \
5217 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5220 LMI_HELPER(PADDSH
, paddsh
);
5221 LMI_HELPER(PADDUSH
, paddush
);
5222 LMI_HELPER(PADDH
, paddh
);
5223 LMI_HELPER(PADDW
, paddw
);
5224 LMI_HELPER(PADDSB
, paddsb
);
5225 LMI_HELPER(PADDUSB
, paddusb
);
5226 LMI_HELPER(PADDB
, paddb
);
5228 LMI_HELPER(PSUBSH
, psubsh
);
5229 LMI_HELPER(PSUBUSH
, psubush
);
5230 LMI_HELPER(PSUBH
, psubh
);
5231 LMI_HELPER(PSUBW
, psubw
);
5232 LMI_HELPER(PSUBSB
, psubsb
);
5233 LMI_HELPER(PSUBUSB
, psubusb
);
5234 LMI_HELPER(PSUBB
, psubb
);
5236 LMI_HELPER(PSHUFH
, pshufh
);
5237 LMI_HELPER(PACKSSWH
, packsswh
);
5238 LMI_HELPER(PACKSSHB
, packsshb
);
5239 LMI_HELPER(PACKUSHB
, packushb
);
5241 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5242 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5243 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5244 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5245 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5246 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5248 LMI_HELPER(PAVGH
, pavgh
);
5249 LMI_HELPER(PAVGB
, pavgb
);
5250 LMI_HELPER(PMAXSH
, pmaxsh
);
5251 LMI_HELPER(PMINSH
, pminsh
);
5252 LMI_HELPER(PMAXUB
, pmaxub
);
5253 LMI_HELPER(PMINUB
, pminub
);
5255 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5256 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5257 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5258 LMI_HELPER(PCMPGTH
, pcmpgth
);
5259 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5260 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5262 LMI_HELPER(PSLLW
, psllw
);
5263 LMI_HELPER(PSLLH
, psllh
);
5264 LMI_HELPER(PSRLW
, psrlw
);
5265 LMI_HELPER(PSRLH
, psrlh
);
5266 LMI_HELPER(PSRAW
, psraw
);
5267 LMI_HELPER(PSRAH
, psrah
);
5269 LMI_HELPER(PMULLH
, pmullh
);
5270 LMI_HELPER(PMULHH
, pmulhh
);
5271 LMI_HELPER(PMULHUH
, pmulhuh
);
5272 LMI_HELPER(PMADDHW
, pmaddhw
);
5274 LMI_HELPER(PASUBUB
, pasubub
);
5275 LMI_HELPER_1(BIADD
, biadd
);
5276 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5278 LMI_DIRECT(PADDD
, paddd
, add
);
5279 LMI_DIRECT(PSUBD
, psubd
, sub
);
5280 LMI_DIRECT(XOR_CP2
, xor, xor);
5281 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5282 LMI_DIRECT(AND_CP2
, and, and);
5283 LMI_DIRECT(OR_CP2
, or, or);
5286 tcg_gen_andc_i64(t0
, t1
, t0
);
5290 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5293 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5296 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5299 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5303 tcg_gen_andi_i64(t1
, t1
, 3);
5304 tcg_gen_shli_i64(t1
, t1
, 4);
5305 tcg_gen_shr_i64(t0
, t0
, t1
);
5306 tcg_gen_ext16u_i64(t0
, t0
);
5310 tcg_gen_add_i64(t0
, t0
, t1
);
5311 tcg_gen_ext32s_i64(t0
, t0
);
5314 tcg_gen_sub_i64(t0
, t0
, t1
);
5315 tcg_gen_ext32s_i64(t0
, t0
);
5337 /* Make sure shift count isn't TCG undefined behaviour. */
5338 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5343 tcg_gen_shl_i64(t0
, t0
, t1
);
5347 /* Since SRA is UndefinedResult without sign-extended inputs,
5348 we can treat SRA and DSRA the same. */
5349 tcg_gen_sar_i64(t0
, t0
, t1
);
5352 /* We want to shift in zeros for SRL; zero-extend first. */
5353 tcg_gen_ext32u_i64(t0
, t0
);
5356 tcg_gen_shr_i64(t0
, t0
, t1
);
5360 if (shift_max
== 32) {
5361 tcg_gen_ext32s_i64(t0
, t0
);
5364 /* Shifts larger than MAX produce zero. */
5365 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5366 tcg_gen_neg_i64(t1
, t1
);
5367 tcg_gen_and_i64(t0
, t0
, t1
);
5373 TCGv_i64 t2
= tcg_temp_new_i64();
5374 TCGLabel
*lab
= gen_new_label();
5376 tcg_gen_mov_i64(t2
, t0
);
5377 tcg_gen_add_i64(t0
, t1
, t2
);
5378 if (opc
== OPC_ADD_CP2
) {
5379 tcg_gen_ext32s_i64(t0
, t0
);
5381 tcg_gen_xor_i64(t1
, t1
, t2
);
5382 tcg_gen_xor_i64(t2
, t2
, t0
);
5383 tcg_gen_andc_i64(t1
, t2
, t1
);
5384 tcg_temp_free_i64(t2
);
5385 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5386 generate_exception(ctx
, EXCP_OVERFLOW
);
5394 TCGv_i64 t2
= tcg_temp_new_i64();
5395 TCGLabel
*lab
= gen_new_label();
5397 tcg_gen_mov_i64(t2
, t0
);
5398 tcg_gen_sub_i64(t0
, t1
, t2
);
5399 if (opc
== OPC_SUB_CP2
) {
5400 tcg_gen_ext32s_i64(t0
, t0
);
5402 tcg_gen_xor_i64(t1
, t1
, t2
);
5403 tcg_gen_xor_i64(t2
, t2
, t0
);
5404 tcg_gen_and_i64(t1
, t1
, t2
);
5405 tcg_temp_free_i64(t2
);
5406 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5407 generate_exception(ctx
, EXCP_OVERFLOW
);
5413 tcg_gen_ext32u_i64(t0
, t0
);
5414 tcg_gen_ext32u_i64(t1
, t1
);
5415 tcg_gen_mul_i64(t0
, t0
, t1
);
5424 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5425 FD field is the CC field? */
5427 MIPS_INVAL("loongson_cp2");
5428 generate_exception_end(ctx
, EXCP_RI
);
5435 gen_store_fpr64(ctx
, t0
, rd
);
5437 tcg_temp_free_i64(t0
);
5438 tcg_temp_free_i64(t1
);
5442 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5443 int rs
, int rt
, int16_t imm
)
5446 TCGv t0
= tcg_temp_new();
5447 TCGv t1
= tcg_temp_new();
5450 /* Load needed operands */
5458 /* Compare two registers */
5460 gen_load_gpr(t0
, rs
);
5461 gen_load_gpr(t1
, rt
);
5471 /* Compare register to immediate */
5472 if (rs
!= 0 || imm
!= 0) {
5473 gen_load_gpr(t0
, rs
);
5474 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5481 case OPC_TEQ
: /* rs == rs */
5482 case OPC_TEQI
: /* r0 == 0 */
5483 case OPC_TGE
: /* rs >= rs */
5484 case OPC_TGEI
: /* r0 >= 0 */
5485 case OPC_TGEU
: /* rs >= rs unsigned */
5486 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5488 generate_exception_end(ctx
, EXCP_TRAP
);
5490 case OPC_TLT
: /* rs < rs */
5491 case OPC_TLTI
: /* r0 < 0 */
5492 case OPC_TLTU
: /* rs < rs unsigned */
5493 case OPC_TLTIU
: /* r0 < 0 unsigned */
5494 case OPC_TNE
: /* rs != rs */
5495 case OPC_TNEI
: /* r0 != 0 */
5496 /* Never trap: treat as NOP. */
5500 TCGLabel
*l1
= gen_new_label();
5505 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5509 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5513 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5517 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5521 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5525 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5528 generate_exception(ctx
, EXCP_TRAP
);
5535 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5537 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5541 #ifndef CONFIG_USER_ONLY
5542 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5548 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5550 if (use_goto_tb(ctx
, dest
)) {
5553 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5556 if (ctx
->base
.singlestep_enabled
) {
5557 save_cpu_state(ctx
, 0);
5558 gen_helper_raise_exception_debug(cpu_env
);
5560 tcg_gen_lookup_and_goto_ptr();
5564 /* Branches (before delay slot) */
5565 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5567 int rs
, int rt
, int32_t offset
,
5570 target_ulong btgt
= -1;
5572 int bcond_compute
= 0;
5573 TCGv t0
= tcg_temp_new();
5574 TCGv t1
= tcg_temp_new();
5576 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5577 #ifdef MIPS_DEBUG_DISAS
5578 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5579 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5581 generate_exception_end(ctx
, EXCP_RI
);
5585 /* Load needed operands */
5591 /* Compare two registers */
5593 gen_load_gpr(t0
, rs
);
5594 gen_load_gpr(t1
, rt
);
5597 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5611 /* Compare to zero */
5613 gen_load_gpr(t0
, rs
);
5616 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5619 #if defined(TARGET_MIPS64)
5621 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5623 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5626 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5631 /* Jump to immediate */
5632 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5637 /* Jump to register */
5638 if (offset
!= 0 && offset
!= 16) {
5639 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5640 others are reserved. */
5641 MIPS_INVAL("jump hint");
5642 generate_exception_end(ctx
, EXCP_RI
);
5645 gen_load_gpr(btarget
, rs
);
5648 MIPS_INVAL("branch/jump");
5649 generate_exception_end(ctx
, EXCP_RI
);
5652 if (bcond_compute
== 0) {
5653 /* No condition to be computed */
5655 case OPC_BEQ
: /* rx == rx */
5656 case OPC_BEQL
: /* rx == rx likely */
5657 case OPC_BGEZ
: /* 0 >= 0 */
5658 case OPC_BGEZL
: /* 0 >= 0 likely */
5659 case OPC_BLEZ
: /* 0 <= 0 */
5660 case OPC_BLEZL
: /* 0 <= 0 likely */
5662 ctx
->hflags
|= MIPS_HFLAG_B
;
5664 case OPC_BGEZAL
: /* 0 >= 0 */
5665 case OPC_BGEZALL
: /* 0 >= 0 likely */
5666 /* Always take and link */
5668 ctx
->hflags
|= MIPS_HFLAG_B
;
5670 case OPC_BNE
: /* rx != rx */
5671 case OPC_BGTZ
: /* 0 > 0 */
5672 case OPC_BLTZ
: /* 0 < 0 */
5675 case OPC_BLTZAL
: /* 0 < 0 */
5676 /* Handle as an unconditional branch to get correct delay
5679 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5680 ctx
->hflags
|= MIPS_HFLAG_B
;
5682 case OPC_BLTZALL
: /* 0 < 0 likely */
5683 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5684 /* Skip the instruction in the delay slot */
5685 ctx
->base
.pc_next
+= 4;
5687 case OPC_BNEL
: /* rx != rx likely */
5688 case OPC_BGTZL
: /* 0 > 0 likely */
5689 case OPC_BLTZL
: /* 0 < 0 likely */
5690 /* Skip the instruction in the delay slot */
5691 ctx
->base
.pc_next
+= 4;
5694 ctx
->hflags
|= MIPS_HFLAG_B
;
5697 ctx
->hflags
|= MIPS_HFLAG_BX
;
5701 ctx
->hflags
|= MIPS_HFLAG_B
;
5704 ctx
->hflags
|= MIPS_HFLAG_BR
;
5708 ctx
->hflags
|= MIPS_HFLAG_BR
;
5711 MIPS_INVAL("branch/jump");
5712 generate_exception_end(ctx
, EXCP_RI
);
5718 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5721 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5724 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5727 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5730 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5733 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5736 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5740 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5744 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5747 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5750 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5753 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5756 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5759 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5762 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5764 #if defined(TARGET_MIPS64)
5766 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5770 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5773 ctx
->hflags
|= MIPS_HFLAG_BC
;
5776 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5779 ctx
->hflags
|= MIPS_HFLAG_BL
;
5782 MIPS_INVAL("conditional branch/jump");
5783 generate_exception_end(ctx
, EXCP_RI
);
5788 ctx
->btarget
= btgt
;
5790 switch (delayslot_size
) {
5792 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5795 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5800 int post_delay
= insn_bytes
+ delayslot_size
;
5801 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5803 tcg_gen_movi_tl(cpu_gpr
[blink
],
5804 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5808 if (insn_bytes
== 2)
5809 ctx
->hflags
|= MIPS_HFLAG_B16
;
5815 /* nanoMIPS Branches */
5816 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5818 int rs
, int rt
, int32_t offset
)
5820 target_ulong btgt
= -1;
5821 int bcond_compute
= 0;
5822 TCGv t0
= tcg_temp_new();
5823 TCGv t1
= tcg_temp_new();
5825 /* Load needed operands */
5829 /* Compare two registers */
5831 gen_load_gpr(t0
, rs
);
5832 gen_load_gpr(t1
, rt
);
5835 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5838 /* Compare to zero */
5840 gen_load_gpr(t0
, rs
);
5843 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5846 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5848 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5852 /* Jump to register */
5853 if (offset
!= 0 && offset
!= 16) {
5854 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5855 others are reserved. */
5856 MIPS_INVAL("jump hint");
5857 generate_exception_end(ctx
, EXCP_RI
);
5860 gen_load_gpr(btarget
, rs
);
5863 MIPS_INVAL("branch/jump");
5864 generate_exception_end(ctx
, EXCP_RI
);
5867 if (bcond_compute
== 0) {
5868 /* No condition to be computed */
5870 case OPC_BEQ
: /* rx == rx */
5872 ctx
->hflags
|= MIPS_HFLAG_B
;
5874 case OPC_BGEZAL
: /* 0 >= 0 */
5875 /* Always take and link */
5876 tcg_gen_movi_tl(cpu_gpr
[31],
5877 ctx
->base
.pc_next
+ insn_bytes
);
5878 ctx
->hflags
|= MIPS_HFLAG_B
;
5880 case OPC_BNE
: /* rx != rx */
5881 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5882 /* Skip the instruction in the delay slot */
5883 ctx
->base
.pc_next
+= 4;
5886 ctx
->hflags
|= MIPS_HFLAG_BR
;
5890 tcg_gen_movi_tl(cpu_gpr
[rt
],
5891 ctx
->base
.pc_next
+ insn_bytes
);
5893 ctx
->hflags
|= MIPS_HFLAG_BR
;
5896 MIPS_INVAL("branch/jump");
5897 generate_exception_end(ctx
, EXCP_RI
);
5903 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5906 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5909 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5910 tcg_gen_movi_tl(cpu_gpr
[31],
5911 ctx
->base
.pc_next
+ insn_bytes
);
5914 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5916 ctx
->hflags
|= MIPS_HFLAG_BC
;
5919 MIPS_INVAL("conditional branch/jump");
5920 generate_exception_end(ctx
, EXCP_RI
);
5925 ctx
->btarget
= btgt
;
5928 if (insn_bytes
== 2) {
5929 ctx
->hflags
|= MIPS_HFLAG_B16
;
5936 /* special3 bitfield operations */
5937 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
5938 int rs
, int lsb
, int msb
)
5940 TCGv t0
= tcg_temp_new();
5941 TCGv t1
= tcg_temp_new();
5943 gen_load_gpr(t1
, rs
);
5946 if (lsb
+ msb
> 31) {
5950 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5952 /* The two checks together imply that lsb == 0,
5953 so this is a simple sign-extension. */
5954 tcg_gen_ext32s_tl(t0
, t1
);
5957 #if defined(TARGET_MIPS64)
5966 if (lsb
+ msb
> 63) {
5969 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5976 gen_load_gpr(t0
, rt
);
5977 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5978 tcg_gen_ext32s_tl(t0
, t0
);
5980 #if defined(TARGET_MIPS64)
5991 gen_load_gpr(t0
, rt
);
5992 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5997 MIPS_INVAL("bitops");
5998 generate_exception_end(ctx
, EXCP_RI
);
6003 gen_store_gpr(t0
, rt
);
6008 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6013 /* If no destination, treat it as a NOP. */
6017 t0
= tcg_temp_new();
6018 gen_load_gpr(t0
, rt
);
6022 TCGv t1
= tcg_temp_new();
6023 TCGv t2
= tcg_const_tl(0x00FF00FF);
6025 tcg_gen_shri_tl(t1
, t0
, 8);
6026 tcg_gen_and_tl(t1
, t1
, t2
);
6027 tcg_gen_and_tl(t0
, t0
, t2
);
6028 tcg_gen_shli_tl(t0
, t0
, 8);
6029 tcg_gen_or_tl(t0
, t0
, t1
);
6032 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6036 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6039 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6041 #if defined(TARGET_MIPS64)
6044 TCGv t1
= tcg_temp_new();
6045 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6047 tcg_gen_shri_tl(t1
, t0
, 8);
6048 tcg_gen_and_tl(t1
, t1
, t2
);
6049 tcg_gen_and_tl(t0
, t0
, t2
);
6050 tcg_gen_shli_tl(t0
, t0
, 8);
6051 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6058 TCGv t1
= tcg_temp_new();
6059 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6061 tcg_gen_shri_tl(t1
, t0
, 16);
6062 tcg_gen_and_tl(t1
, t1
, t2
);
6063 tcg_gen_and_tl(t0
, t0
, t2
);
6064 tcg_gen_shli_tl(t0
, t0
, 16);
6065 tcg_gen_or_tl(t0
, t0
, t1
);
6066 tcg_gen_shri_tl(t1
, t0
, 32);
6067 tcg_gen_shli_tl(t0
, t0
, 32);
6068 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6075 MIPS_INVAL("bsfhl");
6076 generate_exception_end(ctx
, EXCP_RI
);
6083 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6092 t0
= tcg_temp_new();
6093 t1
= tcg_temp_new();
6094 gen_load_gpr(t0
, rs
);
6095 gen_load_gpr(t1
, rt
);
6096 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6097 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6098 if (opc
== OPC_LSA
) {
6099 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6108 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6116 t0
= tcg_temp_new();
6117 if (bits
== 0 || bits
== wordsz
) {
6119 gen_load_gpr(t0
, rt
);
6121 gen_load_gpr(t0
, rs
);
6125 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6127 #if defined(TARGET_MIPS64)
6129 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6134 TCGv t1
= tcg_temp_new();
6135 gen_load_gpr(t0
, rt
);
6136 gen_load_gpr(t1
, rs
);
6140 TCGv_i64 t2
= tcg_temp_new_i64();
6141 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6142 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6143 gen_move_low32(cpu_gpr
[rd
], t2
);
6144 tcg_temp_free_i64(t2
);
6147 #if defined(TARGET_MIPS64)
6149 tcg_gen_shli_tl(t0
, t0
, bits
);
6150 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6151 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6161 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6164 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6167 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6170 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6173 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6180 t0
= tcg_temp_new();
6181 gen_load_gpr(t0
, rt
);
6184 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6186 #if defined(TARGET_MIPS64)
6188 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6195 #ifndef CONFIG_USER_ONLY
6196 /* CP0 (MMU and control) */
6197 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6199 TCGv_i64 t0
= tcg_temp_new_i64();
6200 TCGv_i64 t1
= tcg_temp_new_i64();
6202 tcg_gen_ext_tl_i64(t0
, arg
);
6203 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6204 #if defined(TARGET_MIPS64)
6205 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6207 tcg_gen_concat32_i64(t1
, t1
, t0
);
6209 tcg_gen_st_i64(t1
, cpu_env
, off
);
6210 tcg_temp_free_i64(t1
);
6211 tcg_temp_free_i64(t0
);
6214 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6216 TCGv_i64 t0
= tcg_temp_new_i64();
6217 TCGv_i64 t1
= tcg_temp_new_i64();
6219 tcg_gen_ext_tl_i64(t0
, arg
);
6220 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6221 tcg_gen_concat32_i64(t1
, t1
, t0
);
6222 tcg_gen_st_i64(t1
, cpu_env
, off
);
6223 tcg_temp_free_i64(t1
);
6224 tcg_temp_free_i64(t0
);
6227 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6229 TCGv_i64 t0
= tcg_temp_new_i64();
6231 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6232 #if defined(TARGET_MIPS64)
6233 tcg_gen_shri_i64(t0
, t0
, 30);
6235 tcg_gen_shri_i64(t0
, t0
, 32);
6237 gen_move_low32(arg
, t0
);
6238 tcg_temp_free_i64(t0
);
6241 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6243 TCGv_i64 t0
= tcg_temp_new_i64();
6245 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6246 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6247 gen_move_low32(arg
, t0
);
6248 tcg_temp_free_i64(t0
);
6251 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6253 TCGv_i32 t0
= tcg_temp_new_i32();
6255 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6256 tcg_gen_ext_i32_tl(arg
, t0
);
6257 tcg_temp_free_i32(t0
);
6260 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6262 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6263 tcg_gen_ext32s_tl(arg
, arg
);
6266 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6268 TCGv_i32 t0
= tcg_temp_new_i32();
6270 tcg_gen_trunc_tl_i32(t0
, arg
);
6271 tcg_gen_st_i32(t0
, cpu_env
, off
);
6272 tcg_temp_free_i32(t0
);
/*
 * Guard for optional CP0 features: bail out to the enclosing function's
 * 'cp0_unimplemented' label when the condition does not hold.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
6282 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6284 const char *rn
= "invalid";
6290 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6291 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6295 goto cp0_unimplemented
;
6301 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6302 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6306 goto cp0_unimplemented
;
6312 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6313 ctx
->CP0_LLAddr_shift
);
6317 CP0_CHECK(ctx
->mrp
);
6318 gen_helper_mfhc0_maar(arg
, cpu_env
);
6322 goto cp0_unimplemented
;
6331 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6335 goto cp0_unimplemented
;
6339 goto cp0_unimplemented
;
6341 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6345 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6346 tcg_gen_movi_tl(arg
, 0);
6349 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6351 const char *rn
= "invalid";
6352 uint64_t mask
= ctx
->PAMask
>> 36;
6358 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6359 tcg_gen_andi_tl(arg
, arg
, mask
);
6360 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6364 goto cp0_unimplemented
;
6370 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6371 tcg_gen_andi_tl(arg
, arg
, mask
);
6372 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6376 goto cp0_unimplemented
;
6382 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6383 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6384 relevant for modern MIPS cores supporting MTHC0, therefore
6385 treating MTHC0 to LLAddr as NOP. */
6389 CP0_CHECK(ctx
->mrp
);
6390 gen_helper_mthc0_maar(cpu_env
, arg
);
6394 goto cp0_unimplemented
;
6403 tcg_gen_andi_tl(arg
, arg
, mask
);
6404 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6408 goto cp0_unimplemented
;
6412 goto cp0_unimplemented
;
6414 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6417 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6420 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6422 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6423 tcg_gen_movi_tl(arg
, 0);
6425 tcg_gen_movi_tl(arg
, ~0);
6429 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6431 const char *rn
= "invalid";
6434 check_insn(ctx
, ISA_MIPS32
);
6440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6444 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6445 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6449 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6450 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6454 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6455 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6464 goto cp0_unimplemented
;
6470 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6471 gen_helper_mfc0_random(arg
, cpu_env
);
6475 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6480 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6481 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6485 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6490 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6491 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6495 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6496 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6500 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6501 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6502 rn
= "VPEScheFBack";
6505 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6506 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6510 goto cp0_unimplemented
;
6517 TCGv_i64 tmp
= tcg_temp_new_i64();
6518 tcg_gen_ld_i64(tmp
, cpu_env
,
6519 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6520 #if defined(TARGET_MIPS64)
6522 /* Move RI/XI fields to bits 31:30 */
6523 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6524 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6527 gen_move_low32(arg
, tmp
);
6528 tcg_temp_free_i64(tmp
);
6533 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6534 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6538 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6539 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6543 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6544 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6548 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6549 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6553 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6554 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6558 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6559 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6563 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6564 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6568 goto cp0_unimplemented
;
6575 TCGv_i64 tmp
= tcg_temp_new_i64();
6576 tcg_gen_ld_i64(tmp
, cpu_env
,
6577 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6578 #if defined(TARGET_MIPS64)
6580 /* Move RI/XI fields to bits 31:30 */
6581 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6582 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6585 gen_move_low32(arg
, tmp
);
6586 tcg_temp_free_i64(tmp
);
6592 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6593 rn
= "GlobalNumber";
6596 goto cp0_unimplemented
;
6602 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6603 tcg_gen_ext32s_tl(arg
, arg
);
6607 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6608 rn
= "ContextConfig";
6609 goto cp0_unimplemented
;
6611 CP0_CHECK(ctx
->ulri
);
6612 tcg_gen_ld_tl(arg
, cpu_env
,
6613 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6614 tcg_gen_ext32s_tl(arg
, arg
);
6618 goto cp0_unimplemented
;
6624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6628 check_insn(ctx
, ISA_MIPS32R2
);
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6634 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6635 tcg_gen_ext32s_tl(arg
, arg
);
6640 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6641 tcg_gen_ext32s_tl(arg
, arg
);
6646 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6647 tcg_gen_ext32s_tl(arg
, arg
);
6652 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6657 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6666 goto cp0_unimplemented
;
6672 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6676 check_insn(ctx
, ISA_MIPS32R2
);
6677 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6681 check_insn(ctx
, ISA_MIPS32R2
);
6682 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6686 check_insn(ctx
, ISA_MIPS32R2
);
6687 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6691 check_insn(ctx
, ISA_MIPS32R2
);
6692 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6696 check_insn(ctx
, ISA_MIPS32R2
);
6697 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6706 goto cp0_unimplemented
;
6712 check_insn(ctx
, ISA_MIPS32R2
);
6713 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6717 goto cp0_unimplemented
;
6723 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6724 tcg_gen_ext32s_tl(arg
, arg
);
6729 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6739 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6740 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6744 goto cp0_unimplemented
;
6750 /* Mark as an IO operation because we read the time. */
6751 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6754 gen_helper_mfc0_count(arg
, cpu_env
);
6755 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6758 /* Break the TB to be able to take timer interrupts immediately
6759 after reading count. DISAS_STOP isn't sufficient, we need to
6760 ensure we break completely out of translated code. */
6761 gen_save_pc(ctx
->base
.pc_next
+ 4);
6762 ctx
->base
.is_jmp
= DISAS_EXIT
;
6765 /* 6,7 are implementation dependent */
6767 goto cp0_unimplemented
;
6773 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6774 tcg_gen_ext32s_tl(arg
, arg
);
6778 goto cp0_unimplemented
;
6784 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6787 /* 6,7 are implementation dependent */
6789 goto cp0_unimplemented
;
6795 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6799 check_insn(ctx
, ISA_MIPS32R2
);
6800 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6804 check_insn(ctx
, ISA_MIPS32R2
);
6805 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6809 check_insn(ctx
, ISA_MIPS32R2
);
6810 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6814 goto cp0_unimplemented
;
6820 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6824 goto cp0_unimplemented
;
6830 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6831 tcg_gen_ext32s_tl(arg
, arg
);
6835 goto cp0_unimplemented
;
6841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6845 check_insn(ctx
, ISA_MIPS32R2
);
6846 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6847 tcg_gen_ext32s_tl(arg
, arg
);
6851 check_insn(ctx
, ISA_MIPS32R2
);
6852 CP0_CHECK(ctx
->cmgcr
);
6853 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6854 tcg_gen_ext32s_tl(arg
, arg
);
6858 goto cp0_unimplemented
;
6864 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6868 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6872 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6876 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6880 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6884 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6887 /* 6,7 are implementation dependent */
6889 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6893 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6897 goto cp0_unimplemented
;
6903 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6907 CP0_CHECK(ctx
->mrp
);
6908 gen_helper_mfc0_maar(arg
, cpu_env
);
6912 CP0_CHECK(ctx
->mrp
);
6913 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6917 goto cp0_unimplemented
;
6930 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6931 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6935 goto cp0_unimplemented
;
6948 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6949 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6953 goto cp0_unimplemented
;
6959 #if defined(TARGET_MIPS64)
6960 check_insn(ctx
, ISA_MIPS3
);
6961 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6962 tcg_gen_ext32s_tl(arg
, arg
);
6967 goto cp0_unimplemented
;
6971 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6972 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6979 goto cp0_unimplemented
;
6983 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6984 rn
= "'Diagnostic"; /* implementation dependent */
6989 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6993 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
6994 rn
= "TraceControl";
6995 goto cp0_unimplemented
;
6997 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
6998 rn
= "TraceControl2";
6999 goto cp0_unimplemented
;
7001 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7002 rn
= "UserTraceData";
7003 goto cp0_unimplemented
;
7005 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7007 goto cp0_unimplemented
;
7009 goto cp0_unimplemented
;
7016 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7017 tcg_gen_ext32s_tl(arg
, arg
);
7021 goto cp0_unimplemented
;
7027 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7028 rn
= "Performance0";
7031 // gen_helper_mfc0_performance1(arg);
7032 rn
= "Performance1";
7033 goto cp0_unimplemented
;
7035 // gen_helper_mfc0_performance2(arg);
7036 rn
= "Performance2";
7037 goto cp0_unimplemented
;
7039 // gen_helper_mfc0_performance3(arg);
7040 rn
= "Performance3";
7041 goto cp0_unimplemented
;
7043 // gen_helper_mfc0_performance4(arg);
7044 rn
= "Performance4";
7045 goto cp0_unimplemented
;
7047 // gen_helper_mfc0_performance5(arg);
7048 rn
= "Performance5";
7049 goto cp0_unimplemented
;
7051 // gen_helper_mfc0_performance6(arg);
7052 rn
= "Performance6";
7053 goto cp0_unimplemented
;
7055 // gen_helper_mfc0_performance7(arg);
7056 rn
= "Performance7";
7057 goto cp0_unimplemented
;
7059 goto cp0_unimplemented
;
7065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7069 goto cp0_unimplemented
;
7078 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7082 goto cp0_unimplemented
;
7092 TCGv_i64 tmp
= tcg_temp_new_i64();
7093 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7094 gen_move_low32(arg
, tmp
);
7095 tcg_temp_free_i64(tmp
);
7103 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7107 goto cp0_unimplemented
;
7116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7127 goto cp0_unimplemented
;
7133 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7134 tcg_gen_ext32s_tl(arg
, arg
);
7138 goto cp0_unimplemented
;
7145 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7154 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7155 tcg_gen_ld_tl(arg
, cpu_env
,
7156 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7157 tcg_gen_ext32s_tl(arg
, arg
);
7161 goto cp0_unimplemented
;
7165 goto cp0_unimplemented
;
7167 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7171 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7172 gen_mfc0_unimplemented(ctx
, arg
);
7175 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7177 const char *rn
= "invalid";
7180 check_insn(ctx
, ISA_MIPS32
);
7182 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7190 gen_helper_mtc0_index(cpu_env
, arg
);
7194 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7195 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7199 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7204 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7214 goto cp0_unimplemented
;
7224 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7225 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7229 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7230 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7234 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7235 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7239 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7240 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7244 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7245 tcg_gen_st_tl(arg
, cpu_env
,
7246 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7250 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7251 tcg_gen_st_tl(arg
, cpu_env
,
7252 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7253 rn
= "VPEScheFBack";
7256 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7257 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7261 goto cp0_unimplemented
;
7267 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7271 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7272 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7276 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7277 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7281 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7282 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7286 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7287 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7291 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7292 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7296 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7297 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7301 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7302 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7306 goto cp0_unimplemented
;
7312 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7318 rn
= "GlobalNumber";
7321 goto cp0_unimplemented
;
7327 gen_helper_mtc0_context(cpu_env
, arg
);
7331 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7332 rn
= "ContextConfig";
7333 goto cp0_unimplemented
;
7335 CP0_CHECK(ctx
->ulri
);
7336 tcg_gen_st_tl(arg
, cpu_env
,
7337 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7341 goto cp0_unimplemented
;
7347 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7351 check_insn(ctx
, ISA_MIPS32R2
);
7352 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7354 ctx
->base
.is_jmp
= DISAS_STOP
;
7358 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7363 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7368 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7373 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7378 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7383 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7387 goto cp0_unimplemented
;
7393 gen_helper_mtc0_wired(cpu_env
, arg
);
7397 check_insn(ctx
, ISA_MIPS32R2
);
7398 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7402 check_insn(ctx
, ISA_MIPS32R2
);
7403 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7407 check_insn(ctx
, ISA_MIPS32R2
);
7408 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7412 check_insn(ctx
, ISA_MIPS32R2
);
7413 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7417 check_insn(ctx
, ISA_MIPS32R2
);
7418 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7423 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7427 goto cp0_unimplemented
;
7433 check_insn(ctx
, ISA_MIPS32R2
);
7434 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7435 ctx
->base
.is_jmp
= DISAS_STOP
;
7439 goto cp0_unimplemented
;
7461 goto cp0_unimplemented
;
7467 gen_helper_mtc0_count(cpu_env
, arg
);
7470 /* 6,7 are implementation dependent */
7472 goto cp0_unimplemented
;
7478 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7482 goto cp0_unimplemented
;
7488 gen_helper_mtc0_compare(cpu_env
, arg
);
7491 /* 6,7 are implementation dependent */
7493 goto cp0_unimplemented
;
7499 save_cpu_state(ctx
, 1);
7500 gen_helper_mtc0_status(cpu_env
, arg
);
7501 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7502 gen_save_pc(ctx
->base
.pc_next
+ 4);
7503 ctx
->base
.is_jmp
= DISAS_EXIT
;
7507 check_insn(ctx
, ISA_MIPS32R2
);
7508 gen_helper_mtc0_intctl(cpu_env
, arg
);
7509 /* Stop translation as we may have switched the execution mode */
7510 ctx
->base
.is_jmp
= DISAS_STOP
;
7514 check_insn(ctx
, ISA_MIPS32R2
);
7515 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7516 /* Stop translation as we may have switched the execution mode */
7517 ctx
->base
.is_jmp
= DISAS_STOP
;
7521 check_insn(ctx
, ISA_MIPS32R2
);
7522 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7523 /* Stop translation as we may have switched the execution mode */
7524 ctx
->base
.is_jmp
= DISAS_STOP
;
7528 goto cp0_unimplemented
;
7534 save_cpu_state(ctx
, 1);
7535 gen_helper_mtc0_cause(cpu_env
, arg
);
7536 /* Stop translation as we may have triggered an interrupt.
7537 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7538 * translated code to check for pending interrupts. */
7539 gen_save_pc(ctx
->base
.pc_next
+ 4);
7540 ctx
->base
.is_jmp
= DISAS_EXIT
;
7544 goto cp0_unimplemented
;
7550 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7554 goto cp0_unimplemented
;
7564 check_insn(ctx
, ISA_MIPS32R2
);
7565 gen_helper_mtc0_ebase(cpu_env
, arg
);
7569 goto cp0_unimplemented
;
7575 gen_helper_mtc0_config0(cpu_env
, arg
);
7577 /* Stop translation as we may have switched the execution mode */
7578 ctx
->base
.is_jmp
= DISAS_STOP
;
7581 /* ignored, read only */
7585 gen_helper_mtc0_config2(cpu_env
, arg
);
7587 /* Stop translation as we may have switched the execution mode */
7588 ctx
->base
.is_jmp
= DISAS_STOP
;
7591 gen_helper_mtc0_config3(cpu_env
, arg
);
7593 /* Stop translation as we may have switched the execution mode */
7594 ctx
->base
.is_jmp
= DISAS_STOP
;
7597 gen_helper_mtc0_config4(cpu_env
, arg
);
7599 ctx
->base
.is_jmp
= DISAS_STOP
;
7602 gen_helper_mtc0_config5(cpu_env
, arg
);
7604 /* Stop translation as we may have switched the execution mode */
7605 ctx
->base
.is_jmp
= DISAS_STOP
;
7607 /* 6,7 are implementation dependent */
7617 rn
= "Invalid config selector";
7618 goto cp0_unimplemented
;
7624 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7628 CP0_CHECK(ctx
->mrp
);
7629 gen_helper_mtc0_maar(cpu_env
, arg
);
7633 CP0_CHECK(ctx
->mrp
);
7634 gen_helper_mtc0_maari(cpu_env
, arg
);
7638 goto cp0_unimplemented
;
7651 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7652 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7656 goto cp0_unimplemented
;
7669 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7670 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7674 goto cp0_unimplemented
;
7680 #if defined(TARGET_MIPS64)
7681 check_insn(ctx
, ISA_MIPS3
);
7682 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7687 goto cp0_unimplemented
;
7691 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7692 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7695 gen_helper_mtc0_framemask(cpu_env
, arg
);
7699 goto cp0_unimplemented
;
7704 rn
= "Diagnostic"; /* implementation dependent */
7709 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7710 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7711 gen_save_pc(ctx
->base
.pc_next
+ 4);
7712 ctx
->base
.is_jmp
= DISAS_EXIT
;
7716 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7717 rn
= "TraceControl";
7718 /* Stop translation as we may have switched the execution mode */
7719 ctx
->base
.is_jmp
= DISAS_STOP
;
7720 goto cp0_unimplemented
;
7722 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7723 rn
= "TraceControl2";
7724 /* Stop translation as we may have switched the execution mode */
7725 ctx
->base
.is_jmp
= DISAS_STOP
;
7726 goto cp0_unimplemented
;
7728 /* Stop translation as we may have switched the execution mode */
7729 ctx
->base
.is_jmp
= DISAS_STOP
;
7730 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7731 rn
= "UserTraceData";
7732 /* Stop translation as we may have switched the execution mode */
7733 ctx
->base
.is_jmp
= DISAS_STOP
;
7734 goto cp0_unimplemented
;
7736 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7737 /* Stop translation as we may have switched the execution mode */
7738 ctx
->base
.is_jmp
= DISAS_STOP
;
7740 goto cp0_unimplemented
;
7742 goto cp0_unimplemented
;
7749 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7753 goto cp0_unimplemented
;
7759 gen_helper_mtc0_performance0(cpu_env
, arg
);
7760 rn
= "Performance0";
7763 // gen_helper_mtc0_performance1(arg);
7764 rn
= "Performance1";
7765 goto cp0_unimplemented
;
7767 // gen_helper_mtc0_performance2(arg);
7768 rn
= "Performance2";
7769 goto cp0_unimplemented
;
7771 // gen_helper_mtc0_performance3(arg);
7772 rn
= "Performance3";
7773 goto cp0_unimplemented
;
7775 // gen_helper_mtc0_performance4(arg);
7776 rn
= "Performance4";
7777 goto cp0_unimplemented
;
7779 // gen_helper_mtc0_performance5(arg);
7780 rn
= "Performance5";
7781 goto cp0_unimplemented
;
7783 // gen_helper_mtc0_performance6(arg);
7784 rn
= "Performance6";
7785 goto cp0_unimplemented
;
7787 // gen_helper_mtc0_performance7(arg);
7788 rn
= "Performance7";
7789 goto cp0_unimplemented
;
7791 goto cp0_unimplemented
;
7797 gen_helper_mtc0_errctl(cpu_env
, arg
);
7798 ctx
->base
.is_jmp
= DISAS_STOP
;
7802 goto cp0_unimplemented
;
7815 goto cp0_unimplemented
;
7824 gen_helper_mtc0_taglo(cpu_env
, arg
);
7831 gen_helper_mtc0_datalo(cpu_env
, arg
);
7835 goto cp0_unimplemented
;
7844 gen_helper_mtc0_taghi(cpu_env
, arg
);
7851 gen_helper_mtc0_datahi(cpu_env
, arg
);
7856 goto cp0_unimplemented
;
7862 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7866 goto cp0_unimplemented
;
7873 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7882 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7883 tcg_gen_st_tl(arg
, cpu_env
,
7884 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7888 goto cp0_unimplemented
;
7892 goto cp0_unimplemented
;
7894 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7896 /* For simplicity assume that all writes can cause interrupts. */
7897 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7899 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7900 * translated code to check for pending interrupts. */
7901 gen_save_pc(ctx
->base
.pc_next
+ 4);
7902 ctx
->base
.is_jmp
= DISAS_EXIT
;
7907 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7910 #if defined(TARGET_MIPS64)
7911 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7913 const char *rn
= "invalid";
7916 check_insn(ctx
, ISA_MIPS64
);
7922 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7927 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7931 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7932 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7936 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7937 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7942 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7946 goto cp0_unimplemented
;
7952 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7953 gen_helper_mfc0_random(arg
, cpu_env
);
7957 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7962 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7963 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7968 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7972 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7973 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
7977 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7978 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7982 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7983 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7984 rn
= "VPEScheFBack";
7987 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7988 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7992 goto cp0_unimplemented
;
7998 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8002 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8003 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8007 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8008 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8012 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8013 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8018 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8022 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8023 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8027 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8028 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8033 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8037 goto cp0_unimplemented
;
8043 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8048 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8049 rn
= "GlobalNumber";
8052 goto cp0_unimplemented
;
8058 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8062 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8063 rn
= "ContextConfig";
8064 goto cp0_unimplemented
;
8066 CP0_CHECK(ctx
->ulri
);
8067 tcg_gen_ld_tl(arg
, cpu_env
,
8068 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8072 goto cp0_unimplemented
;
8078 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8082 check_insn(ctx
, ISA_MIPS32R2
);
8083 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8088 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8093 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8098 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8103 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8108 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8113 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8117 goto cp0_unimplemented
;
8123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8127 check_insn(ctx
, ISA_MIPS32R2
);
8128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8132 check_insn(ctx
, ISA_MIPS32R2
);
8133 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8137 check_insn(ctx
, ISA_MIPS32R2
);
8138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8142 check_insn(ctx
, ISA_MIPS32R2
);
8143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8147 check_insn(ctx
, ISA_MIPS32R2
);
8148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8153 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8157 goto cp0_unimplemented
;
8163 check_insn(ctx
, ISA_MIPS32R2
);
8164 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8168 goto cp0_unimplemented
;
8174 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8179 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8184 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8190 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8194 goto cp0_unimplemented
;
8200 /* Mark as an IO operation because we read the time. */
8201 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8204 gen_helper_mfc0_count(arg
, cpu_env
);
8205 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8208 /* Break the TB to be able to take timer interrupts immediately
8209 after reading count. DISAS_STOP isn't sufficient, we need to
8210 ensure we break completely out of translated code. */
8211 gen_save_pc(ctx
->base
.pc_next
+ 4);
8212 ctx
->base
.is_jmp
= DISAS_EXIT
;
8215 /* 6,7 are implementation dependent */
8217 goto cp0_unimplemented
;
8223 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8227 goto cp0_unimplemented
;
8233 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8236 /* 6,7 are implementation dependent */
8238 goto cp0_unimplemented
;
8244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8248 check_insn(ctx
, ISA_MIPS32R2
);
8249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8253 check_insn(ctx
, ISA_MIPS32R2
);
8254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8258 check_insn(ctx
, ISA_MIPS32R2
);
8259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8263 goto cp0_unimplemented
;
8269 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8273 goto cp0_unimplemented
;
8279 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8283 goto cp0_unimplemented
;
8289 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8293 check_insn(ctx
, ISA_MIPS32R2
);
8294 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8298 check_insn(ctx
, ISA_MIPS32R2
);
8299 CP0_CHECK(ctx
->cmgcr
);
8300 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8304 goto cp0_unimplemented
;
8310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8314 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8318 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8322 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8326 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8330 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8333 /* 6,7 are implementation dependent */
8335 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8339 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8343 goto cp0_unimplemented
;
8349 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8353 CP0_CHECK(ctx
->mrp
);
8354 gen_helper_dmfc0_maar(arg
, cpu_env
);
8358 CP0_CHECK(ctx
->mrp
);
8359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8363 goto cp0_unimplemented
;
8376 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8377 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8381 goto cp0_unimplemented
;
8394 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8395 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8399 goto cp0_unimplemented
;
8405 check_insn(ctx
, ISA_MIPS3
);
8406 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8410 goto cp0_unimplemented
;
8414 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8415 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8418 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8422 goto cp0_unimplemented
;
8426 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8427 rn
= "'Diagnostic"; /* implementation dependent */
8432 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8436 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8437 rn
= "TraceControl";
8438 goto cp0_unimplemented
;
8440 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8441 rn
= "TraceControl2";
8442 goto cp0_unimplemented
;
8444 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8445 rn
= "UserTraceData";
8446 goto cp0_unimplemented
;
8448 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8450 goto cp0_unimplemented
;
8452 goto cp0_unimplemented
;
8459 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8463 goto cp0_unimplemented
;
8469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8470 rn
= "Performance0";
8473 // gen_helper_dmfc0_performance1(arg);
8474 rn
= "Performance1";
8475 goto cp0_unimplemented
;
8477 // gen_helper_dmfc0_performance2(arg);
8478 rn
= "Performance2";
8479 goto cp0_unimplemented
;
8481 // gen_helper_dmfc0_performance3(arg);
8482 rn
= "Performance3";
8483 goto cp0_unimplemented
;
8485 // gen_helper_dmfc0_performance4(arg);
8486 rn
= "Performance4";
8487 goto cp0_unimplemented
;
8489 // gen_helper_dmfc0_performance5(arg);
8490 rn
= "Performance5";
8491 goto cp0_unimplemented
;
8493 // gen_helper_dmfc0_performance6(arg);
8494 rn
= "Performance6";
8495 goto cp0_unimplemented
;
8497 // gen_helper_dmfc0_performance7(arg);
8498 rn
= "Performance7";
8499 goto cp0_unimplemented
;
8501 goto cp0_unimplemented
;
8507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8511 goto cp0_unimplemented
;
8521 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8525 goto cp0_unimplemented
;
8534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8545 goto cp0_unimplemented
;
8554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8561 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8565 goto cp0_unimplemented
;
8571 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8575 goto cp0_unimplemented
;
8582 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8591 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8592 tcg_gen_ld_tl(arg
, cpu_env
,
8593 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8597 goto cp0_unimplemented
;
8601 goto cp0_unimplemented
;
8603 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8607 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8608 gen_mfc0_unimplemented(ctx
, arg
);
8611 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8613 const char *rn
= "invalid";
8616 check_insn(ctx
, ISA_MIPS64
);
8618 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8626 gen_helper_mtc0_index(cpu_env
, arg
);
8630 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8631 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8635 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8640 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8650 goto cp0_unimplemented
;
8660 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8661 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8665 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8666 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8670 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8671 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8675 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8676 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8680 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8681 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8685 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8686 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8687 rn
= "VPEScheFBack";
8690 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8691 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8695 goto cp0_unimplemented
;
8701 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8705 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8706 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8710 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8711 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8715 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8716 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8720 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8721 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8725 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8726 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8730 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8731 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8735 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8736 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8740 goto cp0_unimplemented
;
8746 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8752 rn
= "GlobalNumber";
8755 goto cp0_unimplemented
;
8761 gen_helper_mtc0_context(cpu_env
, arg
);
8765 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8766 rn
= "ContextConfig";
8767 goto cp0_unimplemented
;
8769 CP0_CHECK(ctx
->ulri
);
8770 tcg_gen_st_tl(arg
, cpu_env
,
8771 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8775 goto cp0_unimplemented
;
8781 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8785 check_insn(ctx
, ISA_MIPS32R2
);
8786 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8791 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8796 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8801 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8806 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8811 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8816 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8820 goto cp0_unimplemented
;
8826 gen_helper_mtc0_wired(cpu_env
, arg
);
8830 check_insn(ctx
, ISA_MIPS32R2
);
8831 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8835 check_insn(ctx
, ISA_MIPS32R2
);
8836 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8840 check_insn(ctx
, ISA_MIPS32R2
);
8841 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8845 check_insn(ctx
, ISA_MIPS32R2
);
8846 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8850 check_insn(ctx
, ISA_MIPS32R2
);
8851 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8856 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8860 goto cp0_unimplemented
;
8866 check_insn(ctx
, ISA_MIPS32R2
);
8867 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8868 ctx
->base
.is_jmp
= DISAS_STOP
;
8872 goto cp0_unimplemented
;
8894 goto cp0_unimplemented
;
8900 gen_helper_mtc0_count(cpu_env
, arg
);
8903 /* 6,7 are implementation dependent */
8905 goto cp0_unimplemented
;
8907 /* Stop translation as we may have switched the execution mode */
8908 ctx
->base
.is_jmp
= DISAS_STOP
;
8913 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8917 goto cp0_unimplemented
;
8923 gen_helper_mtc0_compare(cpu_env
, arg
);
8926 /* 6,7 are implementation dependent */
8928 goto cp0_unimplemented
;
8930 /* Stop translation as we may have switched the execution mode */
8931 ctx
->base
.is_jmp
= DISAS_STOP
;
8936 save_cpu_state(ctx
, 1);
8937 gen_helper_mtc0_status(cpu_env
, arg
);
8938 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8939 gen_save_pc(ctx
->base
.pc_next
+ 4);
8940 ctx
->base
.is_jmp
= DISAS_EXIT
;
8944 check_insn(ctx
, ISA_MIPS32R2
);
8945 gen_helper_mtc0_intctl(cpu_env
, arg
);
8946 /* Stop translation as we may have switched the execution mode */
8947 ctx
->base
.is_jmp
= DISAS_STOP
;
8951 check_insn(ctx
, ISA_MIPS32R2
);
8952 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8953 /* Stop translation as we may have switched the execution mode */
8954 ctx
->base
.is_jmp
= DISAS_STOP
;
8958 check_insn(ctx
, ISA_MIPS32R2
);
8959 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8960 /* Stop translation as we may have switched the execution mode */
8961 ctx
->base
.is_jmp
= DISAS_STOP
;
8965 goto cp0_unimplemented
;
8971 save_cpu_state(ctx
, 1);
8972 gen_helper_mtc0_cause(cpu_env
, arg
);
8973 /* Stop translation as we may have triggered an interrupt.
8974 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8975 * translated code to check for pending interrupts. */
8976 gen_save_pc(ctx
->base
.pc_next
+ 4);
8977 ctx
->base
.is_jmp
= DISAS_EXIT
;
8981 goto cp0_unimplemented
;
8987 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8991 goto cp0_unimplemented
;
9001 check_insn(ctx
, ISA_MIPS32R2
);
9002 gen_helper_mtc0_ebase(cpu_env
, arg
);
9006 goto cp0_unimplemented
;
9012 gen_helper_mtc0_config0(cpu_env
, arg
);
9014 /* Stop translation as we may have switched the execution mode */
9015 ctx
->base
.is_jmp
= DISAS_STOP
;
9018 /* ignored, read only */
9022 gen_helper_mtc0_config2(cpu_env
, arg
);
9024 /* Stop translation as we may have switched the execution mode */
9025 ctx
->base
.is_jmp
= DISAS_STOP
;
9028 gen_helper_mtc0_config3(cpu_env
, arg
);
9030 /* Stop translation as we may have switched the execution mode */
9031 ctx
->base
.is_jmp
= DISAS_STOP
;
9034 /* currently ignored */
9038 gen_helper_mtc0_config5(cpu_env
, arg
);
9040 /* Stop translation as we may have switched the execution mode */
9041 ctx
->base
.is_jmp
= DISAS_STOP
;
9043 /* 6,7 are implementation dependent */
9045 rn
= "Invalid config selector";
9046 goto cp0_unimplemented
;
9052 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9056 CP0_CHECK(ctx
->mrp
);
9057 gen_helper_mtc0_maar(cpu_env
, arg
);
9061 CP0_CHECK(ctx
->mrp
);
9062 gen_helper_mtc0_maari(cpu_env
, arg
);
9066 goto cp0_unimplemented
;
9079 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9080 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9084 goto cp0_unimplemented
;
9097 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9098 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9102 goto cp0_unimplemented
;
9108 check_insn(ctx
, ISA_MIPS3
);
9109 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9113 goto cp0_unimplemented
;
9117 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9118 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9121 gen_helper_mtc0_framemask(cpu_env
, arg
);
9125 goto cp0_unimplemented
;
9130 rn
= "Diagnostic"; /* implementation dependent */
9135 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9136 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9137 gen_save_pc(ctx
->base
.pc_next
+ 4);
9138 ctx
->base
.is_jmp
= DISAS_EXIT
;
9142 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9143 /* Stop translation as we may have switched the execution mode */
9144 ctx
->base
.is_jmp
= DISAS_STOP
;
9145 rn
= "TraceControl";
9146 goto cp0_unimplemented
;
9148 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9149 /* Stop translation as we may have switched the execution mode */
9150 ctx
->base
.is_jmp
= DISAS_STOP
;
9151 rn
= "TraceControl2";
9152 goto cp0_unimplemented
;
9154 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9155 /* Stop translation as we may have switched the execution mode */
9156 ctx
->base
.is_jmp
= DISAS_STOP
;
9157 rn
= "UserTraceData";
9158 goto cp0_unimplemented
;
9160 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9161 /* Stop translation as we may have switched the execution mode */
9162 ctx
->base
.is_jmp
= DISAS_STOP
;
9164 goto cp0_unimplemented
;
9166 goto cp0_unimplemented
;
9173 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9177 goto cp0_unimplemented
;
9183 gen_helper_mtc0_performance0(cpu_env
, arg
);
9184 rn
= "Performance0";
9187 // gen_helper_mtc0_performance1(cpu_env, arg);
9188 rn
= "Performance1";
9189 goto cp0_unimplemented
;
9191 // gen_helper_mtc0_performance2(cpu_env, arg);
9192 rn
= "Performance2";
9193 goto cp0_unimplemented
;
9195 // gen_helper_mtc0_performance3(cpu_env, arg);
9196 rn
= "Performance3";
9197 goto cp0_unimplemented
;
9199 // gen_helper_mtc0_performance4(cpu_env, arg);
9200 rn
= "Performance4";
9201 goto cp0_unimplemented
;
9203 // gen_helper_mtc0_performance5(cpu_env, arg);
9204 rn
= "Performance5";
9205 goto cp0_unimplemented
;
9207 // gen_helper_mtc0_performance6(cpu_env, arg);
9208 rn
= "Performance6";
9209 goto cp0_unimplemented
;
9211 // gen_helper_mtc0_performance7(cpu_env, arg);
9212 rn
= "Performance7";
9213 goto cp0_unimplemented
;
9215 goto cp0_unimplemented
;
9221 gen_helper_mtc0_errctl(cpu_env
, arg
);
9222 ctx
->base
.is_jmp
= DISAS_STOP
;
9226 goto cp0_unimplemented
;
9239 goto cp0_unimplemented
;
9248 gen_helper_mtc0_taglo(cpu_env
, arg
);
9255 gen_helper_mtc0_datalo(cpu_env
, arg
);
9259 goto cp0_unimplemented
;
9268 gen_helper_mtc0_taghi(cpu_env
, arg
);
9275 gen_helper_mtc0_datahi(cpu_env
, arg
);
9280 goto cp0_unimplemented
;
9286 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9290 goto cp0_unimplemented
;
9297 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9306 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9307 tcg_gen_st_tl(arg
, cpu_env
,
9308 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9312 goto cp0_unimplemented
;
9316 goto cp0_unimplemented
;
9318 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9320 /* For simplicity assume that all writes can cause interrupts. */
9321 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9323 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9324 * translated code to check for pending interrupts. */
9325 gen_save_pc(ctx
->base
.pc_next
+ 4);
9326 ctx
->base
.is_jmp
= DISAS_EXIT
;
9331 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9333 #endif /* TARGET_MIPS64 */
9335 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9336 int u
, int sel
, int h
)
9338 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9339 TCGv t0
= tcg_temp_local_new();
9341 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9342 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9343 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9344 tcg_gen_movi_tl(t0
, -1);
9345 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9346 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9347 tcg_gen_movi_tl(t0
, -1);
9353 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9356 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9366 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9369 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9372 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9375 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9378 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9381 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9384 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9387 gen_mfc0(ctx
, t0
, rt
, sel
);
9394 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9397 gen_mfc0(ctx
, t0
, rt
, sel
);
9403 gen_helper_mftc0_status(t0
, cpu_env
);
9406 gen_mfc0(ctx
, t0
, rt
, sel
);
9412 gen_helper_mftc0_cause(t0
, cpu_env
);
9422 gen_helper_mftc0_epc(t0
, cpu_env
);
9432 gen_helper_mftc0_ebase(t0
, cpu_env
);
9449 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9459 gen_helper_mftc0_debug(t0
, cpu_env
);
9462 gen_mfc0(ctx
, t0
, rt
, sel
);
9467 gen_mfc0(ctx
, t0
, rt
, sel
);
9469 } else switch (sel
) {
9470 /* GPR registers. */
9472 gen_helper_1e0i(mftgpr
, t0
, rt
);
9474 /* Auxiliary CPU registers */
9478 gen_helper_1e0i(mftlo
, t0
, 0);
9481 gen_helper_1e0i(mfthi
, t0
, 0);
9484 gen_helper_1e0i(mftacx
, t0
, 0);
9487 gen_helper_1e0i(mftlo
, t0
, 1);
9490 gen_helper_1e0i(mfthi
, t0
, 1);
9493 gen_helper_1e0i(mftacx
, t0
, 1);
9496 gen_helper_1e0i(mftlo
, t0
, 2);
9499 gen_helper_1e0i(mfthi
, t0
, 2);
9502 gen_helper_1e0i(mftacx
, t0
, 2);
9505 gen_helper_1e0i(mftlo
, t0
, 3);
9508 gen_helper_1e0i(mfthi
, t0
, 3);
9511 gen_helper_1e0i(mftacx
, t0
, 3);
9514 gen_helper_mftdsp(t0
, cpu_env
);
9520 /* Floating point (COP1). */
9522 /* XXX: For now we support only a single FPU context. */
9524 TCGv_i32 fp0
= tcg_temp_new_i32();
9526 gen_load_fpr32(ctx
, fp0
, rt
);
9527 tcg_gen_ext_i32_tl(t0
, fp0
);
9528 tcg_temp_free_i32(fp0
);
9530 TCGv_i32 fp0
= tcg_temp_new_i32();
9532 gen_load_fpr32h(ctx
, fp0
, rt
);
9533 tcg_gen_ext_i32_tl(t0
, fp0
);
9534 tcg_temp_free_i32(fp0
);
9538 /* XXX: For now we support only a single FPU context. */
9539 gen_helper_1e0i(cfc1
, t0
, rt
);
9541 /* COP2: Not implemented. */
9548 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9549 gen_store_gpr(t0
, rd
);
9555 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9556 generate_exception_end(ctx
, EXCP_RI
);
9559 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9560 int u
, int sel
, int h
)
9562 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9563 TCGv t0
= tcg_temp_local_new();
9565 gen_load_gpr(t0
, rt
);
9566 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9567 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9568 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9570 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9571 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9578 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9581 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9591 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9594 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9597 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9600 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9603 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9606 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9609 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9612 gen_mtc0(ctx
, t0
, rd
, sel
);
9619 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9622 gen_mtc0(ctx
, t0
, rd
, sel
);
9628 gen_helper_mttc0_status(cpu_env
, t0
);
9631 gen_mtc0(ctx
, t0
, rd
, sel
);
9637 gen_helper_mttc0_cause(cpu_env
, t0
);
9647 gen_helper_mttc0_ebase(cpu_env
, t0
);
9657 gen_helper_mttc0_debug(cpu_env
, t0
);
9660 gen_mtc0(ctx
, t0
, rd
, sel
);
9665 gen_mtc0(ctx
, t0
, rd
, sel
);
9667 } else switch (sel
) {
9668 /* GPR registers. */
9670 gen_helper_0e1i(mttgpr
, t0
, rd
);
9672 /* Auxiliary CPU registers */
9676 gen_helper_0e1i(mttlo
, t0
, 0);
9679 gen_helper_0e1i(mtthi
, t0
, 0);
9682 gen_helper_0e1i(mttacx
, t0
, 0);
9685 gen_helper_0e1i(mttlo
, t0
, 1);
9688 gen_helper_0e1i(mtthi
, t0
, 1);
9691 gen_helper_0e1i(mttacx
, t0
, 1);
9694 gen_helper_0e1i(mttlo
, t0
, 2);
9697 gen_helper_0e1i(mtthi
, t0
, 2);
9700 gen_helper_0e1i(mttacx
, t0
, 2);
9703 gen_helper_0e1i(mttlo
, t0
, 3);
9706 gen_helper_0e1i(mtthi
, t0
, 3);
9709 gen_helper_0e1i(mttacx
, t0
, 3);
9712 gen_helper_mttdsp(cpu_env
, t0
);
9718 /* Floating point (COP1). */
9720 /* XXX: For now we support only a single FPU context. */
9722 TCGv_i32 fp0
= tcg_temp_new_i32();
9724 tcg_gen_trunc_tl_i32(fp0
, t0
);
9725 gen_store_fpr32(ctx
, fp0
, rd
);
9726 tcg_temp_free_i32(fp0
);
9728 TCGv_i32 fp0
= tcg_temp_new_i32();
9730 tcg_gen_trunc_tl_i32(fp0
, t0
);
9731 gen_store_fpr32h(ctx
, fp0
, rd
);
9732 tcg_temp_free_i32(fp0
);
9736 /* XXX: For now we support only a single FPU context. */
9738 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9740 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9741 tcg_temp_free_i32(fs_tmp
);
9743 /* Stop translation as we may have changed hflags */
9744 ctx
->base
.is_jmp
= DISAS_STOP
;
9746 /* COP2: Not implemented. */
9753 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9759 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9760 generate_exception_end(ctx
, EXCP_RI
);
9763 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9765 const char *opn
= "ldst";
9767 check_cp0_enabled(ctx
);
9774 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9779 TCGv t0
= tcg_temp_new();
9781 gen_load_gpr(t0
, rt
);
9782 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9787 #if defined(TARGET_MIPS64)
9789 check_insn(ctx
, ISA_MIPS3
);
9794 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9798 check_insn(ctx
, ISA_MIPS3
);
9800 TCGv t0
= tcg_temp_new();
9802 gen_load_gpr(t0
, rt
);
9803 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9815 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9821 TCGv t0
= tcg_temp_new();
9822 gen_load_gpr(t0
, rt
);
9823 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9829 check_cp0_enabled(ctx
);
9834 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9835 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9839 check_cp0_enabled(ctx
);
9840 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9841 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9846 if (!env
->tlb
->helper_tlbwi
)
9848 gen_helper_tlbwi(cpu_env
);
9853 if (!env
->tlb
->helper_tlbinv
) {
9856 gen_helper_tlbinv(cpu_env
);
9857 } /* treat as nop if TLBINV not supported */
9862 if (!env
->tlb
->helper_tlbinvf
) {
9865 gen_helper_tlbinvf(cpu_env
);
9866 } /* treat as nop if TLBINV not supported */
9870 if (!env
->tlb
->helper_tlbwr
)
9872 gen_helper_tlbwr(cpu_env
);
9876 if (!env
->tlb
->helper_tlbp
)
9878 gen_helper_tlbp(cpu_env
);
9882 if (!env
->tlb
->helper_tlbr
)
9884 gen_helper_tlbr(cpu_env
);
9886 case OPC_ERET
: /* OPC_ERETNC */
9887 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9888 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9891 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9892 if (ctx
->opcode
& (1 << bit_shift
)) {
9895 check_insn(ctx
, ISA_MIPS32R5
);
9896 gen_helper_eretnc(cpu_env
);
9900 check_insn(ctx
, ISA_MIPS2
);
9901 gen_helper_eret(cpu_env
);
9903 ctx
->base
.is_jmp
= DISAS_EXIT
;
9908 check_insn(ctx
, ISA_MIPS32
);
9909 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9910 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9913 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9915 generate_exception_end(ctx
, EXCP_RI
);
9917 gen_helper_deret(cpu_env
);
9918 ctx
->base
.is_jmp
= DISAS_EXIT
;
9923 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
9924 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9925 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9928 /* If we get an exception, we want to restart at next instruction */
9929 ctx
->base
.pc_next
+= 4;
9930 save_cpu_state(ctx
, 1);
9931 ctx
->base
.pc_next
-= 4;
9932 gen_helper_wait(cpu_env
);
9933 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9938 generate_exception_end(ctx
, EXCP_RI
);
9941 (void)opn
; /* avoid a compiler warning */
9943 #endif /* !CONFIG_USER_ONLY */
9945 /* CP1 Branches (before delay slot) */
9946 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9947 int32_t cc
, int32_t offset
)
9949 target_ulong btarget
;
9950 TCGv_i32 t0
= tcg_temp_new_i32();
9952 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9953 generate_exception_end(ctx
, EXCP_RI
);
9958 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
9960 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9964 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9965 tcg_gen_not_i32(t0
, t0
);
9966 tcg_gen_andi_i32(t0
, t0
, 1);
9967 tcg_gen_extu_i32_tl(bcond
, t0
);
9970 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9971 tcg_gen_not_i32(t0
, t0
);
9972 tcg_gen_andi_i32(t0
, t0
, 1);
9973 tcg_gen_extu_i32_tl(bcond
, t0
);
9976 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9977 tcg_gen_andi_i32(t0
, t0
, 1);
9978 tcg_gen_extu_i32_tl(bcond
, t0
);
9981 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9982 tcg_gen_andi_i32(t0
, t0
, 1);
9983 tcg_gen_extu_i32_tl(bcond
, t0
);
9985 ctx
->hflags
|= MIPS_HFLAG_BL
;
9989 TCGv_i32 t1
= tcg_temp_new_i32();
9990 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9991 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9992 tcg_gen_nand_i32(t0
, t0
, t1
);
9993 tcg_temp_free_i32(t1
);
9994 tcg_gen_andi_i32(t0
, t0
, 1);
9995 tcg_gen_extu_i32_tl(bcond
, t0
);
10000 TCGv_i32 t1
= tcg_temp_new_i32();
10001 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10002 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10003 tcg_gen_or_i32(t0
, t0
, t1
);
10004 tcg_temp_free_i32(t1
);
10005 tcg_gen_andi_i32(t0
, t0
, 1);
10006 tcg_gen_extu_i32_tl(bcond
, t0
);
10011 TCGv_i32 t1
= tcg_temp_new_i32();
10012 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10013 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10014 tcg_gen_and_i32(t0
, t0
, t1
);
10015 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10016 tcg_gen_and_i32(t0
, t0
, t1
);
10017 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10018 tcg_gen_nand_i32(t0
, t0
, t1
);
10019 tcg_temp_free_i32(t1
);
10020 tcg_gen_andi_i32(t0
, t0
, 1);
10021 tcg_gen_extu_i32_tl(bcond
, t0
);
10026 TCGv_i32 t1
= tcg_temp_new_i32();
10027 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10028 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10029 tcg_gen_or_i32(t0
, t0
, t1
);
10030 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10031 tcg_gen_or_i32(t0
, t0
, t1
);
10032 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10033 tcg_gen_or_i32(t0
, t0
, t1
);
10034 tcg_temp_free_i32(t1
);
10035 tcg_gen_andi_i32(t0
, t0
, 1);
10036 tcg_gen_extu_i32_tl(bcond
, t0
);
10039 ctx
->hflags
|= MIPS_HFLAG_BC
;
10042 MIPS_INVAL("cp1 cond branch");
10043 generate_exception_end(ctx
, EXCP_RI
);
10046 ctx
->btarget
= btarget
;
10047 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10049 tcg_temp_free_i32(t0
);
10052 /* R6 CP1 Branches */
10053 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10054 int32_t ft
, int32_t offset
,
10055 int delayslot_size
)
10057 target_ulong btarget
;
10058 TCGv_i64 t0
= tcg_temp_new_i64();
10060 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10061 #ifdef MIPS_DEBUG_DISAS
10062 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10063 "\n", ctx
->base
.pc_next
);
10065 generate_exception_end(ctx
, EXCP_RI
);
10069 gen_load_fpr64(ctx
, t0
, ft
);
10070 tcg_gen_andi_i64(t0
, t0
, 1);
10072 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10076 tcg_gen_xori_i64(t0
, t0
, 1);
10077 ctx
->hflags
|= MIPS_HFLAG_BC
;
10080 /* t0 already set */
10081 ctx
->hflags
|= MIPS_HFLAG_BC
;
10084 MIPS_INVAL("cp1 cond branch");
10085 generate_exception_end(ctx
, EXCP_RI
);
10089 tcg_gen_trunc_i64_tl(bcond
, t0
);
10091 ctx
->btarget
= btarget
;
10093 switch (delayslot_size
) {
10095 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10098 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10103 tcg_temp_free_i64(t0
);
10106 /* Coprocessor 1 (FPU) */
10108 #define FOP(func, fmt) (((fmt) << 21) | (func))
10111 OPC_ADD_S
= FOP(0, FMT_S
),
10112 OPC_SUB_S
= FOP(1, FMT_S
),
10113 OPC_MUL_S
= FOP(2, FMT_S
),
10114 OPC_DIV_S
= FOP(3, FMT_S
),
10115 OPC_SQRT_S
= FOP(4, FMT_S
),
10116 OPC_ABS_S
= FOP(5, FMT_S
),
10117 OPC_MOV_S
= FOP(6, FMT_S
),
10118 OPC_NEG_S
= FOP(7, FMT_S
),
10119 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10120 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10121 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10122 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10123 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10124 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10125 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10126 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10127 OPC_SEL_S
= FOP(16, FMT_S
),
10128 OPC_MOVCF_S
= FOP(17, FMT_S
),
10129 OPC_MOVZ_S
= FOP(18, FMT_S
),
10130 OPC_MOVN_S
= FOP(19, FMT_S
),
10131 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10132 OPC_RECIP_S
= FOP(21, FMT_S
),
10133 OPC_RSQRT_S
= FOP(22, FMT_S
),
10134 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10135 OPC_MADDF_S
= FOP(24, FMT_S
),
10136 OPC_MSUBF_S
= FOP(25, FMT_S
),
10137 OPC_RINT_S
= FOP(26, FMT_S
),
10138 OPC_CLASS_S
= FOP(27, FMT_S
),
10139 OPC_MIN_S
= FOP(28, FMT_S
),
10140 OPC_RECIP2_S
= FOP(28, FMT_S
),
10141 OPC_MINA_S
= FOP(29, FMT_S
),
10142 OPC_RECIP1_S
= FOP(29, FMT_S
),
10143 OPC_MAX_S
= FOP(30, FMT_S
),
10144 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10145 OPC_MAXA_S
= FOP(31, FMT_S
),
10146 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10147 OPC_CVT_D_S
= FOP(33, FMT_S
),
10148 OPC_CVT_W_S
= FOP(36, FMT_S
),
10149 OPC_CVT_L_S
= FOP(37, FMT_S
),
10150 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10151 OPC_CMP_F_S
= FOP (48, FMT_S
),
10152 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10153 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10154 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10155 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10156 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10157 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10158 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10159 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10160 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10161 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10162 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10163 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10164 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10165 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10166 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10168 OPC_ADD_D
= FOP(0, FMT_D
),
10169 OPC_SUB_D
= FOP(1, FMT_D
),
10170 OPC_MUL_D
= FOP(2, FMT_D
),
10171 OPC_DIV_D
= FOP(3, FMT_D
),
10172 OPC_SQRT_D
= FOP(4, FMT_D
),
10173 OPC_ABS_D
= FOP(5, FMT_D
),
10174 OPC_MOV_D
= FOP(6, FMT_D
),
10175 OPC_NEG_D
= FOP(7, FMT_D
),
10176 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10177 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10178 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10179 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10180 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10181 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10182 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10183 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10184 OPC_SEL_D
= FOP(16, FMT_D
),
10185 OPC_MOVCF_D
= FOP(17, FMT_D
),
10186 OPC_MOVZ_D
= FOP(18, FMT_D
),
10187 OPC_MOVN_D
= FOP(19, FMT_D
),
10188 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10189 OPC_RECIP_D
= FOP(21, FMT_D
),
10190 OPC_RSQRT_D
= FOP(22, FMT_D
),
10191 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10192 OPC_MADDF_D
= FOP(24, FMT_D
),
10193 OPC_MSUBF_D
= FOP(25, FMT_D
),
10194 OPC_RINT_D
= FOP(26, FMT_D
),
10195 OPC_CLASS_D
= FOP(27, FMT_D
),
10196 OPC_MIN_D
= FOP(28, FMT_D
),
10197 OPC_RECIP2_D
= FOP(28, FMT_D
),
10198 OPC_MINA_D
= FOP(29, FMT_D
),
10199 OPC_RECIP1_D
= FOP(29, FMT_D
),
10200 OPC_MAX_D
= FOP(30, FMT_D
),
10201 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10202 OPC_MAXA_D
= FOP(31, FMT_D
),
10203 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10204 OPC_CVT_S_D
= FOP(32, FMT_D
),
10205 OPC_CVT_W_D
= FOP(36, FMT_D
),
10206 OPC_CVT_L_D
= FOP(37, FMT_D
),
10207 OPC_CMP_F_D
= FOP (48, FMT_D
),
10208 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10209 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10210 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10211 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10212 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10213 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10214 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10215 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10216 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10217 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10218 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10219 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10220 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10221 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10222 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10224 OPC_CVT_S_W
= FOP(32, FMT_W
),
10225 OPC_CVT_D_W
= FOP(33, FMT_W
),
10226 OPC_CVT_S_L
= FOP(32, FMT_L
),
10227 OPC_CVT_D_L
= FOP(33, FMT_L
),
10228 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10230 OPC_ADD_PS
= FOP(0, FMT_PS
),
10231 OPC_SUB_PS
= FOP(1, FMT_PS
),
10232 OPC_MUL_PS
= FOP(2, FMT_PS
),
10233 OPC_DIV_PS
= FOP(3, FMT_PS
),
10234 OPC_ABS_PS
= FOP(5, FMT_PS
),
10235 OPC_MOV_PS
= FOP(6, FMT_PS
),
10236 OPC_NEG_PS
= FOP(7, FMT_PS
),
10237 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10238 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10239 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10240 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10241 OPC_MULR_PS
= FOP(26, FMT_PS
),
10242 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10243 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10244 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10245 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10247 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10248 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10249 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10250 OPC_PLL_PS
= FOP(44, FMT_PS
),
10251 OPC_PLU_PS
= FOP(45, FMT_PS
),
10252 OPC_PUL_PS
= FOP(46, FMT_PS
),
10253 OPC_PUU_PS
= FOP(47, FMT_PS
),
10254 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10255 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10256 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10257 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10258 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10259 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10260 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10261 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10262 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10263 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10264 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10265 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10266 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10267 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10268 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10269 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10273 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10274 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10275 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10276 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10277 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10278 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10279 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10280 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10281 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10282 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10283 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10284 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10285 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10286 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10287 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10288 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10289 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10290 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10291 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10292 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10293 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10294 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10296 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10297 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10298 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10299 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10300 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10301 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10302 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10303 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10304 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10305 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10306 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10307 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10308 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10309 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10310 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10311 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10312 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10313 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10314 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10315 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10316 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10317 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10319 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10321 TCGv t0
= tcg_temp_new();
10326 TCGv_i32 fp0
= tcg_temp_new_i32();
10328 gen_load_fpr32(ctx
, fp0
, fs
);
10329 tcg_gen_ext_i32_tl(t0
, fp0
);
10330 tcg_temp_free_i32(fp0
);
10332 gen_store_gpr(t0
, rt
);
10335 gen_load_gpr(t0
, rt
);
10337 TCGv_i32 fp0
= tcg_temp_new_i32();
10339 tcg_gen_trunc_tl_i32(fp0
, t0
);
10340 gen_store_fpr32(ctx
, fp0
, fs
);
10341 tcg_temp_free_i32(fp0
);
10345 gen_helper_1e0i(cfc1
, t0
, fs
);
10346 gen_store_gpr(t0
, rt
);
10349 gen_load_gpr(t0
, rt
);
10350 save_cpu_state(ctx
, 0);
10352 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10354 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10355 tcg_temp_free_i32(fs_tmp
);
10357 /* Stop translation as we may have changed hflags */
10358 ctx
->base
.is_jmp
= DISAS_STOP
;
10360 #if defined(TARGET_MIPS64)
10362 gen_load_fpr64(ctx
, t0
, fs
);
10363 gen_store_gpr(t0
, rt
);
10366 gen_load_gpr(t0
, rt
);
10367 gen_store_fpr64(ctx
, t0
, fs
);
10372 TCGv_i32 fp0
= tcg_temp_new_i32();
10374 gen_load_fpr32h(ctx
, fp0
, fs
);
10375 tcg_gen_ext_i32_tl(t0
, fp0
);
10376 tcg_temp_free_i32(fp0
);
10378 gen_store_gpr(t0
, rt
);
10381 gen_load_gpr(t0
, rt
);
10383 TCGv_i32 fp0
= tcg_temp_new_i32();
10385 tcg_gen_trunc_tl_i32(fp0
, t0
);
10386 gen_store_fpr32h(ctx
, fp0
, fs
);
10387 tcg_temp_free_i32(fp0
);
10391 MIPS_INVAL("cp1 move");
10392 generate_exception_end(ctx
, EXCP_RI
);
10400 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10407 /* Treat as NOP. */
10412 cond
= TCG_COND_EQ
;
10414 cond
= TCG_COND_NE
;
10416 l1
= gen_new_label();
10417 t0
= tcg_temp_new_i32();
10418 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10419 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10420 tcg_temp_free_i32(t0
);
10422 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10424 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10429 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10433 TCGv_i32 t0
= tcg_temp_new_i32();
10434 TCGLabel
*l1
= gen_new_label();
10437 cond
= TCG_COND_EQ
;
10439 cond
= TCG_COND_NE
;
10441 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10442 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10443 gen_load_fpr32(ctx
, t0
, fs
);
10444 gen_store_fpr32(ctx
, t0
, fd
);
10446 tcg_temp_free_i32(t0
);
10449 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10452 TCGv_i32 t0
= tcg_temp_new_i32();
10454 TCGLabel
*l1
= gen_new_label();
10457 cond
= TCG_COND_EQ
;
10459 cond
= TCG_COND_NE
;
10461 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10462 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10463 tcg_temp_free_i32(t0
);
10464 fp0
= tcg_temp_new_i64();
10465 gen_load_fpr64(ctx
, fp0
, fs
);
10466 gen_store_fpr64(ctx
, fp0
, fd
);
10467 tcg_temp_free_i64(fp0
);
10471 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10475 TCGv_i32 t0
= tcg_temp_new_i32();
10476 TCGLabel
*l1
= gen_new_label();
10477 TCGLabel
*l2
= gen_new_label();
10480 cond
= TCG_COND_EQ
;
10482 cond
= TCG_COND_NE
;
10484 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10485 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10486 gen_load_fpr32(ctx
, t0
, fs
);
10487 gen_store_fpr32(ctx
, t0
, fd
);
10490 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10491 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10492 gen_load_fpr32h(ctx
, t0
, fs
);
10493 gen_store_fpr32h(ctx
, t0
, fd
);
10494 tcg_temp_free_i32(t0
);
10498 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10501 TCGv_i32 t1
= tcg_const_i32(0);
10502 TCGv_i32 fp0
= tcg_temp_new_i32();
10503 TCGv_i32 fp1
= tcg_temp_new_i32();
10504 TCGv_i32 fp2
= tcg_temp_new_i32();
10505 gen_load_fpr32(ctx
, fp0
, fd
);
10506 gen_load_fpr32(ctx
, fp1
, ft
);
10507 gen_load_fpr32(ctx
, fp2
, fs
);
10511 tcg_gen_andi_i32(fp0
, fp0
, 1);
10512 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10515 tcg_gen_andi_i32(fp1
, fp1
, 1);
10516 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10519 tcg_gen_andi_i32(fp1
, fp1
, 1);
10520 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10523 MIPS_INVAL("gen_sel_s");
10524 generate_exception_end(ctx
, EXCP_RI
);
10528 gen_store_fpr32(ctx
, fp0
, fd
);
10529 tcg_temp_free_i32(fp2
);
10530 tcg_temp_free_i32(fp1
);
10531 tcg_temp_free_i32(fp0
);
10532 tcg_temp_free_i32(t1
);
10535 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10538 TCGv_i64 t1
= tcg_const_i64(0);
10539 TCGv_i64 fp0
= tcg_temp_new_i64();
10540 TCGv_i64 fp1
= tcg_temp_new_i64();
10541 TCGv_i64 fp2
= tcg_temp_new_i64();
10542 gen_load_fpr64(ctx
, fp0
, fd
);
10543 gen_load_fpr64(ctx
, fp1
, ft
);
10544 gen_load_fpr64(ctx
, fp2
, fs
);
10548 tcg_gen_andi_i64(fp0
, fp0
, 1);
10549 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10552 tcg_gen_andi_i64(fp1
, fp1
, 1);
10553 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10556 tcg_gen_andi_i64(fp1
, fp1
, 1);
10557 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10560 MIPS_INVAL("gen_sel_d");
10561 generate_exception_end(ctx
, EXCP_RI
);
10565 gen_store_fpr64(ctx
, fp0
, fd
);
10566 tcg_temp_free_i64(fp2
);
10567 tcg_temp_free_i64(fp1
);
10568 tcg_temp_free_i64(fp0
);
10569 tcg_temp_free_i64(t1
);
10572 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10573 int ft
, int fs
, int fd
, int cc
)
10575 uint32_t func
= ctx
->opcode
& 0x3f;
10579 TCGv_i32 fp0
= tcg_temp_new_i32();
10580 TCGv_i32 fp1
= tcg_temp_new_i32();
10582 gen_load_fpr32(ctx
, fp0
, fs
);
10583 gen_load_fpr32(ctx
, fp1
, ft
);
10584 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10585 tcg_temp_free_i32(fp1
);
10586 gen_store_fpr32(ctx
, fp0
, fd
);
10587 tcg_temp_free_i32(fp0
);
10592 TCGv_i32 fp0
= tcg_temp_new_i32();
10593 TCGv_i32 fp1
= tcg_temp_new_i32();
10595 gen_load_fpr32(ctx
, fp0
, fs
);
10596 gen_load_fpr32(ctx
, fp1
, ft
);
10597 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10598 tcg_temp_free_i32(fp1
);
10599 gen_store_fpr32(ctx
, fp0
, fd
);
10600 tcg_temp_free_i32(fp0
);
10605 TCGv_i32 fp0
= tcg_temp_new_i32();
10606 TCGv_i32 fp1
= tcg_temp_new_i32();
10608 gen_load_fpr32(ctx
, fp0
, fs
);
10609 gen_load_fpr32(ctx
, fp1
, ft
);
10610 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10611 tcg_temp_free_i32(fp1
);
10612 gen_store_fpr32(ctx
, fp0
, fd
);
10613 tcg_temp_free_i32(fp0
);
10618 TCGv_i32 fp0
= tcg_temp_new_i32();
10619 TCGv_i32 fp1
= tcg_temp_new_i32();
10621 gen_load_fpr32(ctx
, fp0
, fs
);
10622 gen_load_fpr32(ctx
, fp1
, ft
);
10623 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10624 tcg_temp_free_i32(fp1
);
10625 gen_store_fpr32(ctx
, fp0
, fd
);
10626 tcg_temp_free_i32(fp0
);
10631 TCGv_i32 fp0
= tcg_temp_new_i32();
10633 gen_load_fpr32(ctx
, fp0
, fs
);
10634 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10635 gen_store_fpr32(ctx
, fp0
, fd
);
10636 tcg_temp_free_i32(fp0
);
10641 TCGv_i32 fp0
= tcg_temp_new_i32();
10643 gen_load_fpr32(ctx
, fp0
, fs
);
10644 if (ctx
->abs2008
) {
10645 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10647 gen_helper_float_abs_s(fp0
, fp0
);
10649 gen_store_fpr32(ctx
, fp0
, fd
);
10650 tcg_temp_free_i32(fp0
);
10655 TCGv_i32 fp0
= tcg_temp_new_i32();
10657 gen_load_fpr32(ctx
, fp0
, fs
);
10658 gen_store_fpr32(ctx
, fp0
, fd
);
10659 tcg_temp_free_i32(fp0
);
10664 TCGv_i32 fp0
= tcg_temp_new_i32();
10666 gen_load_fpr32(ctx
, fp0
, fs
);
10667 if (ctx
->abs2008
) {
10668 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10670 gen_helper_float_chs_s(fp0
, fp0
);
10672 gen_store_fpr32(ctx
, fp0
, fd
);
10673 tcg_temp_free_i32(fp0
);
10676 case OPC_ROUND_L_S
:
10677 check_cp1_64bitmode(ctx
);
10679 TCGv_i32 fp32
= tcg_temp_new_i32();
10680 TCGv_i64 fp64
= tcg_temp_new_i64();
10682 gen_load_fpr32(ctx
, fp32
, fs
);
10683 if (ctx
->nan2008
) {
10684 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10686 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10688 tcg_temp_free_i32(fp32
);
10689 gen_store_fpr64(ctx
, fp64
, fd
);
10690 tcg_temp_free_i64(fp64
);
10693 case OPC_TRUNC_L_S
:
10694 check_cp1_64bitmode(ctx
);
10696 TCGv_i32 fp32
= tcg_temp_new_i32();
10697 TCGv_i64 fp64
= tcg_temp_new_i64();
10699 gen_load_fpr32(ctx
, fp32
, fs
);
10700 if (ctx
->nan2008
) {
10701 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10703 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10705 tcg_temp_free_i32(fp32
);
10706 gen_store_fpr64(ctx
, fp64
, fd
);
10707 tcg_temp_free_i64(fp64
);
10711 check_cp1_64bitmode(ctx
);
10713 TCGv_i32 fp32
= tcg_temp_new_i32();
10714 TCGv_i64 fp64
= tcg_temp_new_i64();
10716 gen_load_fpr32(ctx
, fp32
, fs
);
10717 if (ctx
->nan2008
) {
10718 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10720 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10722 tcg_temp_free_i32(fp32
);
10723 gen_store_fpr64(ctx
, fp64
, fd
);
10724 tcg_temp_free_i64(fp64
);
10727 case OPC_FLOOR_L_S
:
10728 check_cp1_64bitmode(ctx
);
10730 TCGv_i32 fp32
= tcg_temp_new_i32();
10731 TCGv_i64 fp64
= tcg_temp_new_i64();
10733 gen_load_fpr32(ctx
, fp32
, fs
);
10734 if (ctx
->nan2008
) {
10735 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10737 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10739 tcg_temp_free_i32(fp32
);
10740 gen_store_fpr64(ctx
, fp64
, fd
);
10741 tcg_temp_free_i64(fp64
);
10744 case OPC_ROUND_W_S
:
10746 TCGv_i32 fp0
= tcg_temp_new_i32();
10748 gen_load_fpr32(ctx
, fp0
, fs
);
10749 if (ctx
->nan2008
) {
10750 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10752 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10754 gen_store_fpr32(ctx
, fp0
, fd
);
10755 tcg_temp_free_i32(fp0
);
10758 case OPC_TRUNC_W_S
:
10760 TCGv_i32 fp0
= tcg_temp_new_i32();
10762 gen_load_fpr32(ctx
, fp0
, fs
);
10763 if (ctx
->nan2008
) {
10764 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10766 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10768 gen_store_fpr32(ctx
, fp0
, fd
);
10769 tcg_temp_free_i32(fp0
);
10774 TCGv_i32 fp0
= tcg_temp_new_i32();
10776 gen_load_fpr32(ctx
, fp0
, fs
);
10777 if (ctx
->nan2008
) {
10778 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10780 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10782 gen_store_fpr32(ctx
, fp0
, fd
);
10783 tcg_temp_free_i32(fp0
);
10786 case OPC_FLOOR_W_S
:
10788 TCGv_i32 fp0
= tcg_temp_new_i32();
10790 gen_load_fpr32(ctx
, fp0
, fs
);
10791 if (ctx
->nan2008
) {
10792 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10794 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10796 gen_store_fpr32(ctx
, fp0
, fd
);
10797 tcg_temp_free_i32(fp0
);
10801 check_insn(ctx
, ISA_MIPS32R6
);
10802 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10805 check_insn(ctx
, ISA_MIPS32R6
);
10806 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10809 check_insn(ctx
, ISA_MIPS32R6
);
10810 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10813 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10814 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10817 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10819 TCGLabel
*l1
= gen_new_label();
10823 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10825 fp0
= tcg_temp_new_i32();
10826 gen_load_fpr32(ctx
, fp0
, fs
);
10827 gen_store_fpr32(ctx
, fp0
, fd
);
10828 tcg_temp_free_i32(fp0
);
10833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10835 TCGLabel
*l1
= gen_new_label();
10839 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10840 fp0
= tcg_temp_new_i32();
10841 gen_load_fpr32(ctx
, fp0
, fs
);
10842 gen_store_fpr32(ctx
, fp0
, fd
);
10843 tcg_temp_free_i32(fp0
);
10850 TCGv_i32 fp0
= tcg_temp_new_i32();
10852 gen_load_fpr32(ctx
, fp0
, fs
);
10853 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10854 gen_store_fpr32(ctx
, fp0
, fd
);
10855 tcg_temp_free_i32(fp0
);
10860 TCGv_i32 fp0
= tcg_temp_new_i32();
10862 gen_load_fpr32(ctx
, fp0
, fs
);
10863 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10864 gen_store_fpr32(ctx
, fp0
, fd
);
10865 tcg_temp_free_i32(fp0
);
10869 check_insn(ctx
, ISA_MIPS32R6
);
10871 TCGv_i32 fp0
= tcg_temp_new_i32();
10872 TCGv_i32 fp1
= tcg_temp_new_i32();
10873 TCGv_i32 fp2
= tcg_temp_new_i32();
10874 gen_load_fpr32(ctx
, fp0
, fs
);
10875 gen_load_fpr32(ctx
, fp1
, ft
);
10876 gen_load_fpr32(ctx
, fp2
, fd
);
10877 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10878 gen_store_fpr32(ctx
, fp2
, fd
);
10879 tcg_temp_free_i32(fp2
);
10880 tcg_temp_free_i32(fp1
);
10881 tcg_temp_free_i32(fp0
);
10885 check_insn(ctx
, ISA_MIPS32R6
);
10887 TCGv_i32 fp0
= tcg_temp_new_i32();
10888 TCGv_i32 fp1
= tcg_temp_new_i32();
10889 TCGv_i32 fp2
= tcg_temp_new_i32();
10890 gen_load_fpr32(ctx
, fp0
, fs
);
10891 gen_load_fpr32(ctx
, fp1
, ft
);
10892 gen_load_fpr32(ctx
, fp2
, fd
);
10893 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10894 gen_store_fpr32(ctx
, fp2
, fd
);
10895 tcg_temp_free_i32(fp2
);
10896 tcg_temp_free_i32(fp1
);
10897 tcg_temp_free_i32(fp0
);
10901 check_insn(ctx
, ISA_MIPS32R6
);
10903 TCGv_i32 fp0
= tcg_temp_new_i32();
10904 gen_load_fpr32(ctx
, fp0
, fs
);
10905 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10906 gen_store_fpr32(ctx
, fp0
, fd
);
10907 tcg_temp_free_i32(fp0
);
10911 check_insn(ctx
, ISA_MIPS32R6
);
10913 TCGv_i32 fp0
= tcg_temp_new_i32();
10914 gen_load_fpr32(ctx
, fp0
, fs
);
10915 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10916 gen_store_fpr32(ctx
, fp0
, fd
);
10917 tcg_temp_free_i32(fp0
);
10920 case OPC_MIN_S
: /* OPC_RECIP2_S */
10921 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10923 TCGv_i32 fp0
= tcg_temp_new_i32();
10924 TCGv_i32 fp1
= tcg_temp_new_i32();
10925 TCGv_i32 fp2
= tcg_temp_new_i32();
10926 gen_load_fpr32(ctx
, fp0
, fs
);
10927 gen_load_fpr32(ctx
, fp1
, ft
);
10928 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10929 gen_store_fpr32(ctx
, fp2
, fd
);
10930 tcg_temp_free_i32(fp2
);
10931 tcg_temp_free_i32(fp1
);
10932 tcg_temp_free_i32(fp0
);
10935 check_cp1_64bitmode(ctx
);
10937 TCGv_i32 fp0
= tcg_temp_new_i32();
10938 TCGv_i32 fp1
= tcg_temp_new_i32();
10940 gen_load_fpr32(ctx
, fp0
, fs
);
10941 gen_load_fpr32(ctx
, fp1
, ft
);
10942 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10943 tcg_temp_free_i32(fp1
);
10944 gen_store_fpr32(ctx
, fp0
, fd
);
10945 tcg_temp_free_i32(fp0
);
10949 case OPC_MINA_S
: /* OPC_RECIP1_S */
10950 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10952 TCGv_i32 fp0
= tcg_temp_new_i32();
10953 TCGv_i32 fp1
= tcg_temp_new_i32();
10954 TCGv_i32 fp2
= tcg_temp_new_i32();
10955 gen_load_fpr32(ctx
, fp0
, fs
);
10956 gen_load_fpr32(ctx
, fp1
, ft
);
10957 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10958 gen_store_fpr32(ctx
, fp2
, fd
);
10959 tcg_temp_free_i32(fp2
);
10960 tcg_temp_free_i32(fp1
);
10961 tcg_temp_free_i32(fp0
);
10964 check_cp1_64bitmode(ctx
);
10966 TCGv_i32 fp0
= tcg_temp_new_i32();
10968 gen_load_fpr32(ctx
, fp0
, fs
);
10969 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10970 gen_store_fpr32(ctx
, fp0
, fd
);
10971 tcg_temp_free_i32(fp0
);
10975 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10976 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10978 TCGv_i32 fp0
= tcg_temp_new_i32();
10979 TCGv_i32 fp1
= tcg_temp_new_i32();
10980 gen_load_fpr32(ctx
, fp0
, fs
);
10981 gen_load_fpr32(ctx
, fp1
, ft
);
10982 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10983 gen_store_fpr32(ctx
, fp1
, fd
);
10984 tcg_temp_free_i32(fp1
);
10985 tcg_temp_free_i32(fp0
);
10988 check_cp1_64bitmode(ctx
);
10990 TCGv_i32 fp0
= tcg_temp_new_i32();
10992 gen_load_fpr32(ctx
, fp0
, fs
);
10993 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10994 gen_store_fpr32(ctx
, fp0
, fd
);
10995 tcg_temp_free_i32(fp0
);
10999 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11002 TCGv_i32 fp0
= tcg_temp_new_i32();
11003 TCGv_i32 fp1
= tcg_temp_new_i32();
11004 gen_load_fpr32(ctx
, fp0
, fs
);
11005 gen_load_fpr32(ctx
, fp1
, ft
);
11006 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11007 gen_store_fpr32(ctx
, fp1
, fd
);
11008 tcg_temp_free_i32(fp1
);
11009 tcg_temp_free_i32(fp0
);
11012 check_cp1_64bitmode(ctx
);
11014 TCGv_i32 fp0
= tcg_temp_new_i32();
11015 TCGv_i32 fp1
= tcg_temp_new_i32();
11017 gen_load_fpr32(ctx
, fp0
, fs
);
11018 gen_load_fpr32(ctx
, fp1
, ft
);
11019 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11020 tcg_temp_free_i32(fp1
);
11021 gen_store_fpr32(ctx
, fp0
, fd
);
11022 tcg_temp_free_i32(fp0
);
11027 check_cp1_registers(ctx
, fd
);
11029 TCGv_i32 fp32
= tcg_temp_new_i32();
11030 TCGv_i64 fp64
= tcg_temp_new_i64();
11032 gen_load_fpr32(ctx
, fp32
, fs
);
11033 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11034 tcg_temp_free_i32(fp32
);
11035 gen_store_fpr64(ctx
, fp64
, fd
);
11036 tcg_temp_free_i64(fp64
);
11041 TCGv_i32 fp0
= tcg_temp_new_i32();
11043 gen_load_fpr32(ctx
, fp0
, fs
);
11044 if (ctx
->nan2008
) {
11045 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11047 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11049 gen_store_fpr32(ctx
, fp0
, fd
);
11050 tcg_temp_free_i32(fp0
);
11054 check_cp1_64bitmode(ctx
);
11056 TCGv_i32 fp32
= tcg_temp_new_i32();
11057 TCGv_i64 fp64
= tcg_temp_new_i64();
11059 gen_load_fpr32(ctx
, fp32
, fs
);
11060 if (ctx
->nan2008
) {
11061 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11063 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11065 tcg_temp_free_i32(fp32
);
11066 gen_store_fpr64(ctx
, fp64
, fd
);
11067 tcg_temp_free_i64(fp64
);
11073 TCGv_i64 fp64
= tcg_temp_new_i64();
11074 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11075 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11077 gen_load_fpr32(ctx
, fp32_0
, fs
);
11078 gen_load_fpr32(ctx
, fp32_1
, ft
);
11079 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11080 tcg_temp_free_i32(fp32_1
);
11081 tcg_temp_free_i32(fp32_0
);
11082 gen_store_fpr64(ctx
, fp64
, fd
);
11083 tcg_temp_free_i64(fp64
);
11089 case OPC_CMP_UEQ_S
:
11090 case OPC_CMP_OLT_S
:
11091 case OPC_CMP_ULT_S
:
11092 case OPC_CMP_OLE_S
:
11093 case OPC_CMP_ULE_S
:
11095 case OPC_CMP_NGLE_S
:
11096 case OPC_CMP_SEQ_S
:
11097 case OPC_CMP_NGL_S
:
11099 case OPC_CMP_NGE_S
:
11101 case OPC_CMP_NGT_S
:
11102 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11103 if (ctx
->opcode
& (1 << 6)) {
11104 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11106 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11110 check_cp1_registers(ctx
, fs
| ft
| fd
);
11112 TCGv_i64 fp0
= tcg_temp_new_i64();
11113 TCGv_i64 fp1
= tcg_temp_new_i64();
11115 gen_load_fpr64(ctx
, fp0
, fs
);
11116 gen_load_fpr64(ctx
, fp1
, ft
);
11117 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11118 tcg_temp_free_i64(fp1
);
11119 gen_store_fpr64(ctx
, fp0
, fd
);
11120 tcg_temp_free_i64(fp0
);
11124 check_cp1_registers(ctx
, fs
| ft
| fd
);
11126 TCGv_i64 fp0
= tcg_temp_new_i64();
11127 TCGv_i64 fp1
= tcg_temp_new_i64();
11129 gen_load_fpr64(ctx
, fp0
, fs
);
11130 gen_load_fpr64(ctx
, fp1
, ft
);
11131 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11132 tcg_temp_free_i64(fp1
);
11133 gen_store_fpr64(ctx
, fp0
, fd
);
11134 tcg_temp_free_i64(fp0
);
11138 check_cp1_registers(ctx
, fs
| ft
| fd
);
11140 TCGv_i64 fp0
= tcg_temp_new_i64();
11141 TCGv_i64 fp1
= tcg_temp_new_i64();
11143 gen_load_fpr64(ctx
, fp0
, fs
);
11144 gen_load_fpr64(ctx
, fp1
, ft
);
11145 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11146 tcg_temp_free_i64(fp1
);
11147 gen_store_fpr64(ctx
, fp0
, fd
);
11148 tcg_temp_free_i64(fp0
);
11152 check_cp1_registers(ctx
, fs
| ft
| fd
);
11154 TCGv_i64 fp0
= tcg_temp_new_i64();
11155 TCGv_i64 fp1
= tcg_temp_new_i64();
11157 gen_load_fpr64(ctx
, fp0
, fs
);
11158 gen_load_fpr64(ctx
, fp1
, ft
);
11159 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11160 tcg_temp_free_i64(fp1
);
11161 gen_store_fpr64(ctx
, fp0
, fd
);
11162 tcg_temp_free_i64(fp0
);
11166 check_cp1_registers(ctx
, fs
| fd
);
11168 TCGv_i64 fp0
= tcg_temp_new_i64();
11170 gen_load_fpr64(ctx
, fp0
, fs
);
11171 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11172 gen_store_fpr64(ctx
, fp0
, fd
);
11173 tcg_temp_free_i64(fp0
);
11177 check_cp1_registers(ctx
, fs
| fd
);
11179 TCGv_i64 fp0
= tcg_temp_new_i64();
11181 gen_load_fpr64(ctx
, fp0
, fs
);
11182 if (ctx
->abs2008
) {
11183 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11185 gen_helper_float_abs_d(fp0
, fp0
);
11187 gen_store_fpr64(ctx
, fp0
, fd
);
11188 tcg_temp_free_i64(fp0
);
11192 check_cp1_registers(ctx
, fs
| fd
);
11194 TCGv_i64 fp0
= tcg_temp_new_i64();
11196 gen_load_fpr64(ctx
, fp0
, fs
);
11197 gen_store_fpr64(ctx
, fp0
, fd
);
11198 tcg_temp_free_i64(fp0
);
11202 check_cp1_registers(ctx
, fs
| fd
);
11204 TCGv_i64 fp0
= tcg_temp_new_i64();
11206 gen_load_fpr64(ctx
, fp0
, fs
);
11207 if (ctx
->abs2008
) {
11208 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11210 gen_helper_float_chs_d(fp0
, fp0
);
11212 gen_store_fpr64(ctx
, fp0
, fd
);
11213 tcg_temp_free_i64(fp0
);
11216 case OPC_ROUND_L_D
:
11217 check_cp1_64bitmode(ctx
);
11219 TCGv_i64 fp0
= tcg_temp_new_i64();
11221 gen_load_fpr64(ctx
, fp0
, fs
);
11222 if (ctx
->nan2008
) {
11223 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11225 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11227 gen_store_fpr64(ctx
, fp0
, fd
);
11228 tcg_temp_free_i64(fp0
);
11231 case OPC_TRUNC_L_D
:
11232 check_cp1_64bitmode(ctx
);
11234 TCGv_i64 fp0
= tcg_temp_new_i64();
11236 gen_load_fpr64(ctx
, fp0
, fs
);
11237 if (ctx
->nan2008
) {
11238 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11240 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11242 gen_store_fpr64(ctx
, fp0
, fd
);
11243 tcg_temp_free_i64(fp0
);
11247 check_cp1_64bitmode(ctx
);
11249 TCGv_i64 fp0
= tcg_temp_new_i64();
11251 gen_load_fpr64(ctx
, fp0
, fs
);
11252 if (ctx
->nan2008
) {
11253 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11255 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11257 gen_store_fpr64(ctx
, fp0
, fd
);
11258 tcg_temp_free_i64(fp0
);
11261 case OPC_FLOOR_L_D
:
11262 check_cp1_64bitmode(ctx
);
11264 TCGv_i64 fp0
= tcg_temp_new_i64();
11266 gen_load_fpr64(ctx
, fp0
, fs
);
11267 if (ctx
->nan2008
) {
11268 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11270 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11272 gen_store_fpr64(ctx
, fp0
, fd
);
11273 tcg_temp_free_i64(fp0
);
11276 case OPC_ROUND_W_D
:
11277 check_cp1_registers(ctx
, fs
);
11279 TCGv_i32 fp32
= tcg_temp_new_i32();
11280 TCGv_i64 fp64
= tcg_temp_new_i64();
11282 gen_load_fpr64(ctx
, fp64
, fs
);
11283 if (ctx
->nan2008
) {
11284 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11286 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11288 tcg_temp_free_i64(fp64
);
11289 gen_store_fpr32(ctx
, fp32
, fd
);
11290 tcg_temp_free_i32(fp32
);
11293 case OPC_TRUNC_W_D
:
11294 check_cp1_registers(ctx
, fs
);
11296 TCGv_i32 fp32
= tcg_temp_new_i32();
11297 TCGv_i64 fp64
= tcg_temp_new_i64();
11299 gen_load_fpr64(ctx
, fp64
, fs
);
11300 if (ctx
->nan2008
) {
11301 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11303 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11305 tcg_temp_free_i64(fp64
);
11306 gen_store_fpr32(ctx
, fp32
, fd
);
11307 tcg_temp_free_i32(fp32
);
11311 check_cp1_registers(ctx
, fs
);
11313 TCGv_i32 fp32
= tcg_temp_new_i32();
11314 TCGv_i64 fp64
= tcg_temp_new_i64();
11316 gen_load_fpr64(ctx
, fp64
, fs
);
11317 if (ctx
->nan2008
) {
11318 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11320 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11322 tcg_temp_free_i64(fp64
);
11323 gen_store_fpr32(ctx
, fp32
, fd
);
11324 tcg_temp_free_i32(fp32
);
11327 case OPC_FLOOR_W_D
:
11328 check_cp1_registers(ctx
, fs
);
11330 TCGv_i32 fp32
= tcg_temp_new_i32();
11331 TCGv_i64 fp64
= tcg_temp_new_i64();
11333 gen_load_fpr64(ctx
, fp64
, fs
);
11334 if (ctx
->nan2008
) {
11335 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11337 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11339 tcg_temp_free_i64(fp64
);
11340 gen_store_fpr32(ctx
, fp32
, fd
);
11341 tcg_temp_free_i32(fp32
);
11345 check_insn(ctx
, ISA_MIPS32R6
);
11346 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11349 check_insn(ctx
, ISA_MIPS32R6
);
11350 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11353 check_insn(ctx
, ISA_MIPS32R6
);
11354 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11358 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11363 TCGLabel
*l1
= gen_new_label();
11367 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11369 fp0
= tcg_temp_new_i64();
11370 gen_load_fpr64(ctx
, fp0
, fs
);
11371 gen_store_fpr64(ctx
, fp0
, fd
);
11372 tcg_temp_free_i64(fp0
);
11377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11379 TCGLabel
*l1
= gen_new_label();
11383 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11384 fp0
= tcg_temp_new_i64();
11385 gen_load_fpr64(ctx
, fp0
, fs
);
11386 gen_store_fpr64(ctx
, fp0
, fd
);
11387 tcg_temp_free_i64(fp0
);
11393 check_cp1_registers(ctx
, fs
| fd
);
11395 TCGv_i64 fp0
= tcg_temp_new_i64();
11397 gen_load_fpr64(ctx
, fp0
, fs
);
11398 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11399 gen_store_fpr64(ctx
, fp0
, fd
);
11400 tcg_temp_free_i64(fp0
);
11404 check_cp1_registers(ctx
, fs
| fd
);
11406 TCGv_i64 fp0
= tcg_temp_new_i64();
11408 gen_load_fpr64(ctx
, fp0
, fs
);
11409 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11410 gen_store_fpr64(ctx
, fp0
, fd
);
11411 tcg_temp_free_i64(fp0
);
11415 check_insn(ctx
, ISA_MIPS32R6
);
11417 TCGv_i64 fp0
= tcg_temp_new_i64();
11418 TCGv_i64 fp1
= tcg_temp_new_i64();
11419 TCGv_i64 fp2
= tcg_temp_new_i64();
11420 gen_load_fpr64(ctx
, fp0
, fs
);
11421 gen_load_fpr64(ctx
, fp1
, ft
);
11422 gen_load_fpr64(ctx
, fp2
, fd
);
11423 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11424 gen_store_fpr64(ctx
, fp2
, fd
);
11425 tcg_temp_free_i64(fp2
);
11426 tcg_temp_free_i64(fp1
);
11427 tcg_temp_free_i64(fp0
);
11431 check_insn(ctx
, ISA_MIPS32R6
);
11433 TCGv_i64 fp0
= tcg_temp_new_i64();
11434 TCGv_i64 fp1
= tcg_temp_new_i64();
11435 TCGv_i64 fp2
= tcg_temp_new_i64();
11436 gen_load_fpr64(ctx
, fp0
, fs
);
11437 gen_load_fpr64(ctx
, fp1
, ft
);
11438 gen_load_fpr64(ctx
, fp2
, fd
);
11439 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11440 gen_store_fpr64(ctx
, fp2
, fd
);
11441 tcg_temp_free_i64(fp2
);
11442 tcg_temp_free_i64(fp1
);
11443 tcg_temp_free_i64(fp0
);
11447 check_insn(ctx
, ISA_MIPS32R6
);
11449 TCGv_i64 fp0
= tcg_temp_new_i64();
11450 gen_load_fpr64(ctx
, fp0
, fs
);
11451 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11452 gen_store_fpr64(ctx
, fp0
, fd
);
11453 tcg_temp_free_i64(fp0
);
11457 check_insn(ctx
, ISA_MIPS32R6
);
11459 TCGv_i64 fp0
= tcg_temp_new_i64();
11460 gen_load_fpr64(ctx
, fp0
, fs
);
11461 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11462 gen_store_fpr64(ctx
, fp0
, fd
);
11463 tcg_temp_free_i64(fp0
);
11466 case OPC_MIN_D
: /* OPC_RECIP2_D */
11467 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11469 TCGv_i64 fp0
= tcg_temp_new_i64();
11470 TCGv_i64 fp1
= tcg_temp_new_i64();
11471 gen_load_fpr64(ctx
, fp0
, fs
);
11472 gen_load_fpr64(ctx
, fp1
, ft
);
11473 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11474 gen_store_fpr64(ctx
, fp1
, fd
);
11475 tcg_temp_free_i64(fp1
);
11476 tcg_temp_free_i64(fp0
);
11479 check_cp1_64bitmode(ctx
);
11481 TCGv_i64 fp0
= tcg_temp_new_i64();
11482 TCGv_i64 fp1
= tcg_temp_new_i64();
11484 gen_load_fpr64(ctx
, fp0
, fs
);
11485 gen_load_fpr64(ctx
, fp1
, ft
);
11486 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11487 tcg_temp_free_i64(fp1
);
11488 gen_store_fpr64(ctx
, fp0
, fd
);
11489 tcg_temp_free_i64(fp0
);
11493 case OPC_MINA_D
: /* OPC_RECIP1_D */
11494 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11496 TCGv_i64 fp0
= tcg_temp_new_i64();
11497 TCGv_i64 fp1
= tcg_temp_new_i64();
11498 gen_load_fpr64(ctx
, fp0
, fs
);
11499 gen_load_fpr64(ctx
, fp1
, ft
);
11500 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11501 gen_store_fpr64(ctx
, fp1
, fd
);
11502 tcg_temp_free_i64(fp1
);
11503 tcg_temp_free_i64(fp0
);
11506 check_cp1_64bitmode(ctx
);
11508 TCGv_i64 fp0
= tcg_temp_new_i64();
11510 gen_load_fpr64(ctx
, fp0
, fs
);
11511 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11512 gen_store_fpr64(ctx
, fp0
, fd
);
11513 tcg_temp_free_i64(fp0
);
11517 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11518 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11520 TCGv_i64 fp0
= tcg_temp_new_i64();
11521 TCGv_i64 fp1
= tcg_temp_new_i64();
11522 gen_load_fpr64(ctx
, fp0
, fs
);
11523 gen_load_fpr64(ctx
, fp1
, ft
);
11524 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11525 gen_store_fpr64(ctx
, fp1
, fd
);
11526 tcg_temp_free_i64(fp1
);
11527 tcg_temp_free_i64(fp0
);
11530 check_cp1_64bitmode(ctx
);
11532 TCGv_i64 fp0
= tcg_temp_new_i64();
11534 gen_load_fpr64(ctx
, fp0
, fs
);
11535 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11536 gen_store_fpr64(ctx
, fp0
, fd
);
11537 tcg_temp_free_i64(fp0
);
11541 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11542 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11544 TCGv_i64 fp0
= tcg_temp_new_i64();
11545 TCGv_i64 fp1
= tcg_temp_new_i64();
11546 gen_load_fpr64(ctx
, fp0
, fs
);
11547 gen_load_fpr64(ctx
, fp1
, ft
);
11548 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11549 gen_store_fpr64(ctx
, fp1
, fd
);
11550 tcg_temp_free_i64(fp1
);
11551 tcg_temp_free_i64(fp0
);
11554 check_cp1_64bitmode(ctx
);
11556 TCGv_i64 fp0
= tcg_temp_new_i64();
11557 TCGv_i64 fp1
= tcg_temp_new_i64();
11559 gen_load_fpr64(ctx
, fp0
, fs
);
11560 gen_load_fpr64(ctx
, fp1
, ft
);
11561 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11562 tcg_temp_free_i64(fp1
);
11563 gen_store_fpr64(ctx
, fp0
, fd
);
11564 tcg_temp_free_i64(fp0
);
11571 case OPC_CMP_UEQ_D
:
11572 case OPC_CMP_OLT_D
:
11573 case OPC_CMP_ULT_D
:
11574 case OPC_CMP_OLE_D
:
11575 case OPC_CMP_ULE_D
:
11577 case OPC_CMP_NGLE_D
:
11578 case OPC_CMP_SEQ_D
:
11579 case OPC_CMP_NGL_D
:
11581 case OPC_CMP_NGE_D
:
11583 case OPC_CMP_NGT_D
:
11584 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11585 if (ctx
->opcode
& (1 << 6)) {
11586 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11588 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11592 check_cp1_registers(ctx
, fs
);
11594 TCGv_i32 fp32
= tcg_temp_new_i32();
11595 TCGv_i64 fp64
= tcg_temp_new_i64();
11597 gen_load_fpr64(ctx
, fp64
, fs
);
11598 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11599 tcg_temp_free_i64(fp64
);
11600 gen_store_fpr32(ctx
, fp32
, fd
);
11601 tcg_temp_free_i32(fp32
);
11605 check_cp1_registers(ctx
, fs
);
11607 TCGv_i32 fp32
= tcg_temp_new_i32();
11608 TCGv_i64 fp64
= tcg_temp_new_i64();
11610 gen_load_fpr64(ctx
, fp64
, fs
);
11611 if (ctx
->nan2008
) {
11612 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11614 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11616 tcg_temp_free_i64(fp64
);
11617 gen_store_fpr32(ctx
, fp32
, fd
);
11618 tcg_temp_free_i32(fp32
);
11622 check_cp1_64bitmode(ctx
);
11624 TCGv_i64 fp0
= tcg_temp_new_i64();
11626 gen_load_fpr64(ctx
, fp0
, fs
);
11627 if (ctx
->nan2008
) {
11628 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11630 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11632 gen_store_fpr64(ctx
, fp0
, fd
);
11633 tcg_temp_free_i64(fp0
);
11638 TCGv_i32 fp0
= tcg_temp_new_i32();
11640 gen_load_fpr32(ctx
, fp0
, fs
);
11641 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11642 gen_store_fpr32(ctx
, fp0
, fd
);
11643 tcg_temp_free_i32(fp0
);
11647 check_cp1_registers(ctx
, fd
);
11649 TCGv_i32 fp32
= tcg_temp_new_i32();
11650 TCGv_i64 fp64
= tcg_temp_new_i64();
11652 gen_load_fpr32(ctx
, fp32
, fs
);
11653 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11654 tcg_temp_free_i32(fp32
);
11655 gen_store_fpr64(ctx
, fp64
, fd
);
11656 tcg_temp_free_i64(fp64
);
11660 check_cp1_64bitmode(ctx
);
11662 TCGv_i32 fp32
= tcg_temp_new_i32();
11663 TCGv_i64 fp64
= tcg_temp_new_i64();
11665 gen_load_fpr64(ctx
, fp64
, fs
);
11666 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11667 tcg_temp_free_i64(fp64
);
11668 gen_store_fpr32(ctx
, fp32
, fd
);
11669 tcg_temp_free_i32(fp32
);
11673 check_cp1_64bitmode(ctx
);
11675 TCGv_i64 fp0
= tcg_temp_new_i64();
11677 gen_load_fpr64(ctx
, fp0
, fs
);
11678 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11679 gen_store_fpr64(ctx
, fp0
, fd
);
11680 tcg_temp_free_i64(fp0
);
11683 case OPC_CVT_PS_PW
:
11686 TCGv_i64 fp0
= tcg_temp_new_i64();
11688 gen_load_fpr64(ctx
, fp0
, fs
);
11689 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11690 gen_store_fpr64(ctx
, fp0
, fd
);
11691 tcg_temp_free_i64(fp0
);
11697 TCGv_i64 fp0
= tcg_temp_new_i64();
11698 TCGv_i64 fp1
= tcg_temp_new_i64();
11700 gen_load_fpr64(ctx
, fp0
, fs
);
11701 gen_load_fpr64(ctx
, fp1
, ft
);
11702 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11703 tcg_temp_free_i64(fp1
);
11704 gen_store_fpr64(ctx
, fp0
, fd
);
11705 tcg_temp_free_i64(fp0
);
11711 TCGv_i64 fp0
= tcg_temp_new_i64();
11712 TCGv_i64 fp1
= tcg_temp_new_i64();
11714 gen_load_fpr64(ctx
, fp0
, fs
);
11715 gen_load_fpr64(ctx
, fp1
, ft
);
11716 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11717 tcg_temp_free_i64(fp1
);
11718 gen_store_fpr64(ctx
, fp0
, fd
);
11719 tcg_temp_free_i64(fp0
);
11725 TCGv_i64 fp0
= tcg_temp_new_i64();
11726 TCGv_i64 fp1
= tcg_temp_new_i64();
11728 gen_load_fpr64(ctx
, fp0
, fs
);
11729 gen_load_fpr64(ctx
, fp1
, ft
);
11730 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11731 tcg_temp_free_i64(fp1
);
11732 gen_store_fpr64(ctx
, fp0
, fd
);
11733 tcg_temp_free_i64(fp0
);
11739 TCGv_i64 fp0
= tcg_temp_new_i64();
11741 gen_load_fpr64(ctx
, fp0
, fs
);
11742 gen_helper_float_abs_ps(fp0
, fp0
);
11743 gen_store_fpr64(ctx
, fp0
, fd
);
11744 tcg_temp_free_i64(fp0
);
11750 TCGv_i64 fp0
= tcg_temp_new_i64();
11752 gen_load_fpr64(ctx
, fp0
, fs
);
11753 gen_store_fpr64(ctx
, fp0
, fd
);
11754 tcg_temp_free_i64(fp0
);
11760 TCGv_i64 fp0
= tcg_temp_new_i64();
11762 gen_load_fpr64(ctx
, fp0
, fs
);
11763 gen_helper_float_chs_ps(fp0
, fp0
);
11764 gen_store_fpr64(ctx
, fp0
, fd
);
11765 tcg_temp_free_i64(fp0
);
11770 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11775 TCGLabel
*l1
= gen_new_label();
11779 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11780 fp0
= tcg_temp_new_i64();
11781 gen_load_fpr64(ctx
, fp0
, fs
);
11782 gen_store_fpr64(ctx
, fp0
, fd
);
11783 tcg_temp_free_i64(fp0
);
11790 TCGLabel
*l1
= gen_new_label();
11794 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11795 fp0
= tcg_temp_new_i64();
11796 gen_load_fpr64(ctx
, fp0
, fs
);
11797 gen_store_fpr64(ctx
, fp0
, fd
);
11798 tcg_temp_free_i64(fp0
);
11806 TCGv_i64 fp0
= tcg_temp_new_i64();
11807 TCGv_i64 fp1
= tcg_temp_new_i64();
11809 gen_load_fpr64(ctx
, fp0
, ft
);
11810 gen_load_fpr64(ctx
, fp1
, fs
);
11811 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11812 tcg_temp_free_i64(fp1
);
11813 gen_store_fpr64(ctx
, fp0
, fd
);
11814 tcg_temp_free_i64(fp0
);
11820 TCGv_i64 fp0
= tcg_temp_new_i64();
11821 TCGv_i64 fp1
= tcg_temp_new_i64();
11823 gen_load_fpr64(ctx
, fp0
, ft
);
11824 gen_load_fpr64(ctx
, fp1
, fs
);
11825 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11826 tcg_temp_free_i64(fp1
);
11827 gen_store_fpr64(ctx
, fp0
, fd
);
11828 tcg_temp_free_i64(fp0
);
11831 case OPC_RECIP2_PS
:
11834 TCGv_i64 fp0
= tcg_temp_new_i64();
11835 TCGv_i64 fp1
= tcg_temp_new_i64();
11837 gen_load_fpr64(ctx
, fp0
, fs
);
11838 gen_load_fpr64(ctx
, fp1
, ft
);
11839 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11840 tcg_temp_free_i64(fp1
);
11841 gen_store_fpr64(ctx
, fp0
, fd
);
11842 tcg_temp_free_i64(fp0
);
11845 case OPC_RECIP1_PS
:
11848 TCGv_i64 fp0
= tcg_temp_new_i64();
11850 gen_load_fpr64(ctx
, fp0
, fs
);
11851 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11852 gen_store_fpr64(ctx
, fp0
, fd
);
11853 tcg_temp_free_i64(fp0
);
11856 case OPC_RSQRT1_PS
:
11859 TCGv_i64 fp0
= tcg_temp_new_i64();
11861 gen_load_fpr64(ctx
, fp0
, fs
);
11862 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11863 gen_store_fpr64(ctx
, fp0
, fd
);
11864 tcg_temp_free_i64(fp0
);
11867 case OPC_RSQRT2_PS
:
11870 TCGv_i64 fp0
= tcg_temp_new_i64();
11871 TCGv_i64 fp1
= tcg_temp_new_i64();
11873 gen_load_fpr64(ctx
, fp0
, fs
);
11874 gen_load_fpr64(ctx
, fp1
, ft
);
11875 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11876 tcg_temp_free_i64(fp1
);
11877 gen_store_fpr64(ctx
, fp0
, fd
);
11878 tcg_temp_free_i64(fp0
);
11882 check_cp1_64bitmode(ctx
);
11884 TCGv_i32 fp0
= tcg_temp_new_i32();
11886 gen_load_fpr32h(ctx
, fp0
, fs
);
11887 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11888 gen_store_fpr32(ctx
, fp0
, fd
);
11889 tcg_temp_free_i32(fp0
);
11892 case OPC_CVT_PW_PS
:
11895 TCGv_i64 fp0
= tcg_temp_new_i64();
11897 gen_load_fpr64(ctx
, fp0
, fs
);
11898 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11899 gen_store_fpr64(ctx
, fp0
, fd
);
11900 tcg_temp_free_i64(fp0
);
11904 check_cp1_64bitmode(ctx
);
11906 TCGv_i32 fp0
= tcg_temp_new_i32();
11908 gen_load_fpr32(ctx
, fp0
, fs
);
11909 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11910 gen_store_fpr32(ctx
, fp0
, fd
);
11911 tcg_temp_free_i32(fp0
);
11917 TCGv_i32 fp0
= tcg_temp_new_i32();
11918 TCGv_i32 fp1
= tcg_temp_new_i32();
11920 gen_load_fpr32(ctx
, fp0
, fs
);
11921 gen_load_fpr32(ctx
, fp1
, ft
);
11922 gen_store_fpr32h(ctx
, fp0
, fd
);
11923 gen_store_fpr32(ctx
, fp1
, fd
);
11924 tcg_temp_free_i32(fp0
);
11925 tcg_temp_free_i32(fp1
);
11931 TCGv_i32 fp0
= tcg_temp_new_i32();
11932 TCGv_i32 fp1
= tcg_temp_new_i32();
11934 gen_load_fpr32(ctx
, fp0
, fs
);
11935 gen_load_fpr32h(ctx
, fp1
, ft
);
11936 gen_store_fpr32(ctx
, fp1
, fd
);
11937 gen_store_fpr32h(ctx
, fp0
, fd
);
11938 tcg_temp_free_i32(fp0
);
11939 tcg_temp_free_i32(fp1
);
11945 TCGv_i32 fp0
= tcg_temp_new_i32();
11946 TCGv_i32 fp1
= tcg_temp_new_i32();
11948 gen_load_fpr32h(ctx
, fp0
, fs
);
11949 gen_load_fpr32(ctx
, fp1
, ft
);
11950 gen_store_fpr32(ctx
, fp1
, fd
);
11951 gen_store_fpr32h(ctx
, fp0
, fd
);
11952 tcg_temp_free_i32(fp0
);
11953 tcg_temp_free_i32(fp1
);
11959 TCGv_i32 fp0
= tcg_temp_new_i32();
11960 TCGv_i32 fp1
= tcg_temp_new_i32();
11962 gen_load_fpr32h(ctx
, fp0
, fs
);
11963 gen_load_fpr32h(ctx
, fp1
, ft
);
11964 gen_store_fpr32(ctx
, fp1
, fd
);
11965 gen_store_fpr32h(ctx
, fp0
, fd
);
11966 tcg_temp_free_i32(fp0
);
11967 tcg_temp_free_i32(fp1
);
11971 case OPC_CMP_UN_PS
:
11972 case OPC_CMP_EQ_PS
:
11973 case OPC_CMP_UEQ_PS
:
11974 case OPC_CMP_OLT_PS
:
11975 case OPC_CMP_ULT_PS
:
11976 case OPC_CMP_OLE_PS
:
11977 case OPC_CMP_ULE_PS
:
11978 case OPC_CMP_SF_PS
:
11979 case OPC_CMP_NGLE_PS
:
11980 case OPC_CMP_SEQ_PS
:
11981 case OPC_CMP_NGL_PS
:
11982 case OPC_CMP_LT_PS
:
11983 case OPC_CMP_NGE_PS
:
11984 case OPC_CMP_LE_PS
:
11985 case OPC_CMP_NGT_PS
:
11986 if (ctx
->opcode
& (1 << 6)) {
11987 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
11989 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
11993 MIPS_INVAL("farith");
11994 generate_exception_end(ctx
, EXCP_RI
);
11999 /* Coprocessor 3 (FPU) */
12000 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12001 int fd
, int fs
, int base
, int index
)
12003 TCGv t0
= tcg_temp_new();
12006 gen_load_gpr(t0
, index
);
12007 } else if (index
== 0) {
12008 gen_load_gpr(t0
, base
);
12010 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12012 /* Don't do NOP if destination is zero: we must perform the actual
12018 TCGv_i32 fp0
= tcg_temp_new_i32();
12020 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12021 tcg_gen_trunc_tl_i32(fp0
, t0
);
12022 gen_store_fpr32(ctx
, fp0
, fd
);
12023 tcg_temp_free_i32(fp0
);
12028 check_cp1_registers(ctx
, fd
);
12030 TCGv_i64 fp0
= tcg_temp_new_i64();
12031 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12032 gen_store_fpr64(ctx
, fp0
, fd
);
12033 tcg_temp_free_i64(fp0
);
12037 check_cp1_64bitmode(ctx
);
12038 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12040 TCGv_i64 fp0
= tcg_temp_new_i64();
12042 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12043 gen_store_fpr64(ctx
, fp0
, fd
);
12044 tcg_temp_free_i64(fp0
);
12050 TCGv_i32 fp0
= tcg_temp_new_i32();
12051 gen_load_fpr32(ctx
, fp0
, fs
);
12052 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12053 tcg_temp_free_i32(fp0
);
12058 check_cp1_registers(ctx
, fs
);
12060 TCGv_i64 fp0
= tcg_temp_new_i64();
12061 gen_load_fpr64(ctx
, fp0
, fs
);
12062 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12063 tcg_temp_free_i64(fp0
);
12067 check_cp1_64bitmode(ctx
);
12068 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12070 TCGv_i64 fp0
= tcg_temp_new_i64();
12071 gen_load_fpr64(ctx
, fp0
, fs
);
12072 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12073 tcg_temp_free_i64(fp0
);
12080 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12081 int fd
, int fr
, int fs
, int ft
)
12087 TCGv t0
= tcg_temp_local_new();
12088 TCGv_i32 fp
= tcg_temp_new_i32();
12089 TCGv_i32 fph
= tcg_temp_new_i32();
12090 TCGLabel
*l1
= gen_new_label();
12091 TCGLabel
*l2
= gen_new_label();
12093 gen_load_gpr(t0
, fr
);
12094 tcg_gen_andi_tl(t0
, t0
, 0x7);
12096 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12097 gen_load_fpr32(ctx
, fp
, fs
);
12098 gen_load_fpr32h(ctx
, fph
, fs
);
12099 gen_store_fpr32(ctx
, fp
, fd
);
12100 gen_store_fpr32h(ctx
, fph
, fd
);
12103 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12105 #ifdef TARGET_WORDS_BIGENDIAN
12106 gen_load_fpr32(ctx
, fp
, fs
);
12107 gen_load_fpr32h(ctx
, fph
, ft
);
12108 gen_store_fpr32h(ctx
, fp
, fd
);
12109 gen_store_fpr32(ctx
, fph
, fd
);
12111 gen_load_fpr32h(ctx
, fph
, fs
);
12112 gen_load_fpr32(ctx
, fp
, ft
);
12113 gen_store_fpr32(ctx
, fph
, fd
);
12114 gen_store_fpr32h(ctx
, fp
, fd
);
12117 tcg_temp_free_i32(fp
);
12118 tcg_temp_free_i32(fph
);
12124 TCGv_i32 fp0
= tcg_temp_new_i32();
12125 TCGv_i32 fp1
= tcg_temp_new_i32();
12126 TCGv_i32 fp2
= tcg_temp_new_i32();
12128 gen_load_fpr32(ctx
, fp0
, fs
);
12129 gen_load_fpr32(ctx
, fp1
, ft
);
12130 gen_load_fpr32(ctx
, fp2
, fr
);
12131 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12132 tcg_temp_free_i32(fp0
);
12133 tcg_temp_free_i32(fp1
);
12134 gen_store_fpr32(ctx
, fp2
, fd
);
12135 tcg_temp_free_i32(fp2
);
12140 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12142 TCGv_i64 fp0
= tcg_temp_new_i64();
12143 TCGv_i64 fp1
= tcg_temp_new_i64();
12144 TCGv_i64 fp2
= tcg_temp_new_i64();
12146 gen_load_fpr64(ctx
, fp0
, fs
);
12147 gen_load_fpr64(ctx
, fp1
, ft
);
12148 gen_load_fpr64(ctx
, fp2
, fr
);
12149 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12150 tcg_temp_free_i64(fp0
);
12151 tcg_temp_free_i64(fp1
);
12152 gen_store_fpr64(ctx
, fp2
, fd
);
12153 tcg_temp_free_i64(fp2
);
12159 TCGv_i64 fp0
= tcg_temp_new_i64();
12160 TCGv_i64 fp1
= tcg_temp_new_i64();
12161 TCGv_i64 fp2
= tcg_temp_new_i64();
12163 gen_load_fpr64(ctx
, fp0
, fs
);
12164 gen_load_fpr64(ctx
, fp1
, ft
);
12165 gen_load_fpr64(ctx
, fp2
, fr
);
12166 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12167 tcg_temp_free_i64(fp0
);
12168 tcg_temp_free_i64(fp1
);
12169 gen_store_fpr64(ctx
, fp2
, fd
);
12170 tcg_temp_free_i64(fp2
);
12176 TCGv_i32 fp0
= tcg_temp_new_i32();
12177 TCGv_i32 fp1
= tcg_temp_new_i32();
12178 TCGv_i32 fp2
= tcg_temp_new_i32();
12180 gen_load_fpr32(ctx
, fp0
, fs
);
12181 gen_load_fpr32(ctx
, fp1
, ft
);
12182 gen_load_fpr32(ctx
, fp2
, fr
);
12183 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12184 tcg_temp_free_i32(fp0
);
12185 tcg_temp_free_i32(fp1
);
12186 gen_store_fpr32(ctx
, fp2
, fd
);
12187 tcg_temp_free_i32(fp2
);
12192 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12194 TCGv_i64 fp0
= tcg_temp_new_i64();
12195 TCGv_i64 fp1
= tcg_temp_new_i64();
12196 TCGv_i64 fp2
= tcg_temp_new_i64();
12198 gen_load_fpr64(ctx
, fp0
, fs
);
12199 gen_load_fpr64(ctx
, fp1
, ft
);
12200 gen_load_fpr64(ctx
, fp2
, fr
);
12201 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12202 tcg_temp_free_i64(fp0
);
12203 tcg_temp_free_i64(fp1
);
12204 gen_store_fpr64(ctx
, fp2
, fd
);
12205 tcg_temp_free_i64(fp2
);
12211 TCGv_i64 fp0
= tcg_temp_new_i64();
12212 TCGv_i64 fp1
= tcg_temp_new_i64();
12213 TCGv_i64 fp2
= tcg_temp_new_i64();
12215 gen_load_fpr64(ctx
, fp0
, fs
);
12216 gen_load_fpr64(ctx
, fp1
, ft
);
12217 gen_load_fpr64(ctx
, fp2
, fr
);
12218 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12219 tcg_temp_free_i64(fp0
);
12220 tcg_temp_free_i64(fp1
);
12221 gen_store_fpr64(ctx
, fp2
, fd
);
12222 tcg_temp_free_i64(fp2
);
12228 TCGv_i32 fp0
= tcg_temp_new_i32();
12229 TCGv_i32 fp1
= tcg_temp_new_i32();
12230 TCGv_i32 fp2
= tcg_temp_new_i32();
12232 gen_load_fpr32(ctx
, fp0
, fs
);
12233 gen_load_fpr32(ctx
, fp1
, ft
);
12234 gen_load_fpr32(ctx
, fp2
, fr
);
12235 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12236 tcg_temp_free_i32(fp0
);
12237 tcg_temp_free_i32(fp1
);
12238 gen_store_fpr32(ctx
, fp2
, fd
);
12239 tcg_temp_free_i32(fp2
);
12244 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12246 TCGv_i64 fp0
= tcg_temp_new_i64();
12247 TCGv_i64 fp1
= tcg_temp_new_i64();
12248 TCGv_i64 fp2
= tcg_temp_new_i64();
12250 gen_load_fpr64(ctx
, fp0
, fs
);
12251 gen_load_fpr64(ctx
, fp1
, ft
);
12252 gen_load_fpr64(ctx
, fp2
, fr
);
12253 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12254 tcg_temp_free_i64(fp0
);
12255 tcg_temp_free_i64(fp1
);
12256 gen_store_fpr64(ctx
, fp2
, fd
);
12257 tcg_temp_free_i64(fp2
);
12263 TCGv_i64 fp0
= tcg_temp_new_i64();
12264 TCGv_i64 fp1
= tcg_temp_new_i64();
12265 TCGv_i64 fp2
= tcg_temp_new_i64();
12267 gen_load_fpr64(ctx
, fp0
, fs
);
12268 gen_load_fpr64(ctx
, fp1
, ft
);
12269 gen_load_fpr64(ctx
, fp2
, fr
);
12270 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12271 tcg_temp_free_i64(fp0
);
12272 tcg_temp_free_i64(fp1
);
12273 gen_store_fpr64(ctx
, fp2
, fd
);
12274 tcg_temp_free_i64(fp2
);
12280 TCGv_i32 fp0
= tcg_temp_new_i32();
12281 TCGv_i32 fp1
= tcg_temp_new_i32();
12282 TCGv_i32 fp2
= tcg_temp_new_i32();
12284 gen_load_fpr32(ctx
, fp0
, fs
);
12285 gen_load_fpr32(ctx
, fp1
, ft
);
12286 gen_load_fpr32(ctx
, fp2
, fr
);
12287 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12288 tcg_temp_free_i32(fp0
);
12289 tcg_temp_free_i32(fp1
);
12290 gen_store_fpr32(ctx
, fp2
, fd
);
12291 tcg_temp_free_i32(fp2
);
12296 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12298 TCGv_i64 fp0
= tcg_temp_new_i64();
12299 TCGv_i64 fp1
= tcg_temp_new_i64();
12300 TCGv_i64 fp2
= tcg_temp_new_i64();
12302 gen_load_fpr64(ctx
, fp0
, fs
);
12303 gen_load_fpr64(ctx
, fp1
, ft
);
12304 gen_load_fpr64(ctx
, fp2
, fr
);
12305 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12306 tcg_temp_free_i64(fp0
);
12307 tcg_temp_free_i64(fp1
);
12308 gen_store_fpr64(ctx
, fp2
, fd
);
12309 tcg_temp_free_i64(fp2
);
12315 TCGv_i64 fp0
= tcg_temp_new_i64();
12316 TCGv_i64 fp1
= tcg_temp_new_i64();
12317 TCGv_i64 fp2
= tcg_temp_new_i64();
12319 gen_load_fpr64(ctx
, fp0
, fs
);
12320 gen_load_fpr64(ctx
, fp1
, ft
);
12321 gen_load_fpr64(ctx
, fp2
, fr
);
12322 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12323 tcg_temp_free_i64(fp0
);
12324 tcg_temp_free_i64(fp1
);
12325 gen_store_fpr64(ctx
, fp2
, fd
);
12326 tcg_temp_free_i64(fp2
);
12330 MIPS_INVAL("flt3_arith");
12331 generate_exception_end(ctx
, EXCP_RI
);
12336 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12340 #if !defined(CONFIG_USER_ONLY)
12341 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12342 Therefore only check the ISA in system mode. */
12343 check_insn(ctx
, ISA_MIPS32R2
);
12345 t0
= tcg_temp_new();
12349 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12350 gen_store_gpr(t0
, rt
);
12353 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12354 gen_store_gpr(t0
, rt
);
12357 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12360 gen_helper_rdhwr_cc(t0
, cpu_env
);
12361 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12364 gen_store_gpr(t0
, rt
);
12365 /* Break the TB to be able to take timer interrupts immediately
12366 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12367 we break completely out of translated code. */
12368 gen_save_pc(ctx
->base
.pc_next
+ 4);
12369 ctx
->base
.is_jmp
= DISAS_EXIT
;
12372 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12373 gen_store_gpr(t0
, rt
);
12376 check_insn(ctx
, ISA_MIPS32R6
);
12378 /* Performance counter registers are not implemented other than
12379 * control register 0.
12381 generate_exception(ctx
, EXCP_RI
);
12383 gen_helper_rdhwr_performance(t0
, cpu_env
);
12384 gen_store_gpr(t0
, rt
);
12387 check_insn(ctx
, ISA_MIPS32R6
);
12388 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12389 gen_store_gpr(t0
, rt
);
12392 #if defined(CONFIG_USER_ONLY)
12393 tcg_gen_ld_tl(t0
, cpu_env
,
12394 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12395 gen_store_gpr(t0
, rt
);
12398 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12399 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12400 tcg_gen_ld_tl(t0
, cpu_env
,
12401 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12402 gen_store_gpr(t0
, rt
);
12404 generate_exception_end(ctx
, EXCP_RI
);
12408 default: /* Invalid */
12409 MIPS_INVAL("rdhwr");
12410 generate_exception_end(ctx
, EXCP_RI
);
12416 static inline void clear_branch_hflags(DisasContext
*ctx
)
12418 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12419 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12420 save_cpu_state(ctx
, 0);
12422 /* it is not safe to save ctx->hflags as hflags may be changed
12423 in execution time by the instruction in delay / forbidden slot. */
12424 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12428 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12430 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12431 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12432 /* Branches completion */
12433 clear_branch_hflags(ctx
);
12434 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12435 /* FIXME: Need to clear can_do_io. */
12436 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12437 case MIPS_HFLAG_FBNSLOT
:
12438 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12441 /* unconditional branch */
12442 if (proc_hflags
& MIPS_HFLAG_BX
) {
12443 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12445 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12447 case MIPS_HFLAG_BL
:
12448 /* blikely taken case */
12449 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12451 case MIPS_HFLAG_BC
:
12452 /* Conditional branch */
12454 TCGLabel
*l1
= gen_new_label();
12456 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12457 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12459 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12462 case MIPS_HFLAG_BR
:
12463 /* unconditional branch to register */
12464 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12465 TCGv t0
= tcg_temp_new();
12466 TCGv_i32 t1
= tcg_temp_new_i32();
12468 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12469 tcg_gen_trunc_tl_i32(t1
, t0
);
12471 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12472 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12473 tcg_gen_or_i32(hflags
, hflags
, t1
);
12474 tcg_temp_free_i32(t1
);
12476 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12478 tcg_gen_mov_tl(cpu_PC
, btarget
);
12480 if (ctx
->base
.singlestep_enabled
) {
12481 save_cpu_state(ctx
, 0);
12482 gen_helper_raise_exception_debug(cpu_env
);
12484 tcg_gen_lookup_and_goto_ptr();
12487 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12493 /* Compact Branches */
12494 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12495 int rs
, int rt
, int32_t offset
)
12497 int bcond_compute
= 0;
12498 TCGv t0
= tcg_temp_new();
12499 TCGv t1
= tcg_temp_new();
12500 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12502 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12503 #ifdef MIPS_DEBUG_DISAS
12504 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12505 "\n", ctx
->base
.pc_next
);
12507 generate_exception_end(ctx
, EXCP_RI
);
12511 /* Load needed operands and calculate btarget */
12513 /* compact branch */
12514 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12515 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12516 gen_load_gpr(t0
, rs
);
12517 gen_load_gpr(t1
, rt
);
12519 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12520 if (rs
<= rt
&& rs
== 0) {
12521 /* OPC_BEQZALC, OPC_BNEZALC */
12522 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12525 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12526 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12527 gen_load_gpr(t0
, rs
);
12528 gen_load_gpr(t1
, rt
);
12530 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12532 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12533 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12534 if (rs
== 0 || rs
== rt
) {
12535 /* OPC_BLEZALC, OPC_BGEZALC */
12536 /* OPC_BGTZALC, OPC_BLTZALC */
12537 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12539 gen_load_gpr(t0
, rs
);
12540 gen_load_gpr(t1
, rt
);
12542 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12546 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12551 /* OPC_BEQZC, OPC_BNEZC */
12552 gen_load_gpr(t0
, rs
);
12554 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12556 /* OPC_JIC, OPC_JIALC */
12557 TCGv tbase
= tcg_temp_new();
12558 TCGv toffset
= tcg_temp_new();
12560 gen_load_gpr(tbase
, rt
);
12561 tcg_gen_movi_tl(toffset
, offset
);
12562 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12563 tcg_temp_free(tbase
);
12564 tcg_temp_free(toffset
);
12568 MIPS_INVAL("Compact branch/jump");
12569 generate_exception_end(ctx
, EXCP_RI
);
12573 if (bcond_compute
== 0) {
12574 /* Uncoditional compact branch */
12577 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12580 ctx
->hflags
|= MIPS_HFLAG_BR
;
12583 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12586 ctx
->hflags
|= MIPS_HFLAG_B
;
12589 MIPS_INVAL("Compact branch/jump");
12590 generate_exception_end(ctx
, EXCP_RI
);
12594 /* Generating branch here as compact branches don't have delay slot */
12595 gen_branch(ctx
, 4);
12597 /* Conditional compact branch */
12598 TCGLabel
*fs
= gen_new_label();
12599 save_cpu_state(ctx
, 0);
12602 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12603 if (rs
== 0 && rt
!= 0) {
12605 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12606 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12608 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12611 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12614 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12615 if (rs
== 0 && rt
!= 0) {
12617 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12618 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12620 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12623 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12626 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12627 if (rs
== 0 && rt
!= 0) {
12629 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12630 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12632 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12635 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12638 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12639 if (rs
== 0 && rt
!= 0) {
12641 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12642 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12644 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12647 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12650 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12651 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12653 /* OPC_BOVC, OPC_BNVC */
12654 TCGv t2
= tcg_temp_new();
12655 TCGv t3
= tcg_temp_new();
12656 TCGv t4
= tcg_temp_new();
12657 TCGv input_overflow
= tcg_temp_new();
12659 gen_load_gpr(t0
, rs
);
12660 gen_load_gpr(t1
, rt
);
12661 tcg_gen_ext32s_tl(t2
, t0
);
12662 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12663 tcg_gen_ext32s_tl(t3
, t1
);
12664 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12665 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12667 tcg_gen_add_tl(t4
, t2
, t3
);
12668 tcg_gen_ext32s_tl(t4
, t4
);
12669 tcg_gen_xor_tl(t2
, t2
, t3
);
12670 tcg_gen_xor_tl(t3
, t4
, t3
);
12671 tcg_gen_andc_tl(t2
, t3
, t2
);
12672 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12673 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12674 if (opc
== OPC_BOVC
) {
12676 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12679 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12681 tcg_temp_free(input_overflow
);
12685 } else if (rs
< rt
&& rs
== 0) {
12686 /* OPC_BEQZALC, OPC_BNEZALC */
12687 if (opc
== OPC_BEQZALC
) {
12689 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12692 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12695 /* OPC_BEQC, OPC_BNEC */
12696 if (opc
== OPC_BEQC
) {
12698 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12701 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12706 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12709 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12712 MIPS_INVAL("Compact conditional branch/jump");
12713 generate_exception_end(ctx
, EXCP_RI
);
12717 /* Generating branch here as compact branches don't have delay slot */
12718 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12721 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12729 /* ISA extensions (ASEs) */
12730 /* MIPS16 extension to MIPS32 */
12732 /* MIPS16 major opcodes */
12734 M16_OPC_ADDIUSP
= 0x00,
12735 M16_OPC_ADDIUPC
= 0x01,
12737 M16_OPC_JAL
= 0x03,
12738 M16_OPC_BEQZ
= 0x04,
12739 M16_OPC_BNEQZ
= 0x05,
12740 M16_OPC_SHIFT
= 0x06,
12742 M16_OPC_RRIA
= 0x08,
12743 M16_OPC_ADDIU8
= 0x09,
12744 M16_OPC_SLTI
= 0x0a,
12745 M16_OPC_SLTIU
= 0x0b,
12748 M16_OPC_CMPI
= 0x0e,
12752 M16_OPC_LWSP
= 0x12,
12754 M16_OPC_LBU
= 0x14,
12755 M16_OPC_LHU
= 0x15,
12756 M16_OPC_LWPC
= 0x16,
12757 M16_OPC_LWU
= 0x17,
12760 M16_OPC_SWSP
= 0x1a,
12762 M16_OPC_RRR
= 0x1c,
12764 M16_OPC_EXTEND
= 0x1e,
12768 /* I8 funct field */
12787 /* RR funct field */
12821 /* I64 funct field */
12829 I64_DADDIUPC
= 0x6,
12833 /* RR ry field for CNVT */
12835 RR_RY_CNVT_ZEB
= 0x0,
12836 RR_RY_CNVT_ZEH
= 0x1,
12837 RR_RY_CNVT_ZEW
= 0x2,
12838 RR_RY_CNVT_SEB
= 0x4,
12839 RR_RY_CNVT_SEH
= 0x5,
12840 RR_RY_CNVT_SEW
= 0x6,
/*
 * Map a 3-bit MIPS16/microMIPS register encoding to the architectural
 * GPR number: encodings 0..7 select $16, $17, $2..$7.
 */
static int xlat(int r)
{
    /* const: read-only lookup table, can live in .rodata. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12850 static void gen_mips16_save (DisasContext
*ctx
,
12851 int xsregs
, int aregs
,
12852 int do_ra
, int do_s0
, int do_s1
,
12855 TCGv t0
= tcg_temp_new();
12856 TCGv t1
= tcg_temp_new();
12857 TCGv t2
= tcg_temp_new();
12887 generate_exception_end(ctx
, EXCP_RI
);
12893 gen_base_offset_addr(ctx
, t0
, 29, 12);
12894 gen_load_gpr(t1
, 7);
12895 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12898 gen_base_offset_addr(ctx
, t0
, 29, 8);
12899 gen_load_gpr(t1
, 6);
12900 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12903 gen_base_offset_addr(ctx
, t0
, 29, 4);
12904 gen_load_gpr(t1
, 5);
12905 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12908 gen_base_offset_addr(ctx
, t0
, 29, 0);
12909 gen_load_gpr(t1
, 4);
12910 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12913 gen_load_gpr(t0
, 29);
12915 #define DECR_AND_STORE(reg) do { \
12916 tcg_gen_movi_tl(t2, -4); \
12917 gen_op_addr_add(ctx, t0, t0, t2); \
12918 gen_load_gpr(t1, reg); \
12919 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
12923 DECR_AND_STORE(31);
12928 DECR_AND_STORE(30);
12931 DECR_AND_STORE(23);
12934 DECR_AND_STORE(22);
12937 DECR_AND_STORE(21);
12940 DECR_AND_STORE(20);
12943 DECR_AND_STORE(19);
12946 DECR_AND_STORE(18);
12950 DECR_AND_STORE(17);
12953 DECR_AND_STORE(16);
12983 generate_exception_end(ctx
, EXCP_RI
);
12999 #undef DECR_AND_STORE
13001 tcg_gen_movi_tl(t2
, -framesize
);
13002 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13008 static void gen_mips16_restore (DisasContext
*ctx
,
13009 int xsregs
, int aregs
,
13010 int do_ra
, int do_s0
, int do_s1
,
13014 TCGv t0
= tcg_temp_new();
13015 TCGv t1
= tcg_temp_new();
13016 TCGv t2
= tcg_temp_new();
13018 tcg_gen_movi_tl(t2
, framesize
);
13019 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13021 #define DECR_AND_LOAD(reg) do { \
13022 tcg_gen_movi_tl(t2, -4); \
13023 gen_op_addr_add(ctx, t0, t0, t2); \
13024 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13025 gen_store_gpr(t1, reg); \
13089 generate_exception_end(ctx
, EXCP_RI
);
13105 #undef DECR_AND_LOAD
13107 tcg_gen_movi_tl(t2
, framesize
);
13108 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13114 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13115 int is_64_bit
, int extended
)
13119 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13120 generate_exception_end(ctx
, EXCP_RI
);
13124 t0
= tcg_temp_new();
13126 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13127 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13129 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13135 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13138 TCGv_i32 t0
= tcg_const_i32(op
);
13139 TCGv t1
= tcg_temp_new();
13140 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13141 gen_helper_cache(cpu_env
, t1
, t0
);
13144 #if defined(TARGET_MIPS64)
13145 static void decode_i64_mips16 (DisasContext
*ctx
,
13146 int ry
, int funct
, int16_t offset
,
13151 check_insn(ctx
, ISA_MIPS3
);
13152 check_mips_64(ctx
);
13153 offset
= extended
? offset
: offset
<< 3;
13154 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13157 check_insn(ctx
, ISA_MIPS3
);
13158 check_mips_64(ctx
);
13159 offset
= extended
? offset
: offset
<< 3;
13160 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13163 check_insn(ctx
, ISA_MIPS3
);
13164 check_mips_64(ctx
);
13165 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13166 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13169 check_insn(ctx
, ISA_MIPS3
);
13170 check_mips_64(ctx
);
13171 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13172 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13175 check_insn(ctx
, ISA_MIPS3
);
13176 check_mips_64(ctx
);
13177 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13178 generate_exception_end(ctx
, EXCP_RI
);
13180 offset
= extended
? offset
: offset
<< 3;
13181 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13185 check_insn(ctx
, ISA_MIPS3
);
13186 check_mips_64(ctx
);
13187 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13188 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13191 check_insn(ctx
, ISA_MIPS3
);
13192 check_mips_64(ctx
);
13193 offset
= extended
? offset
: offset
<< 2;
13194 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13197 check_insn(ctx
, ISA_MIPS3
);
13198 check_mips_64(ctx
);
13199 offset
= extended
? offset
: offset
<< 2;
13200 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13206 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13208 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13209 int op
, rx
, ry
, funct
, sa
;
13210 int16_t imm
, offset
;
13212 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13213 op
= (ctx
->opcode
>> 11) & 0x1f;
13214 sa
= (ctx
->opcode
>> 22) & 0x1f;
13215 funct
= (ctx
->opcode
>> 8) & 0x7;
13216 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13217 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13218 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13219 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13220 | (ctx
->opcode
& 0x1f));
13222 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13225 case M16_OPC_ADDIUSP
:
13226 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13228 case M16_OPC_ADDIUPC
:
13229 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13232 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13233 /* No delay slot, so just process as a normal instruction */
13236 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13237 /* No delay slot, so just process as a normal instruction */
13239 case M16_OPC_BNEQZ
:
13240 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13241 /* No delay slot, so just process as a normal instruction */
13243 case M16_OPC_SHIFT
:
13244 switch (ctx
->opcode
& 0x3) {
13246 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13249 #if defined(TARGET_MIPS64)
13250 check_mips_64(ctx
);
13251 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13253 generate_exception_end(ctx
, EXCP_RI
);
13257 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13260 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13264 #if defined(TARGET_MIPS64)
13266 check_insn(ctx
, ISA_MIPS3
);
13267 check_mips_64(ctx
);
13268 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13272 imm
= ctx
->opcode
& 0xf;
13273 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13274 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13275 imm
= (int16_t) (imm
<< 1) >> 1;
13276 if ((ctx
->opcode
>> 4) & 0x1) {
13277 #if defined(TARGET_MIPS64)
13278 check_mips_64(ctx
);
13279 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13281 generate_exception_end(ctx
, EXCP_RI
);
13284 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13287 case M16_OPC_ADDIU8
:
13288 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13291 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13293 case M16_OPC_SLTIU
:
13294 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13299 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13302 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13305 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13308 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13311 check_insn(ctx
, ISA_MIPS32
);
13313 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13314 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13315 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13316 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13317 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13318 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13319 | (ctx
->opcode
& 0xf)) << 3;
13321 if (ctx
->opcode
& (1 << 7)) {
13322 gen_mips16_save(ctx
, xsregs
, aregs
,
13323 do_ra
, do_s0
, do_s1
,
13326 gen_mips16_restore(ctx
, xsregs
, aregs
,
13327 do_ra
, do_s0
, do_s1
,
13333 generate_exception_end(ctx
, EXCP_RI
);
13338 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13341 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13343 #if defined(TARGET_MIPS64)
13345 check_insn(ctx
, ISA_MIPS3
);
13346 check_mips_64(ctx
);
13347 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13351 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13354 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13357 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13360 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13363 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13366 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13369 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13371 #if defined(TARGET_MIPS64)
13373 check_insn(ctx
, ISA_MIPS3
);
13374 check_mips_64(ctx
);
13375 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13379 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13382 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13385 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13388 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13390 #if defined(TARGET_MIPS64)
13392 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13396 generate_exception_end(ctx
, EXCP_RI
);
13403 static inline bool is_uhi(int sdbbp_code
)
13405 #ifdef CONFIG_USER_ONLY
13408 return semihosting_enabled() && sdbbp_code
== 1;
13412 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13416 int op
, cnvt_op
, op1
, offset
;
13420 op
= (ctx
->opcode
>> 11) & 0x1f;
13421 sa
= (ctx
->opcode
>> 2) & 0x7;
13422 sa
= sa
== 0 ? 8 : sa
;
13423 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13424 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13425 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13426 op1
= offset
= ctx
->opcode
& 0x1f;
13431 case M16_OPC_ADDIUSP
:
13433 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13435 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13438 case M16_OPC_ADDIUPC
:
13439 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13442 offset
= (ctx
->opcode
& 0x7ff) << 1;
13443 offset
= (int16_t)(offset
<< 4) >> 4;
13444 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13445 /* No delay slot, so just process as a normal instruction */
13448 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13449 offset
= (((ctx
->opcode
& 0x1f) << 21)
13450 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13452 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13453 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13457 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13458 ((int8_t)ctx
->opcode
) << 1, 0);
13459 /* No delay slot, so just process as a normal instruction */
13461 case M16_OPC_BNEQZ
:
13462 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13463 ((int8_t)ctx
->opcode
) << 1, 0);
13464 /* No delay slot, so just process as a normal instruction */
13466 case M16_OPC_SHIFT
:
13467 switch (ctx
->opcode
& 0x3) {
13469 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13472 #if defined(TARGET_MIPS64)
13473 check_insn(ctx
, ISA_MIPS3
);
13474 check_mips_64(ctx
);
13475 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13477 generate_exception_end(ctx
, EXCP_RI
);
13481 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13484 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13488 #if defined(TARGET_MIPS64)
13490 check_insn(ctx
, ISA_MIPS3
);
13491 check_mips_64(ctx
);
13492 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13497 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13499 if ((ctx
->opcode
>> 4) & 1) {
13500 #if defined(TARGET_MIPS64)
13501 check_insn(ctx
, ISA_MIPS3
);
13502 check_mips_64(ctx
);
13503 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13505 generate_exception_end(ctx
, EXCP_RI
);
13508 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13512 case M16_OPC_ADDIU8
:
13514 int16_t imm
= (int8_t) ctx
->opcode
;
13516 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13521 int16_t imm
= (uint8_t) ctx
->opcode
;
13522 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13525 case M16_OPC_SLTIU
:
13527 int16_t imm
= (uint8_t) ctx
->opcode
;
13528 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13535 funct
= (ctx
->opcode
>> 8) & 0x7;
13538 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13539 ((int8_t)ctx
->opcode
) << 1, 0);
13542 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13543 ((int8_t)ctx
->opcode
) << 1, 0);
13546 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13549 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13550 ((int8_t)ctx
->opcode
) << 3);
13553 check_insn(ctx
, ISA_MIPS32
);
13555 int do_ra
= ctx
->opcode
& (1 << 6);
13556 int do_s0
= ctx
->opcode
& (1 << 5);
13557 int do_s1
= ctx
->opcode
& (1 << 4);
13558 int framesize
= ctx
->opcode
& 0xf;
13560 if (framesize
== 0) {
13563 framesize
= framesize
<< 3;
13566 if (ctx
->opcode
& (1 << 7)) {
13567 gen_mips16_save(ctx
, 0, 0,
13568 do_ra
, do_s0
, do_s1
, framesize
);
13570 gen_mips16_restore(ctx
, 0, 0,
13571 do_ra
, do_s0
, do_s1
, framesize
);
13577 int rz
= xlat(ctx
->opcode
& 0x7);
13579 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13580 ((ctx
->opcode
>> 5) & 0x7);
13581 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13585 reg32
= ctx
->opcode
& 0x1f;
13586 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13589 generate_exception_end(ctx
, EXCP_RI
);
13596 int16_t imm
= (uint8_t) ctx
->opcode
;
13598 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13603 int16_t imm
= (uint8_t) ctx
->opcode
;
13604 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13607 #if defined(TARGET_MIPS64)
13609 check_insn(ctx
, ISA_MIPS3
);
13610 check_mips_64(ctx
);
13611 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13615 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13618 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13621 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13624 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13627 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13630 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13633 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13635 #if defined (TARGET_MIPS64)
13637 check_insn(ctx
, ISA_MIPS3
);
13638 check_mips_64(ctx
);
13639 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13643 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13646 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13649 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13652 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13656 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13659 switch (ctx
->opcode
& 0x3) {
13661 mips32_op
= OPC_ADDU
;
13664 mips32_op
= OPC_SUBU
;
13666 #if defined(TARGET_MIPS64)
13668 mips32_op
= OPC_DADDU
;
13669 check_insn(ctx
, ISA_MIPS3
);
13670 check_mips_64(ctx
);
13673 mips32_op
= OPC_DSUBU
;
13674 check_insn(ctx
, ISA_MIPS3
);
13675 check_mips_64(ctx
);
13679 generate_exception_end(ctx
, EXCP_RI
);
13683 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13692 int nd
= (ctx
->opcode
>> 7) & 0x1;
13693 int link
= (ctx
->opcode
>> 6) & 0x1;
13694 int ra
= (ctx
->opcode
>> 5) & 0x1;
13697 check_insn(ctx
, ISA_MIPS32
);
13706 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13711 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13712 gen_helper_do_semihosting(cpu_env
);
13714 /* XXX: not clear which exception should be raised
13715 * when in debug mode...
13717 check_insn(ctx
, ISA_MIPS32
);
13718 generate_exception_end(ctx
, EXCP_DBp
);
13722 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13725 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13728 generate_exception_end(ctx
, EXCP_BREAK
);
13731 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13734 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13737 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13739 #if defined (TARGET_MIPS64)
13741 check_insn(ctx
, ISA_MIPS3
);
13742 check_mips_64(ctx
);
13743 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13747 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13750 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13753 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13756 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13759 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13762 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13765 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13768 check_insn(ctx
, ISA_MIPS32
);
13770 case RR_RY_CNVT_ZEB
:
13771 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13773 case RR_RY_CNVT_ZEH
:
13774 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13776 case RR_RY_CNVT_SEB
:
13777 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13779 case RR_RY_CNVT_SEH
:
13780 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13782 #if defined (TARGET_MIPS64)
13783 case RR_RY_CNVT_ZEW
:
13784 check_insn(ctx
, ISA_MIPS64
);
13785 check_mips_64(ctx
);
13786 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13788 case RR_RY_CNVT_SEW
:
13789 check_insn(ctx
, ISA_MIPS64
);
13790 check_mips_64(ctx
);
13791 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13795 generate_exception_end(ctx
, EXCP_RI
);
13800 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13802 #if defined (TARGET_MIPS64)
13804 check_insn(ctx
, ISA_MIPS3
);
13805 check_mips_64(ctx
);
13806 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13809 check_insn(ctx
, ISA_MIPS3
);
13810 check_mips_64(ctx
);
13811 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13814 check_insn(ctx
, ISA_MIPS3
);
13815 check_mips_64(ctx
);
13816 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13819 check_insn(ctx
, ISA_MIPS3
);
13820 check_mips_64(ctx
);
13821 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13825 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13828 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13831 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13834 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13836 #if defined (TARGET_MIPS64)
13838 check_insn(ctx
, ISA_MIPS3
);
13839 check_mips_64(ctx
);
13840 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13843 check_insn(ctx
, ISA_MIPS3
);
13844 check_mips_64(ctx
);
13845 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13848 check_insn(ctx
, ISA_MIPS3
);
13849 check_mips_64(ctx
);
13850 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13853 check_insn(ctx
, ISA_MIPS3
);
13854 check_mips_64(ctx
);
13855 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13859 generate_exception_end(ctx
, EXCP_RI
);
13863 case M16_OPC_EXTEND
:
13864 decode_extended_mips16_opc(env
, ctx
);
13867 #if defined(TARGET_MIPS64)
13869 funct
= (ctx
->opcode
>> 8) & 0x7;
13870 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13874 generate_exception_end(ctx
, EXCP_RI
);
13881 /* microMIPS extension to MIPS32/MIPS64 */
13884 * microMIPS32/microMIPS64 major opcodes
13886 * 1. MIPS Architecture for Programmers Volume II-B:
13887 * The microMIPS32 Instruction Set (Revision 3.05)
13889 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13891 * 2. MIPS Architecture For Programmers Volume II-A:
13892 * The MIPS64 Instruction Set (Revision 3.51)
13922 POOL32S
= 0x16, /* MIPS64 */
13923 DADDIU32
= 0x17, /* MIPS64 */
13952 /* 0x29 is reserved */
13965 /* 0x31 is reserved */
13978 SD32
= 0x36, /* MIPS64 */
13979 LD32
= 0x37, /* MIPS64 */
13981 /* 0x39 is reserved */
13997 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14019 /* POOL32A encoding of minor opcode field */
14022 /* These opcodes are distinguished only by bits 9..6; those bits are
14023 * what are recorded below. */
14060 /* The following can be distinguished by their lower 6 bits. */
14070 /* POOL32AXF encoding of minor opcode field extension */
14073 * 1. MIPS Architecture for Programmers Volume II-B:
14074 * The microMIPS32 Instruction Set (Revision 3.05)
14076 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14078 * 2. MIPS Architecture for Programmers VolumeIV-e:
14079 * The MIPS DSP Application-Specific Extension
14080 * to the microMIPS32 Architecture (Revision 2.34)
14082 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14097 /* begin of microMIPS32 DSP */
14099 /* bits 13..12 for 0x01 */
14105 /* bits 13..12 for 0x2a */
14111 /* bits 13..12 for 0x32 */
14115 /* end of microMIPS32 DSP */
14117 /* bits 15..12 for 0x2c */
14134 /* bits 15..12 for 0x34 */
14142 /* bits 15..12 for 0x3c */
14144 JR
= 0x0, /* alias */
14152 /* bits 15..12 for 0x05 */
14156 /* bits 15..12 for 0x0d */
14168 /* bits 15..12 for 0x15 */
14174 /* bits 15..12 for 0x1d */
14178 /* bits 15..12 for 0x2d */
14183 /* bits 15..12 for 0x35 */
14190 /* POOL32B encoding of minor opcode field (bits 15..12) */
14206 /* POOL32C encoding of minor opcode field (bits 15..12) */
14227 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14240 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14253 /* POOL32F encoding of minor opcode field (bits 5..0) */
14256 /* These are the bit 7..6 values */
14265 /* These are the bit 8..6 values */
14290 MOVZ_FMT_05
= 0x05,
14324 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14331 /* POOL32Fxf encoding of minor opcode extension field */
14369 /* POOL32I encoding of minor opcode field (bits 25..21) */
14399 /* These overlap and are distinguished by bit16 of the instruction */
14408 /* POOL16A encoding of minor opcode field */
14415 /* POOL16B encoding of minor opcode field */
14422 /* POOL16C encoding of minor opcode field */
14442 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14466 /* POOL16D encoding of minor opcode field */
14473 /* POOL16E encoding of minor opcode field */
/*
 * Map the 3-bit microMIPS register field to the architectural GPR
 * number: encodings 0..7 select $16, $17, $2..$7.
 */
static int mmreg(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    /* Restored: the extract dropped the return, leaving the function
       falling off the end (undefined behavior). */
    return map[r];
}
14487 /* Used for 16-bit store instructions. */
/*
 * Map the 3-bit microMIPS register field used by 16-bit store
 * instructions: encoding 0 selects $0 (store zero) instead of $16.
 */
static int mmreg2(int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    /* Restored: the extract dropped the return statement. */
    return map[r];
}
14495 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14496 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14497 #define uMIPS_RS2(op) uMIPS_RS(op)
14498 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14499 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14500 #define uMIPS_RS5(op) (op & 0x1f)
14502 /* Signed immediate */
14503 #define SIMM(op, start, width) \
14504 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14507 /* Zero-extended immediate */
14508 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14510 static void gen_addiur1sp(DisasContext
*ctx
)
14512 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14514 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14517 static void gen_addiur2(DisasContext
*ctx
)
14519 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14520 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14521 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14523 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14526 static void gen_addiusp(DisasContext
*ctx
)
14528 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14531 if (encoded
<= 1) {
14532 decoded
= 256 + encoded
;
14533 } else if (encoded
<= 255) {
14535 } else if (encoded
<= 509) {
14536 decoded
= encoded
- 512;
14538 decoded
= encoded
- 768;
14541 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14544 static void gen_addius5(DisasContext
*ctx
)
14546 int imm
= SIMM(ctx
->opcode
, 1, 4);
14547 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14549 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14552 static void gen_andi16(DisasContext
*ctx
)
14554 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14555 31, 32, 63, 64, 255, 32768, 65535 };
14556 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14557 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14558 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14560 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14563 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14564 int base
, int16_t offset
)
14569 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14570 generate_exception_end(ctx
, EXCP_RI
);
14574 t0
= tcg_temp_new();
14576 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14578 t1
= tcg_const_tl(reglist
);
14579 t2
= tcg_const_i32(ctx
->mem_idx
);
14581 save_cpu_state(ctx
, 1);
14584 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14587 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14589 #ifdef TARGET_MIPS64
14591 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14594 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14600 tcg_temp_free_i32(t2
);
14604 static void gen_pool16c_insn(DisasContext
*ctx
)
14606 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14607 int rs
= mmreg(ctx
->opcode
& 0x7);
14609 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14614 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14620 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14626 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14632 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14639 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14640 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14642 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14651 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14652 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14654 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14661 int reg
= ctx
->opcode
& 0x1f;
14663 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14669 int reg
= ctx
->opcode
& 0x1f;
14670 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14671 /* Let normal delay slot handling in our caller take us
14672 to the branch target. */
14677 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14678 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14682 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14683 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14687 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14691 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14694 generate_exception_end(ctx
, EXCP_BREAK
);
14697 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14698 gen_helper_do_semihosting(cpu_env
);
14700 /* XXX: not clear which exception should be raised
14701 * when in debug mode...
14703 check_insn(ctx
, ISA_MIPS32
);
14704 generate_exception_end(ctx
, EXCP_DBp
);
14707 case JRADDIUSP
+ 0:
14708 case JRADDIUSP
+ 1:
14710 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14711 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14712 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14713 /* Let normal delay slot handling in our caller take us
14714 to the branch target. */
14718 generate_exception_end(ctx
, EXCP_RI
);
14723 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14726 int rd
, rs
, re
, rt
;
14727 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14728 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14729 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14730 rd
= rd_enc
[enc_dest
];
14731 re
= re_enc
[enc_dest
];
14732 rs
= rs_rt_enc
[enc_rs
];
14733 rt
= rs_rt_enc
[enc_rt
];
14735 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14737 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14740 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14742 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14746 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14748 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14749 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14751 switch (ctx
->opcode
& 0xf) {
14753 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14756 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14760 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14761 int offset
= extract32(ctx
->opcode
, 4, 4);
14762 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14765 case R6_JRC16
: /* JRCADDIUSP */
14766 if ((ctx
->opcode
>> 4) & 1) {
14768 int imm
= extract32(ctx
->opcode
, 5, 5);
14769 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14770 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14773 rs
= extract32(ctx
->opcode
, 5, 5);
14774 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
14786 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14787 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14788 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14789 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14793 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14796 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14800 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14801 int offset
= extract32(ctx
->opcode
, 4, 4);
14802 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14805 case JALRC16
: /* BREAK16, SDBBP16 */
14806 switch (ctx
->opcode
& 0x3f) {
14808 case JALRC16
+ 0x20:
14810 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14815 generate_exception(ctx
, EXCP_BREAK
);
14819 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14820 gen_helper_do_semihosting(cpu_env
);
14822 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14823 generate_exception(ctx
, EXCP_RI
);
14825 generate_exception(ctx
, EXCP_DBp
);
14832 generate_exception(ctx
, EXCP_RI
);
14837 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14839 TCGv t0
= tcg_temp_new();
14840 TCGv t1
= tcg_temp_new();
14842 gen_load_gpr(t0
, base
);
14845 gen_load_gpr(t1
, index
);
14846 tcg_gen_shli_tl(t1
, t1
, 2);
14847 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14850 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14851 gen_store_gpr(t1
, rd
);
14857 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14858 int base
, int16_t offset
)
14862 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14863 generate_exception_end(ctx
, EXCP_RI
);
14867 t0
= tcg_temp_new();
14868 t1
= tcg_temp_new();
14870 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14875 generate_exception_end(ctx
, EXCP_RI
);
14878 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14879 gen_store_gpr(t1
, rd
);
14880 tcg_gen_movi_tl(t1
, 4);
14881 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14882 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14883 gen_store_gpr(t1
, rd
+1);
14886 gen_load_gpr(t1
, rd
);
14887 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14888 tcg_gen_movi_tl(t1
, 4);
14889 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14890 gen_load_gpr(t1
, rd
+1);
14891 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14893 #ifdef TARGET_MIPS64
14896 generate_exception_end(ctx
, EXCP_RI
);
14899 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14900 gen_store_gpr(t1
, rd
);
14901 tcg_gen_movi_tl(t1
, 8);
14902 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14903 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14904 gen_store_gpr(t1
, rd
+1);
14907 gen_load_gpr(t1
, rd
);
14908 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14909 tcg_gen_movi_tl(t1
, 8);
14910 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14911 gen_load_gpr(t1
, rd
+1);
14912 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14920 static void gen_sync(int stype
)
14922 TCGBar tcg_mo
= TCG_BAR_SC
;
14925 case 0x4: /* SYNC_WMB */
14926 tcg_mo
|= TCG_MO_ST_ST
;
14928 case 0x10: /* SYNC_MB */
14929 tcg_mo
|= TCG_MO_ALL
;
14931 case 0x11: /* SYNC_ACQUIRE */
14932 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14934 case 0x12: /* SYNC_RELEASE */
14935 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14937 case 0x13: /* SYNC_RMB */
14938 tcg_mo
|= TCG_MO_LD_LD
;
14941 tcg_mo
|= TCG_MO_ALL
;
14945 tcg_gen_mb(tcg_mo
);
14948 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14950 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14951 int minor
= (ctx
->opcode
>> 12) & 0xf;
14952 uint32_t mips32_op
;
14954 switch (extension
) {
14956 mips32_op
= OPC_TEQ
;
14959 mips32_op
= OPC_TGE
;
14962 mips32_op
= OPC_TGEU
;
14965 mips32_op
= OPC_TLT
;
14968 mips32_op
= OPC_TLTU
;
14971 mips32_op
= OPC_TNE
;
14973 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
14975 #ifndef CONFIG_USER_ONLY
14978 check_cp0_enabled(ctx
);
14980 /* Treat as NOP. */
14983 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
14987 check_cp0_enabled(ctx
);
14989 TCGv t0
= tcg_temp_new();
14991 gen_load_gpr(t0
, rt
);
14992 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
14998 switch (minor
& 3) {
15000 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15003 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15006 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15009 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15012 goto pool32axf_invalid
;
15016 switch (minor
& 3) {
15018 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15021 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15024 goto pool32axf_invalid
;
15030 check_insn(ctx
, ISA_MIPS32R6
);
15031 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15034 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15037 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15040 mips32_op
= OPC_CLO
;
15043 mips32_op
= OPC_CLZ
;
15045 check_insn(ctx
, ISA_MIPS32
);
15046 gen_cl(ctx
, mips32_op
, rt
, rs
);
15049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15050 gen_rdhwr(ctx
, rt
, rs
, 0);
15053 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15056 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15057 mips32_op
= OPC_MULT
;
15060 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15061 mips32_op
= OPC_MULTU
;
15064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15065 mips32_op
= OPC_DIV
;
15068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15069 mips32_op
= OPC_DIVU
;
15072 check_insn(ctx
, ISA_MIPS32
);
15073 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15076 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15077 mips32_op
= OPC_MADD
;
15080 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15081 mips32_op
= OPC_MADDU
;
15084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15085 mips32_op
= OPC_MSUB
;
15088 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15089 mips32_op
= OPC_MSUBU
;
15091 check_insn(ctx
, ISA_MIPS32
);
15092 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15095 goto pool32axf_invalid
;
15106 generate_exception_err(ctx
, EXCP_CpU
, 2);
15109 goto pool32axf_invalid
;
15114 case JALR
: /* JALRC */
15115 case JALR_HB
: /* JALRC_HB */
15116 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15117 /* JALRC, JALRC_HB */
15118 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15120 /* JALR, JALR_HB */
15121 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15122 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15127 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15128 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15129 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15132 goto pool32axf_invalid
;
15138 check_cp0_enabled(ctx
);
15139 check_insn(ctx
, ISA_MIPS32R2
);
15140 gen_load_srsgpr(rs
, rt
);
15143 check_cp0_enabled(ctx
);
15144 check_insn(ctx
, ISA_MIPS32R2
);
15145 gen_store_srsgpr(rs
, rt
);
15148 goto pool32axf_invalid
;
15151 #ifndef CONFIG_USER_ONLY
15155 mips32_op
= OPC_TLBP
;
15158 mips32_op
= OPC_TLBR
;
15161 mips32_op
= OPC_TLBWI
;
15164 mips32_op
= OPC_TLBWR
;
15167 mips32_op
= OPC_TLBINV
;
15170 mips32_op
= OPC_TLBINVF
;
15173 mips32_op
= OPC_WAIT
;
15176 mips32_op
= OPC_DERET
;
15179 mips32_op
= OPC_ERET
;
15181 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15184 goto pool32axf_invalid
;
15190 check_cp0_enabled(ctx
);
15192 TCGv t0
= tcg_temp_new();
15194 save_cpu_state(ctx
, 1);
15195 gen_helper_di(t0
, cpu_env
);
15196 gen_store_gpr(t0
, rs
);
15197 /* Stop translation as we may have switched the execution mode */
15198 ctx
->base
.is_jmp
= DISAS_STOP
;
15203 check_cp0_enabled(ctx
);
15205 TCGv t0
= tcg_temp_new();
15207 save_cpu_state(ctx
, 1);
15208 gen_helper_ei(t0
, cpu_env
);
15209 gen_store_gpr(t0
, rs
);
15210 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15211 of translated code to check for pending interrupts. */
15212 gen_save_pc(ctx
->base
.pc_next
+ 4);
15213 ctx
->base
.is_jmp
= DISAS_EXIT
;
15218 goto pool32axf_invalid
;
15225 gen_sync(extract32(ctx
->opcode
, 16, 5));
15228 generate_exception_end(ctx
, EXCP_SYSCALL
);
15231 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15232 gen_helper_do_semihosting(cpu_env
);
15234 check_insn(ctx
, ISA_MIPS32
);
15235 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15236 generate_exception_end(ctx
, EXCP_RI
);
15238 generate_exception_end(ctx
, EXCP_DBp
);
15243 goto pool32axf_invalid
;
15247 switch (minor
& 3) {
15249 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15252 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15255 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15258 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15261 goto pool32axf_invalid
;
15265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15268 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15271 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15274 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15277 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15280 goto pool32axf_invalid
;
15285 MIPS_INVAL("pool32axf");
15286 generate_exception_end(ctx
, EXCP_RI
);
15291 /* Values for microMIPS fmt field. Variable-width, depending on which
15292 formats the instruction supports. */
15311 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15313 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15314 uint32_t mips32_op
;
15316 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15317 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15318 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15320 switch (extension
) {
15321 case FLOAT_1BIT_FMT(CFC1
, 0):
15322 mips32_op
= OPC_CFC1
;
15324 case FLOAT_1BIT_FMT(CTC1
, 0):
15325 mips32_op
= OPC_CTC1
;
15327 case FLOAT_1BIT_FMT(MFC1
, 0):
15328 mips32_op
= OPC_MFC1
;
15330 case FLOAT_1BIT_FMT(MTC1
, 0):
15331 mips32_op
= OPC_MTC1
;
15333 case FLOAT_1BIT_FMT(MFHC1
, 0):
15334 mips32_op
= OPC_MFHC1
;
15336 case FLOAT_1BIT_FMT(MTHC1
, 0):
15337 mips32_op
= OPC_MTHC1
;
15339 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15342 /* Reciprocal square root */
15343 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15344 mips32_op
= OPC_RSQRT_S
;
15346 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15347 mips32_op
= OPC_RSQRT_D
;
15351 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15352 mips32_op
= OPC_SQRT_S
;
15354 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15355 mips32_op
= OPC_SQRT_D
;
15359 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15360 mips32_op
= OPC_RECIP_S
;
15362 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15363 mips32_op
= OPC_RECIP_D
;
15367 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15368 mips32_op
= OPC_FLOOR_L_S
;
15370 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15371 mips32_op
= OPC_FLOOR_L_D
;
15373 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15374 mips32_op
= OPC_FLOOR_W_S
;
15376 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15377 mips32_op
= OPC_FLOOR_W_D
;
15381 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15382 mips32_op
= OPC_CEIL_L_S
;
15384 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15385 mips32_op
= OPC_CEIL_L_D
;
15387 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15388 mips32_op
= OPC_CEIL_W_S
;
15390 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15391 mips32_op
= OPC_CEIL_W_D
;
15395 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15396 mips32_op
= OPC_TRUNC_L_S
;
15398 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15399 mips32_op
= OPC_TRUNC_L_D
;
15401 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15402 mips32_op
= OPC_TRUNC_W_S
;
15404 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15405 mips32_op
= OPC_TRUNC_W_D
;
15409 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15410 mips32_op
= OPC_ROUND_L_S
;
15412 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15413 mips32_op
= OPC_ROUND_L_D
;
15415 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15416 mips32_op
= OPC_ROUND_W_S
;
15418 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15419 mips32_op
= OPC_ROUND_W_D
;
15422 /* Integer to floating-point conversion */
15423 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15424 mips32_op
= OPC_CVT_L_S
;
15426 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15427 mips32_op
= OPC_CVT_L_D
;
15429 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15430 mips32_op
= OPC_CVT_W_S
;
15432 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15433 mips32_op
= OPC_CVT_W_D
;
15436 /* Paired-foo conversions */
15437 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15438 mips32_op
= OPC_CVT_S_PL
;
15440 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15441 mips32_op
= OPC_CVT_S_PU
;
15443 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15444 mips32_op
= OPC_CVT_PW_PS
;
15446 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15447 mips32_op
= OPC_CVT_PS_PW
;
15450 /* Floating-point moves */
15451 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15452 mips32_op
= OPC_MOV_S
;
15454 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15455 mips32_op
= OPC_MOV_D
;
15457 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15458 mips32_op
= OPC_MOV_PS
;
15461 /* Absolute value */
15462 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15463 mips32_op
= OPC_ABS_S
;
15465 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15466 mips32_op
= OPC_ABS_D
;
15468 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15469 mips32_op
= OPC_ABS_PS
;
15473 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15474 mips32_op
= OPC_NEG_S
;
15476 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15477 mips32_op
= OPC_NEG_D
;
15479 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15480 mips32_op
= OPC_NEG_PS
;
15483 /* Reciprocal square root step */
15484 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15485 mips32_op
= OPC_RSQRT1_S
;
15487 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15488 mips32_op
= OPC_RSQRT1_D
;
15490 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15491 mips32_op
= OPC_RSQRT1_PS
;
15494 /* Reciprocal step */
15495 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15496 mips32_op
= OPC_RECIP1_S
;
15498 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15499 mips32_op
= OPC_RECIP1_S
;
15501 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15502 mips32_op
= OPC_RECIP1_PS
;
15505 /* Conversions from double */
15506 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15507 mips32_op
= OPC_CVT_D_S
;
15509 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15510 mips32_op
= OPC_CVT_D_W
;
15512 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15513 mips32_op
= OPC_CVT_D_L
;
15516 /* Conversions from single */
15517 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15518 mips32_op
= OPC_CVT_S_D
;
15520 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15521 mips32_op
= OPC_CVT_S_W
;
15523 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15524 mips32_op
= OPC_CVT_S_L
;
15526 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15529 /* Conditional moves on floating-point codes */
15530 case COND_FLOAT_MOV(MOVT
, 0):
15531 case COND_FLOAT_MOV(MOVT
, 1):
15532 case COND_FLOAT_MOV(MOVT
, 2):
15533 case COND_FLOAT_MOV(MOVT
, 3):
15534 case COND_FLOAT_MOV(MOVT
, 4):
15535 case COND_FLOAT_MOV(MOVT
, 5):
15536 case COND_FLOAT_MOV(MOVT
, 6):
15537 case COND_FLOAT_MOV(MOVT
, 7):
15538 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15539 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15541 case COND_FLOAT_MOV(MOVF
, 0):
15542 case COND_FLOAT_MOV(MOVF
, 1):
15543 case COND_FLOAT_MOV(MOVF
, 2):
15544 case COND_FLOAT_MOV(MOVF
, 3):
15545 case COND_FLOAT_MOV(MOVF
, 4):
15546 case COND_FLOAT_MOV(MOVF
, 5):
15547 case COND_FLOAT_MOV(MOVF
, 6):
15548 case COND_FLOAT_MOV(MOVF
, 7):
15549 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15550 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15553 MIPS_INVAL("pool32fxf");
15554 generate_exception_end(ctx
, EXCP_RI
);
15559 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15563 int rt
, rs
, rd
, rr
;
15565 uint32_t op
, minor
, minor2
, mips32_op
;
15566 uint32_t cond
, fmt
, cc
;
15568 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15569 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15571 rt
= (ctx
->opcode
>> 21) & 0x1f;
15572 rs
= (ctx
->opcode
>> 16) & 0x1f;
15573 rd
= (ctx
->opcode
>> 11) & 0x1f;
15574 rr
= (ctx
->opcode
>> 6) & 0x1f;
15575 imm
= (int16_t) ctx
->opcode
;
15577 op
= (ctx
->opcode
>> 26) & 0x3f;
15580 minor
= ctx
->opcode
& 0x3f;
15583 minor
= (ctx
->opcode
>> 6) & 0xf;
15586 mips32_op
= OPC_SLL
;
15589 mips32_op
= OPC_SRA
;
15592 mips32_op
= OPC_SRL
;
15595 mips32_op
= OPC_ROTR
;
15597 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15600 check_insn(ctx
, ISA_MIPS32R6
);
15601 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15604 check_insn(ctx
, ISA_MIPS32R6
);
15605 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15608 check_insn(ctx
, ISA_MIPS32R6
);
15609 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15612 goto pool32a_invalid
;
15616 minor
= (ctx
->opcode
>> 6) & 0xf;
15620 mips32_op
= OPC_ADD
;
15623 mips32_op
= OPC_ADDU
;
15626 mips32_op
= OPC_SUB
;
15629 mips32_op
= OPC_SUBU
;
15632 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15633 mips32_op
= OPC_MUL
;
15635 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15639 mips32_op
= OPC_SLLV
;
15642 mips32_op
= OPC_SRLV
;
15645 mips32_op
= OPC_SRAV
;
15648 mips32_op
= OPC_ROTRV
;
15650 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15652 /* Logical operations */
15654 mips32_op
= OPC_AND
;
15657 mips32_op
= OPC_OR
;
15660 mips32_op
= OPC_NOR
;
15663 mips32_op
= OPC_XOR
;
15665 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15667 /* Set less than */
15669 mips32_op
= OPC_SLT
;
15672 mips32_op
= OPC_SLTU
;
15674 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15677 goto pool32a_invalid
;
15681 minor
= (ctx
->opcode
>> 6) & 0xf;
15683 /* Conditional moves */
15684 case MOVN
: /* MUL */
15685 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15687 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15690 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15693 case MOVZ
: /* MUH */
15694 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15696 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15699 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15703 check_insn(ctx
, ISA_MIPS32R6
);
15704 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15707 check_insn(ctx
, ISA_MIPS32R6
);
15708 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15710 case LWXS
: /* DIV */
15711 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15713 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15716 gen_ldxs(ctx
, rs
, rt
, rd
);
15720 check_insn(ctx
, ISA_MIPS32R6
);
15721 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15724 check_insn(ctx
, ISA_MIPS32R6
);
15725 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15728 check_insn(ctx
, ISA_MIPS32R6
);
15729 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15732 goto pool32a_invalid
;
15736 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15739 check_insn(ctx
, ISA_MIPS32R6
);
15740 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15741 extract32(ctx
->opcode
, 9, 2));
15744 check_insn(ctx
, ISA_MIPS32R6
);
15745 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15748 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15751 gen_pool32axf(env
, ctx
, rt
, rs
);
15754 generate_exception_end(ctx
, EXCP_BREAK
);
15757 check_insn(ctx
, ISA_MIPS32R6
);
15758 generate_exception_end(ctx
, EXCP_RI
);
15762 MIPS_INVAL("pool32a");
15763 generate_exception_end(ctx
, EXCP_RI
);
15768 minor
= (ctx
->opcode
>> 12) & 0xf;
15771 check_cp0_enabled(ctx
);
15772 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15773 gen_cache_operation(ctx
, rt
, rs
, imm
);
15778 /* COP2: Not implemented. */
15779 generate_exception_err(ctx
, EXCP_CpU
, 2);
15781 #ifdef TARGET_MIPS64
15784 check_insn(ctx
, ISA_MIPS3
);
15785 check_mips_64(ctx
);
15790 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15792 #ifdef TARGET_MIPS64
15795 check_insn(ctx
, ISA_MIPS3
);
15796 check_mips_64(ctx
);
15801 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15804 MIPS_INVAL("pool32b");
15805 generate_exception_end(ctx
, EXCP_RI
);
15810 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15811 minor
= ctx
->opcode
& 0x3f;
15812 check_cp1_enabled(ctx
);
15815 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15816 mips32_op
= OPC_ALNV_PS
;
15819 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15820 mips32_op
= OPC_MADD_S
;
15823 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15824 mips32_op
= OPC_MADD_D
;
15827 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15828 mips32_op
= OPC_MADD_PS
;
15831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15832 mips32_op
= OPC_MSUB_S
;
15835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15836 mips32_op
= OPC_MSUB_D
;
15839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15840 mips32_op
= OPC_MSUB_PS
;
15843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15844 mips32_op
= OPC_NMADD_S
;
15847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15848 mips32_op
= OPC_NMADD_D
;
15851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15852 mips32_op
= OPC_NMADD_PS
;
15855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15856 mips32_op
= OPC_NMSUB_S
;
15859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15860 mips32_op
= OPC_NMSUB_D
;
15863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15864 mips32_op
= OPC_NMSUB_PS
;
15866 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15868 case CABS_COND_FMT
:
15869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15870 cond
= (ctx
->opcode
>> 6) & 0xf;
15871 cc
= (ctx
->opcode
>> 13) & 0x7;
15872 fmt
= (ctx
->opcode
>> 10) & 0x3;
15875 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15878 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15881 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15884 goto pool32f_invalid
;
15888 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15889 cond
= (ctx
->opcode
>> 6) & 0xf;
15890 cc
= (ctx
->opcode
>> 13) & 0x7;
15891 fmt
= (ctx
->opcode
>> 10) & 0x3;
15894 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15897 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15900 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15903 goto pool32f_invalid
;
15907 check_insn(ctx
, ISA_MIPS32R6
);
15908 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15911 check_insn(ctx
, ISA_MIPS32R6
);
15912 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15915 gen_pool32fxf(ctx
, rt
, rs
);
15919 switch ((ctx
->opcode
>> 6) & 0x7) {
15921 mips32_op
= OPC_PLL_PS
;
15924 mips32_op
= OPC_PLU_PS
;
15927 mips32_op
= OPC_PUL_PS
;
15930 mips32_op
= OPC_PUU_PS
;
15933 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15934 mips32_op
= OPC_CVT_PS_S
;
15936 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15939 goto pool32f_invalid
;
15943 check_insn(ctx
, ISA_MIPS32R6
);
15944 switch ((ctx
->opcode
>> 9) & 0x3) {
15946 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15949 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15952 goto pool32f_invalid
;
15957 switch ((ctx
->opcode
>> 6) & 0x7) {
15959 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15960 mips32_op
= OPC_LWXC1
;
15963 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15964 mips32_op
= OPC_SWXC1
;
15967 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15968 mips32_op
= OPC_LDXC1
;
15971 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15972 mips32_op
= OPC_SDXC1
;
15975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15976 mips32_op
= OPC_LUXC1
;
15979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15980 mips32_op
= OPC_SUXC1
;
15982 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
15985 goto pool32f_invalid
;
15989 check_insn(ctx
, ISA_MIPS32R6
);
15990 switch ((ctx
->opcode
>> 9) & 0x3) {
15992 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
15995 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
15998 goto pool32f_invalid
;
16003 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16004 fmt
= (ctx
->opcode
>> 9) & 0x3;
16005 switch ((ctx
->opcode
>> 6) & 0x7) {
16009 mips32_op
= OPC_RSQRT2_S
;
16012 mips32_op
= OPC_RSQRT2_D
;
16015 mips32_op
= OPC_RSQRT2_PS
;
16018 goto pool32f_invalid
;
16024 mips32_op
= OPC_RECIP2_S
;
16027 mips32_op
= OPC_RECIP2_D
;
16030 mips32_op
= OPC_RECIP2_PS
;
16033 goto pool32f_invalid
;
16037 mips32_op
= OPC_ADDR_PS
;
16040 mips32_op
= OPC_MULR_PS
;
16042 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16045 goto pool32f_invalid
;
16049 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16050 cc
= (ctx
->opcode
>> 13) & 0x7;
16051 fmt
= (ctx
->opcode
>> 9) & 0x3;
16052 switch ((ctx
->opcode
>> 6) & 0x7) {
16053 case MOVF_FMT
: /* RINT_FMT */
16054 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16058 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16061 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16064 goto pool32f_invalid
;
16070 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16073 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16077 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16080 goto pool32f_invalid
;
16084 case MOVT_FMT
: /* CLASS_FMT */
16085 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16089 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16092 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16095 goto pool32f_invalid
;
16101 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16104 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16108 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16111 goto pool32f_invalid
;
16116 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16119 goto pool32f_invalid
;
16122 #define FINSN_3ARG_SDPS(prfx) \
16123 switch ((ctx->opcode >> 8) & 0x3) { \
16125 mips32_op = OPC_##prfx##_S; \
16128 mips32_op = OPC_##prfx##_D; \
16130 case FMT_SDPS_PS: \
16132 mips32_op = OPC_##prfx##_PS; \
16135 goto pool32f_invalid; \
16138 check_insn(ctx
, ISA_MIPS32R6
);
16139 switch ((ctx
->opcode
>> 9) & 0x3) {
16141 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16144 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16147 goto pool32f_invalid
;
16151 check_insn(ctx
, ISA_MIPS32R6
);
16152 switch ((ctx
->opcode
>> 9) & 0x3) {
16154 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16157 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16160 goto pool32f_invalid
;
16164 /* regular FP ops */
16165 switch ((ctx
->opcode
>> 6) & 0x3) {
16167 FINSN_3ARG_SDPS(ADD
);
16170 FINSN_3ARG_SDPS(SUB
);
16173 FINSN_3ARG_SDPS(MUL
);
16176 fmt
= (ctx
->opcode
>> 8) & 0x3;
16178 mips32_op
= OPC_DIV_D
;
16179 } else if (fmt
== 0) {
16180 mips32_op
= OPC_DIV_S
;
16182 goto pool32f_invalid
;
16186 goto pool32f_invalid
;
16191 switch ((ctx
->opcode
>> 6) & 0x7) {
16192 case MOVN_FMT
: /* SELEQZ_FMT */
16193 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16195 switch ((ctx
->opcode
>> 9) & 0x3) {
16197 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16200 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16203 goto pool32f_invalid
;
16207 FINSN_3ARG_SDPS(MOVN
);
16211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16212 FINSN_3ARG_SDPS(MOVN
);
16214 case MOVZ_FMT
: /* SELNEZ_FMT */
16215 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16217 switch ((ctx
->opcode
>> 9) & 0x3) {
16219 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16222 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16225 goto pool32f_invalid
;
16229 FINSN_3ARG_SDPS(MOVZ
);
16233 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16234 FINSN_3ARG_SDPS(MOVZ
);
16237 check_insn(ctx
, ISA_MIPS32R6
);
16238 switch ((ctx
->opcode
>> 9) & 0x3) {
16240 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16243 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16246 goto pool32f_invalid
;
16250 check_insn(ctx
, ISA_MIPS32R6
);
16251 switch ((ctx
->opcode
>> 9) & 0x3) {
16253 mips32_op
= OPC_MADDF_S
;
16256 mips32_op
= OPC_MADDF_D
;
16259 goto pool32f_invalid
;
16263 check_insn(ctx
, ISA_MIPS32R6
);
16264 switch ((ctx
->opcode
>> 9) & 0x3) {
16266 mips32_op
= OPC_MSUBF_S
;
16269 mips32_op
= OPC_MSUBF_D
;
16272 goto pool32f_invalid
;
16276 goto pool32f_invalid
;
16280 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16284 MIPS_INVAL("pool32f");
16285 generate_exception_end(ctx
, EXCP_RI
);
16289 generate_exception_err(ctx
, EXCP_CpU
, 1);
16293 minor
= (ctx
->opcode
>> 21) & 0x1f;
16296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16297 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16300 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16301 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16302 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16306 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16307 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16310 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16311 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16314 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16315 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16316 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16319 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16320 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16321 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16325 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16329 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16333 case TLTI
: /* BC1EQZC */
16334 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16336 check_cp1_enabled(ctx
);
16337 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16340 mips32_op
= OPC_TLTI
;
16344 case TGEI
: /* BC1NEZC */
16345 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16347 check_cp1_enabled(ctx
);
16348 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16351 mips32_op
= OPC_TGEI
;
16356 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16357 mips32_op
= OPC_TLTIU
;
16360 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16361 mips32_op
= OPC_TGEIU
;
16363 case TNEI
: /* SYNCI */
16364 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16366 /* Break the TB to be able to sync copied instructions
16368 ctx
->base
.is_jmp
= DISAS_STOP
;
16371 mips32_op
= OPC_TNEI
;
16376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16377 mips32_op
= OPC_TEQI
;
16379 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16385 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16386 4, rs
, 0, imm
<< 1, 0);
16387 /* Compact branches don't have a delay slot, so just let
16388 the normal delay slot handling take us to the branch
16392 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16393 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16396 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16397 /* Break the TB to be able to sync copied instructions
16399 ctx
->base
.is_jmp
= DISAS_STOP
;
16403 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16404 /* COP2: Not implemented. */
16405 generate_exception_err(ctx
, EXCP_CpU
, 2);
16408 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16409 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16412 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16413 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16416 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16417 mips32_op
= OPC_BC1FANY4
;
16420 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16421 mips32_op
= OPC_BC1TANY4
;
16424 check_insn(ctx
, ASE_MIPS3D
);
16427 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16428 check_cp1_enabled(ctx
);
16429 gen_compute_branch1(ctx
, mips32_op
,
16430 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16432 generate_exception_err(ctx
, EXCP_CpU
, 1);
16437 /* MIPS DSP: not implemented */
16440 MIPS_INVAL("pool32i");
16441 generate_exception_end(ctx
, EXCP_RI
);
16446 minor
= (ctx
->opcode
>> 12) & 0xf;
16447 offset
= sextract32(ctx
->opcode
, 0,
16448 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16451 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16452 mips32_op
= OPC_LWL
;
16455 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16456 mips32_op
= OPC_SWL
;
16459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16460 mips32_op
= OPC_LWR
;
16463 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16464 mips32_op
= OPC_SWR
;
16466 #if defined(TARGET_MIPS64)
16468 check_insn(ctx
, ISA_MIPS3
);
16469 check_mips_64(ctx
);
16470 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16471 mips32_op
= OPC_LDL
;
16474 check_insn(ctx
, ISA_MIPS3
);
16475 check_mips_64(ctx
);
16476 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16477 mips32_op
= OPC_SDL
;
16480 check_insn(ctx
, ISA_MIPS3
);
16481 check_mips_64(ctx
);
16482 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16483 mips32_op
= OPC_LDR
;
16486 check_insn(ctx
, ISA_MIPS3
);
16487 check_mips_64(ctx
);
16488 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16489 mips32_op
= OPC_SDR
;
16492 check_insn(ctx
, ISA_MIPS3
);
16493 check_mips_64(ctx
);
16494 mips32_op
= OPC_LWU
;
16497 check_insn(ctx
, ISA_MIPS3
);
16498 check_mips_64(ctx
);
16499 mips32_op
= OPC_LLD
;
16503 mips32_op
= OPC_LL
;
16506 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16509 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16512 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16514 #if defined(TARGET_MIPS64)
16516 check_insn(ctx
, ISA_MIPS3
);
16517 check_mips_64(ctx
);
16518 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16523 MIPS_INVAL("pool32c ld-eva");
16524 generate_exception_end(ctx
, EXCP_RI
);
16527 check_cp0_enabled(ctx
);
16529 minor2
= (ctx
->opcode
>> 9) & 0x7;
16530 offset
= sextract32(ctx
->opcode
, 0, 9);
16533 mips32_op
= OPC_LBUE
;
16536 mips32_op
= OPC_LHUE
;
16539 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16540 mips32_op
= OPC_LWLE
;
16543 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16544 mips32_op
= OPC_LWRE
;
16547 mips32_op
= OPC_LBE
;
16550 mips32_op
= OPC_LHE
;
16553 mips32_op
= OPC_LLE
;
16556 mips32_op
= OPC_LWE
;
16562 MIPS_INVAL("pool32c st-eva");
16563 generate_exception_end(ctx
, EXCP_RI
);
16566 check_cp0_enabled(ctx
);
16568 minor2
= (ctx
->opcode
>> 9) & 0x7;
16569 offset
= sextract32(ctx
->opcode
, 0, 9);
16572 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16573 mips32_op
= OPC_SWLE
;
16576 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16577 mips32_op
= OPC_SWRE
;
16580 /* Treat as no-op */
16581 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16582 /* hint codes 24-31 are reserved and signal RI */
16583 generate_exception(ctx
, EXCP_RI
);
16587 /* Treat as no-op */
16588 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16589 gen_cache_operation(ctx
, rt
, rs
, offset
);
16593 mips32_op
= OPC_SBE
;
16596 mips32_op
= OPC_SHE
;
16599 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16602 mips32_op
= OPC_SWE
;
16607 /* Treat as no-op */
16608 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16609 /* hint codes 24-31 are reserved and signal RI */
16610 generate_exception(ctx
, EXCP_RI
);
16614 MIPS_INVAL("pool32c");
16615 generate_exception_end(ctx
, EXCP_RI
);
16619 case ADDI32
: /* AUI, LUI */
16620 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16622 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16625 mips32_op
= OPC_ADDI
;
16630 mips32_op
= OPC_ADDIU
;
16632 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16635 /* Logical operations */
16637 mips32_op
= OPC_ORI
;
16640 mips32_op
= OPC_XORI
;
16643 mips32_op
= OPC_ANDI
;
16645 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16648 /* Set less than immediate */
16650 mips32_op
= OPC_SLTI
;
16653 mips32_op
= OPC_SLTIU
;
16655 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16659 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16660 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16661 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16663 case JALS32
: /* BOVC, BEQC, BEQZALC */
16664 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16667 mips32_op
= OPC_BOVC
;
16668 } else if (rs
< rt
&& rs
== 0) {
16670 mips32_op
= OPC_BEQZALC
;
16673 mips32_op
= OPC_BEQC
;
16675 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16678 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16679 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16680 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16683 case BEQ32
: /* BC */
16684 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16686 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16687 sextract32(ctx
->opcode
<< 1, 0, 27));
16690 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16693 case BNE32
: /* BALC */
16694 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16696 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16697 sextract32(ctx
->opcode
<< 1, 0, 27));
16700 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16703 case J32
: /* BGTZC, BLTZC, BLTC */
16704 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16705 if (rs
== 0 && rt
!= 0) {
16707 mips32_op
= OPC_BGTZC
;
16708 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16710 mips32_op
= OPC_BLTZC
;
16713 mips32_op
= OPC_BLTC
;
16715 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16718 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16719 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16722 case JAL32
: /* BLEZC, BGEZC, BGEC */
16723 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16724 if (rs
== 0 && rt
!= 0) {
16726 mips32_op
= OPC_BLEZC
;
16727 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16729 mips32_op
= OPC_BGEZC
;
16732 mips32_op
= OPC_BGEC
;
16734 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16737 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16738 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16739 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16742 /* Floating point (COP1) */
16744 mips32_op
= OPC_LWC1
;
16747 mips32_op
= OPC_LDC1
;
16750 mips32_op
= OPC_SWC1
;
16753 mips32_op
= OPC_SDC1
;
16755 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16757 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16758 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16759 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16760 switch ((ctx
->opcode
>> 16) & 0x1f) {
16769 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16772 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16775 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16785 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16788 generate_exception(ctx
, EXCP_RI
);
16793 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16794 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16796 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16799 case BNVC
: /* BNEC, BNEZALC */
16800 check_insn(ctx
, ISA_MIPS32R6
);
16803 mips32_op
= OPC_BNVC
;
16804 } else if (rs
< rt
&& rs
== 0) {
16806 mips32_op
= OPC_BNEZALC
;
16809 mips32_op
= OPC_BNEC
;
16811 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16813 case R6_BNEZC
: /* JIALC */
16814 check_insn(ctx
, ISA_MIPS32R6
);
16817 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16818 sextract32(ctx
->opcode
<< 1, 0, 22));
16821 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16824 case R6_BEQZC
: /* JIC */
16825 check_insn(ctx
, ISA_MIPS32R6
);
16828 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16829 sextract32(ctx
->opcode
<< 1, 0, 22));
16832 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16835 case BLEZALC
: /* BGEZALC, BGEUC */
16836 check_insn(ctx
, ISA_MIPS32R6
);
16837 if (rs
== 0 && rt
!= 0) {
16839 mips32_op
= OPC_BLEZALC
;
16840 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16842 mips32_op
= OPC_BGEZALC
;
16845 mips32_op
= OPC_BGEUC
;
16847 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16849 case BGTZALC
: /* BLTZALC, BLTUC */
16850 check_insn(ctx
, ISA_MIPS32R6
);
16851 if (rs
== 0 && rt
!= 0) {
16853 mips32_op
= OPC_BGTZALC
;
16854 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16856 mips32_op
= OPC_BLTZALC
;
16859 mips32_op
= OPC_BLTUC
;
16861 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16863 /* Loads and stores */
16865 mips32_op
= OPC_LB
;
16868 mips32_op
= OPC_LBU
;
16871 mips32_op
= OPC_LH
;
16874 mips32_op
= OPC_LHU
;
16877 mips32_op
= OPC_LW
;
16879 #ifdef TARGET_MIPS64
16881 check_insn(ctx
, ISA_MIPS3
);
16882 check_mips_64(ctx
);
16883 mips32_op
= OPC_LD
;
16886 check_insn(ctx
, ISA_MIPS3
);
16887 check_mips_64(ctx
);
16888 mips32_op
= OPC_SD
;
16892 mips32_op
= OPC_SB
;
16895 mips32_op
= OPC_SH
;
16898 mips32_op
= OPC_SW
;
16901 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16904 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16907 generate_exception_end(ctx
, EXCP_RI
);
16912 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16916 /* make sure instructions are on a halfword boundary */
16917 if (ctx
->base
.pc_next
& 0x1) {
16918 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16919 generate_exception_end(ctx
, EXCP_AdEL
);
16923 op
= (ctx
->opcode
>> 10) & 0x3f;
16924 /* Enforce properly-sized instructions in a delay slot */
16925 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16926 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16928 /* POOL32A, POOL32B, POOL32I, POOL32C */
16930 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16932 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16934 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16936 /* LB32, LH32, LWC132, LDC132, LW32 */
16937 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16938 generate_exception_end(ctx
, EXCP_RI
);
16943 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16945 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16947 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16948 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16949 generate_exception_end(ctx
, EXCP_RI
);
16959 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16960 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16961 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
16964 switch (ctx
->opcode
& 0x1) {
16972 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16973 /* In the Release 6 the register number location in
16974 * the instruction encoding has changed.
16976 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
16978 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
16984 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16985 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
16986 int amount
= (ctx
->opcode
>> 1) & 0x7;
16988 amount
= amount
== 0 ? 8 : amount
;
16990 switch (ctx
->opcode
& 0x1) {
16999 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17003 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17004 gen_pool16c_r6_insn(ctx
);
17006 gen_pool16c_insn(ctx
);
17011 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17012 int rb
= 28; /* GP */
17013 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17015 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17019 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17020 if (ctx
->opcode
& 1) {
17021 generate_exception_end(ctx
, EXCP_RI
);
17024 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17025 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17026 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17027 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17032 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17033 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17034 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17035 offset
= (offset
== 0xf ? -1 : offset
);
17037 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17042 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17043 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17044 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17046 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17051 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17052 int rb
= 29; /* SP */
17053 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17055 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17060 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17061 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17062 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17064 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17069 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17070 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17071 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17073 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17078 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17079 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17080 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17082 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17087 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17088 int rb
= 29; /* SP */
17089 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17091 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17096 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17097 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17098 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17100 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17105 int rd
= uMIPS_RD5(ctx
->opcode
);
17106 int rs
= uMIPS_RS5(ctx
->opcode
);
17108 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17115 switch (ctx
->opcode
& 0x1) {
17125 switch (ctx
->opcode
& 0x1) {
17130 gen_addiur1sp(ctx
);
17134 case B16
: /* BC16 */
17135 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17136 sextract32(ctx
->opcode
, 0, 10) << 1,
17137 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17139 case BNEZ16
: /* BNEZC16 */
17140 case BEQZ16
: /* BEQZC16 */
17141 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17142 mmreg(uMIPS_RD(ctx
->opcode
)),
17143 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17144 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17149 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17150 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17152 imm
= (imm
== 0x7f ? -1 : imm
);
17153 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17159 generate_exception_end(ctx
, EXCP_RI
);
17162 decode_micromips32_opc(env
, ctx
);
17175 /* MAJOR, P16, and P32 pools opcodes */
17179 NM_MOVE_BALC
= 0x02,
17187 NM_P16_SHIFT
= 0x0c,
17205 NM_P_LS_U12
= 0x21,
17215 NM_P16_ADDU
= 0x2c,
17229 NM_MOVEPREV
= 0x3f,
17232 /* POOL32A instruction pool */
17234 NM_POOL32A0
= 0x00,
17235 NM_SPECIAL2
= 0x01,
17238 NM_POOL32A5
= 0x05,
17239 NM_POOL32A7
= 0x07,
17242 /* P.GP.W instruction pool */
17244 NM_ADDIUGP_W
= 0x00,
17249 /* P48I instruction pool */
17253 NM_ADDIUGP48
= 0x02,
17254 NM_ADDIUPC48
= 0x03,
17259 /* P.U12 instruction pool */
17268 NM_ADDIUNEG
= 0x08,
17275 /* POOL32F instruction pool */
17277 NM_POOL32F_0
= 0x00,
17278 NM_POOL32F_3
= 0x03,
17279 NM_POOL32F_5
= 0x05,
17282 /* POOL32S instruction pool */
17284 NM_POOL32S_0
= 0x00,
17285 NM_POOL32S_4
= 0x04,
17288 /* P.LUI instruction pool */
17294 /* P.GP.BH instruction pool */
17299 NM_ADDIUGP_B
= 0x03,
17302 NM_P_GP_CP1
= 0x06,
17305 /* P.LS.U12 instruction pool */
17310 NM_P_PREFU12
= 0x03,
17323 /* P.LS.S9 instruction pool */
17329 NM_P_LS_UAWM
= 0x05,
17332 /* P.BAL instruction pool */
17338 /* P.J instruction pool */
17341 NM_JALRC_HB
= 0x01,
17342 NM_P_BALRSC
= 0x08,
17345 /* P.BR1 instruction pool */
17353 /* P.BR2 instruction pool */
17360 /* P.BRI instruction pool */
17372 /* P16.SHIFT instruction pool */
17378 /* POOL16C instruction pool */
17380 NM_POOL16C_0
= 0x00,
17384 /* P16.A1 instruction pool */
17386 NM_ADDIUR1SP
= 0x01,
17389 /* P16.A2 instruction pool */
17392 NM_P_ADDIURS5
= 0x01,
17395 /* P16.ADDU instruction pool */
17401 /* P16.SR instruction pool */
17404 NM_RESTORE_JRC16
= 0x01,
17407 /* P16.4X4 instruction pool */
17413 /* P16.LB instruction pool */
17420 /* P16.LH instruction pool */
17427 /* P.RI instruction pool */
17430 NM_P_SYSCALL
= 0x01,
17435 /* POOL32A0 instruction pool */
17470 NM_D_E_MT_VPE
= 0x56,
17478 /* POOL32A5 instruction pool */
17480 NM_CMP_EQ_PH
= 0x00,
17481 NM_CMP_LT_PH
= 0x08,
17482 NM_CMP_LE_PH
= 0x10,
17483 NM_CMPGU_EQ_QB
= 0x18,
17484 NM_CMPGU_LT_QB
= 0x20,
17485 NM_CMPGU_LE_QB
= 0x28,
17486 NM_CMPGDU_EQ_QB
= 0x30,
17487 NM_CMPGDU_LT_QB
= 0x38,
17488 NM_CMPGDU_LE_QB
= 0x40,
17489 NM_CMPU_EQ_QB
= 0x48,
17490 NM_CMPU_LT_QB
= 0x50,
17491 NM_CMPU_LE_QB
= 0x58,
17492 NM_ADDQ_S_W
= 0x60,
17493 NM_SUBQ_S_W
= 0x68,
17497 NM_ADDQ_S_PH
= 0x01,
17498 NM_ADDQH_R_PH
= 0x09,
17499 NM_ADDQH_R_W
= 0x11,
17500 NM_ADDU_S_QB
= 0x19,
17501 NM_ADDU_S_PH
= 0x21,
17502 NM_ADDUH_R_QB
= 0x29,
17503 NM_SHRAV_R_PH
= 0x31,
17504 NM_SHRAV_R_QB
= 0x39,
17505 NM_SUBQ_S_PH
= 0x41,
17506 NM_SUBQH_R_PH
= 0x49,
17507 NM_SUBQH_R_W
= 0x51,
17508 NM_SUBU_S_QB
= 0x59,
17509 NM_SUBU_S_PH
= 0x61,
17510 NM_SUBUH_R_QB
= 0x69,
17511 NM_SHLLV_S_PH
= 0x71,
17512 NM_PRECR_SRA_R_PH_W
= 0x79,
17514 NM_MULEU_S_PH_QBL
= 0x12,
17515 NM_MULEU_S_PH_QBR
= 0x1a,
17516 NM_MULQ_RS_PH
= 0x22,
17517 NM_MULQ_S_PH
= 0x2a,
17518 NM_MULQ_RS_W
= 0x32,
17519 NM_MULQ_S_W
= 0x3a,
17522 NM_SHRAV_R_W
= 0x5a,
17523 NM_SHRLV_PH
= 0x62,
17524 NM_SHRLV_QB
= 0x6a,
17525 NM_SHLLV_QB
= 0x72,
17526 NM_SHLLV_S_W
= 0x7a,
17530 NM_MULEQ_S_W_PHL
= 0x04,
17531 NM_MULEQ_S_W_PHR
= 0x0c,
17533 NM_MUL_S_PH
= 0x05,
17534 NM_PRECR_QB_PH
= 0x0d,
17535 NM_PRECRQ_QB_PH
= 0x15,
17536 NM_PRECRQ_PH_W
= 0x1d,
17537 NM_PRECRQ_RS_PH_W
= 0x25,
17538 NM_PRECRQU_S_QB_PH
= 0x2d,
17539 NM_PACKRL_PH
= 0x35,
17543 NM_SHRA_R_W
= 0x5e,
17544 NM_SHRA_R_PH
= 0x66,
17545 NM_SHLL_S_PH
= 0x76,
17546 NM_SHLL_S_W
= 0x7e,
17551 /* POOL32A7 instruction pool */
17556 NM_POOL32AXF
= 0x07,
17559 /* P.SR instruction pool */
17565 /* P.SHIFT instruction pool */
17573 /* P.ROTX instruction pool */
17578 /* P.INS instruction pool */
17583 /* P.EXT instruction pool */
17588 /* POOL32F_0 (fmt) instruction pool */
17593 NM_SELEQZ_S
= 0x07,
17594 NM_SELEQZ_D
= 0x47,
17598 NM_SELNEZ_S
= 0x0f,
17599 NM_SELNEZ_D
= 0x4f,
17614 /* POOL32F_3 instruction pool */
17618 NM_MINA_FMT
= 0x04,
17619 NM_MAXA_FMT
= 0x05,
17620 NM_POOL32FXF
= 0x07,
17623 /* POOL32F_5 instruction pool */
17625 NM_CMP_CONDN_S
= 0x00,
17626 NM_CMP_CONDN_D
= 0x02,
17629 /* P.GP.LH instruction pool */
17635 /* P.GP.SH instruction pool */
17640 /* P.GP.CP1 instruction pool */
17648 /* P.LS.S0 instruction pool */
17665 NM_P_PREFS9
= 0x03,
17671 /* P.LS.S1 instruction pool */
17673 NM_ASET_ACLR
= 0x02,
17681 /* P.LS.E0 instruction pool */
17697 /* P.PREFE instruction pool */
17703 /* P.LLE instruction pool */
17709 /* P.SCE instruction pool */
17715 /* P.LS.WM instruction pool */
17721 /* P.LS.UAWM instruction pool */
17727 /* P.BR3A instruction pool */
17733 NM_BPOSGE32C
= 0x04,
17736 /* P16.RI instruction pool */
17738 NM_P16_SYSCALL
= 0x01,
17743 /* POOL16C_0 instruction pool */
17745 NM_POOL16C_00
= 0x00,
17748 /* P16.JRC instruction pool */
17754 /* P.SYSCALL instruction pool */
17760 /* P.TRAP instruction pool */
17766 /* P.CMOVE instruction pool */
17772 /* POOL32Axf instruction pool */
17774 NM_POOL32AXF_1
= 0x01,
17775 NM_POOL32AXF_2
= 0x02,
17776 NM_POOL32AXF_4
= 0x04,
17777 NM_POOL32AXF_5
= 0x05,
17778 NM_POOL32AXF_7
= 0x07,
17781 /* POOL32Axf_1 instruction pool */
17783 NM_POOL32AXF_1_0
= 0x00,
17784 NM_POOL32AXF_1_1
= 0x01,
17785 NM_POOL32AXF_1_3
= 0x03,
17786 NM_POOL32AXF_1_4
= 0x04,
17787 NM_POOL32AXF_1_5
= 0x05,
17788 NM_POOL32AXF_1_7
= 0x07,
17791 /* POOL32Axf_2 instruction pool */
17793 NM_POOL32AXF_2_0_7
= 0x00,
17794 NM_POOL32AXF_2_8_15
= 0x01,
17795 NM_POOL32AXF_2_16_23
= 0x02,
17796 NM_POOL32AXF_2_24_31
= 0x03,
17799 /* POOL32Axf_7 instruction pool */
17801 NM_SHRA_R_QB
= 0x0,
17806 /* POOL32Axf_1_0 instruction pool */
17814 /* POOL32Axf_1_1 instruction pool */
17820 /* POOL32Axf_1_3 instruction pool */
17828 /* POOL32Axf_1_4 instruction pool */
17834 /* POOL32Axf_1_5 instruction pool */
17836 NM_MAQ_S_W_PHR
= 0x0,
17837 NM_MAQ_S_W_PHL
= 0x1,
17838 NM_MAQ_SA_W_PHR
= 0x2,
17839 NM_MAQ_SA_W_PHL
= 0x3,
17842 /* POOL32Axf_1_7 instruction pool */
17846 NM_EXTR_RS_W
= 0x2,
17850 /* POOL32Axf_2_0_7 instruction pool */
17853 NM_DPAQ_S_W_PH
= 0x1,
17855 NM_DPSQ_S_W_PH
= 0x3,
17862 /* POOL32Axf_2_8_15 instruction pool */
17864 NM_DPAX_W_PH
= 0x0,
17865 NM_DPAQ_SA_L_W
= 0x1,
17866 NM_DPSX_W_PH
= 0x2,
17867 NM_DPSQ_SA_L_W
= 0x3,
17870 NM_EXTRV_R_W
= 0x7,
17873 /* POOL32Axf_2_16_23 instruction pool */
17875 NM_DPAU_H_QBL
= 0x0,
17876 NM_DPAQX_S_W_PH
= 0x1,
17877 NM_DPSU_H_QBL
= 0x2,
17878 NM_DPSQX_S_W_PH
= 0x3,
17881 NM_MULSA_W_PH
= 0x6,
17882 NM_EXTRV_RS_W
= 0x7,
17885 /* POOL32Axf_2_24_31 instruction pool */
17887 NM_DPAU_H_QBR
= 0x0,
17888 NM_DPAQX_SA_W_PH
= 0x1,
17889 NM_DPSU_H_QBR
= 0x2,
17890 NM_DPSQX_SA_W_PH
= 0x3,
17893 NM_MULSAQ_S_W_PH
= 0x6,
17894 NM_EXTRV_S_H
= 0x7,
17897 /* POOL32Axf_{4, 5} instruction pool */
17916 /* nanoMIPS DSP instructions */
17917 NM_ABSQ_S_QB
= 0x00,
17918 NM_ABSQ_S_PH
= 0x08,
17919 NM_ABSQ_S_W
= 0x10,
17920 NM_PRECEQ_W_PHL
= 0x28,
17921 NM_PRECEQ_W_PHR
= 0x30,
17922 NM_PRECEQU_PH_QBL
= 0x38,
17923 NM_PRECEQU_PH_QBR
= 0x48,
17924 NM_PRECEU_PH_QBL
= 0x58,
17925 NM_PRECEU_PH_QBR
= 0x68,
17926 NM_PRECEQU_PH_QBLA
= 0x39,
17927 NM_PRECEQU_PH_QBRA
= 0x49,
17928 NM_PRECEU_PH_QBLA
= 0x59,
17929 NM_PRECEU_PH_QBRA
= 0x69,
17930 NM_REPLV_PH
= 0x01,
17931 NM_REPLV_QB
= 0x09,
17934 NM_RADDU_W_QB
= 0x78,
17940 /* PP.SR instruction pool */
17944 NM_RESTORE_JRC
= 0x03,
17947 /* P.SR.F instruction pool */
17950 NM_RESTOREF
= 0x01,
17953 /* P16.SYSCALL instruction pool */
17955 NM_SYSCALL16
= 0x00,
17956 NM_HYPCALL16
= 0x01,
17959 /* POOL16C_00 instruction pool */
17967 /* PP.LSX and PP.LSXS instruction pool */
18005 /* ERETx instruction pool */
18011 /* POOL32FxF_{0, 1} insturction pool */
18020 NM_CVT_S_PL
= 0x84,
18021 NM_CVT_S_PU
= 0xa4,
18023 NM_CVT_L_S
= 0x004,
18024 NM_CVT_L_D
= 0x104,
18025 NM_CVT_W_S
= 0x024,
18026 NM_CVT_W_D
= 0x124,
18028 NM_RSQRT_S
= 0x008,
18029 NM_RSQRT_D
= 0x108,
18034 NM_RECIP_S
= 0x048,
18035 NM_RECIP_D
= 0x148,
18037 NM_FLOOR_L_S
= 0x00c,
18038 NM_FLOOR_L_D
= 0x10c,
18040 NM_FLOOR_W_S
= 0x02c,
18041 NM_FLOOR_W_D
= 0x12c,
18043 NM_CEIL_L_S
= 0x04c,
18044 NM_CEIL_L_D
= 0x14c,
18045 NM_CEIL_W_S
= 0x06c,
18046 NM_CEIL_W_D
= 0x16c,
18047 NM_TRUNC_L_S
= 0x08c,
18048 NM_TRUNC_L_D
= 0x18c,
18049 NM_TRUNC_W_S
= 0x0ac,
18050 NM_TRUNC_W_D
= 0x1ac,
18051 NM_ROUND_L_S
= 0x0cc,
18052 NM_ROUND_L_D
= 0x1cc,
18053 NM_ROUND_W_S
= 0x0ec,
18054 NM_ROUND_W_D
= 0x1ec,
18062 NM_CVT_D_S
= 0x04d,
18063 NM_CVT_D_W
= 0x0cd,
18064 NM_CVT_D_L
= 0x14d,
18065 NM_CVT_S_D
= 0x06d,
18066 NM_CVT_S_W
= 0x0ed,
18067 NM_CVT_S_L
= 0x16d,
18070 /* P.LL instruction pool */
18076 /* P.SC instruction pool */
18082 /* P.DVP instruction pool */
18091 * nanoMIPS decoding engine
18096 /* extraction utilities */
18098 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18099 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18100 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18101 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18102 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18103 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18105 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * Map a 3-bit encoded register field onto the architectural GPR it
 * names: encodings 0-3 select $16-$19, encodings 4-7 select $4-$7.
 * The input is masked to 3 bits, so out-of-range values wrap.
 */
static inline int decode_gpr_gpr3(int r)
{
    static const int gpr3_regs[8] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_regs[r & 0x7];
}
18113 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Same mapping as decode_gpr_gpr3() except that encoding 0 selects $0
 * (the zero register) instead of $16, for store-source operands.
 * The input is masked to 3 bits, so out-of-range values wrap.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int gpr3_src_store_regs[8] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return gpr3_src_store_regs[r & 0x7];
}
18121 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * Map a 4-bit encoded register field onto the architectural GPR it
 * names: encodings 0-3 select $8-$11, 4-7 select $4-$7, and 8-15
 * select $16-$23.  The input is masked to 4 bits.
 */
static inline int decode_gpr_gpr4(int r)
{
    static const int gpr4_regs[16] = {
        8, 9, 10, 11, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_regs[r & 0xf];
}
18130 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Same mapping as decode_gpr_gpr4() except that encoding 3 selects $0
 * (the zero register) instead of $11.  The input is masked to 4 bits.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int gpr4_zero_regs[16] = {
        8, 9, 10, 0, 4, 5, 6, 7,
        16, 17, 18, 19, 20, 21, 22, 23
    };

    return gpr4_zero_regs[r & 0xf];
}
18140 /* extraction utilities */
18142 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18143 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18144 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18145 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18146 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18147 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18150 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18152 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18155 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18156 uint8_t gp
, uint16_t u
)
18159 TCGv va
= tcg_temp_new();
18160 TCGv t0
= tcg_temp_new();
18162 while (counter
!= count
) {
18163 bool use_gp
= gp
&& (counter
== count
- 1);
18164 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18165 int this_offset
= -((counter
+ 1) << 2);
18166 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18167 gen_load_gpr(t0
, this_rt
);
18168 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18169 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18173 /* adjust stack pointer */
18174 gen_adjust_sp(ctx
, -u
);
18180 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18181 uint8_t gp
, uint16_t u
)
18184 TCGv va
= tcg_temp_new();
18185 TCGv t0
= tcg_temp_new();
18187 while (counter
!= count
) {
18188 bool use_gp
= gp
&& (counter
== count
- 1);
18189 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18190 int this_offset
= u
- ((counter
+ 1) << 2);
18191 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18192 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18193 ctx
->default_tcg_memop_mask
);
18194 tcg_gen_ext32s_tl(t0
, t0
);
18195 gen_store_gpr(t0
, this_rt
);
18199 /* adjust stack pointer */
18200 gen_adjust_sp(ctx
, u
);
18206 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18208 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18209 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
18211 switch (extract32(ctx
->opcode
, 2, 2)) {
18213 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18216 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18219 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18222 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18227 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18229 int rt
= extract32(ctx
->opcode
, 21, 5);
18230 int rs
= extract32(ctx
->opcode
, 16, 5);
18231 int rd
= extract32(ctx
->opcode
, 11, 5);
18233 switch (extract32(ctx
->opcode
, 3, 7)) {
18235 switch (extract32(ctx
->opcode
, 10, 1)) {
18238 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18242 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18248 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18252 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18255 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18258 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18261 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18264 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18267 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18270 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18273 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18277 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18280 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18283 switch (extract32(ctx
->opcode
, 10, 1)) {
18285 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18288 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18293 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18296 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18299 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18302 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18305 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18310 #ifndef CONFIG_USER_ONLY
18311 TCGv t0
= tcg_temp_new();
18312 switch (extract32(ctx
->opcode
, 10, 1)) {
18315 check_cp0_enabled(ctx
);
18316 gen_helper_dvp(t0
, cpu_env
);
18317 gen_store_gpr(t0
, rt
);
18322 check_cp0_enabled(ctx
);
18323 gen_helper_evp(t0
, cpu_env
);
18324 gen_store_gpr(t0
, rt
);
18331 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18336 TCGv t0
= tcg_temp_new();
18337 TCGv t1
= tcg_temp_new();
18338 TCGv t2
= tcg_temp_new();
18340 gen_load_gpr(t1
, rs
);
18341 gen_load_gpr(t2
, rt
);
18342 tcg_gen_add_tl(t0
, t1
, t2
);
18343 tcg_gen_ext32s_tl(t0
, t0
);
18344 tcg_gen_xor_tl(t1
, t1
, t2
);
18345 tcg_gen_xor_tl(t2
, t0
, t2
);
18346 tcg_gen_andc_tl(t1
, t2
, t1
);
18348 /* operands of same sign, result different sign */
18349 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18350 gen_store_gpr(t0
, rd
);
18358 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18361 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18364 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18367 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18370 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18373 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18376 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18379 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18381 #ifndef CONFIG_USER_ONLY
18383 check_cp0_enabled(ctx
);
18385 /* Treat as NOP. */
18388 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18391 check_cp0_enabled(ctx
);
18393 TCGv t0
= tcg_temp_new();
18395 gen_load_gpr(t0
, rt
);
18396 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18400 case NM_D_E_MT_VPE
:
18402 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18403 TCGv t0
= tcg_temp_new();
18410 gen_helper_dmt(t0
);
18411 gen_store_gpr(t0
, rt
);
18412 } else if (rs
== 0) {
18415 gen_helper_dvpe(t0
, cpu_env
);
18416 gen_store_gpr(t0
, rt
);
18418 generate_exception_end(ctx
, EXCP_RI
);
18425 gen_helper_emt(t0
);
18426 gen_store_gpr(t0
, rt
);
18427 } else if (rs
== 0) {
18430 gen_helper_evpe(t0
, cpu_env
);
18431 gen_store_gpr(t0
, rt
);
18433 generate_exception_end(ctx
, EXCP_RI
);
18444 TCGv t0
= tcg_temp_new();
18445 TCGv t1
= tcg_temp_new();
18447 gen_load_gpr(t0
, rt
);
18448 gen_load_gpr(t1
, rs
);
18449 gen_helper_fork(t0
, t1
);
18456 check_cp0_enabled(ctx
);
18458 /* Treat as NOP. */
18461 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18462 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18466 check_cp0_enabled(ctx
);
18467 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18468 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18473 TCGv t0
= tcg_temp_new();
18475 gen_load_gpr(t0
, rs
);
18476 gen_helper_yield(t0
, cpu_env
, t0
);
18477 gen_store_gpr(t0
, rt
);
18483 generate_exception_end(ctx
, EXCP_RI
);
18489 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18490 int ret
, int v1
, int v2
)
18496 t0
= tcg_temp_new_i32();
18498 v0_t
= tcg_temp_new();
18499 v1_t
= tcg_temp_new();
18501 tcg_gen_movi_i32(t0
, v2
>> 3);
18503 gen_load_gpr(v0_t
, ret
);
18504 gen_load_gpr(v1_t
, v1
);
18507 case NM_MAQ_S_W_PHR
:
18509 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18511 case NM_MAQ_S_W_PHL
:
18513 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18515 case NM_MAQ_SA_W_PHR
:
18517 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18519 case NM_MAQ_SA_W_PHL
:
18521 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18524 generate_exception_end(ctx
, EXCP_RI
);
18528 tcg_temp_free_i32(t0
);
18530 tcg_temp_free(v0_t
);
18531 tcg_temp_free(v1_t
);
18535 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18536 int ret
, int v1
, int v2
)
18539 TCGv t0
= tcg_temp_new();
18540 TCGv t1
= tcg_temp_new();
18541 TCGv v0_t
= tcg_temp_new();
18543 gen_load_gpr(v0_t
, v1
);
18546 case NM_POOL32AXF_1_0
:
18548 switch (extract32(ctx
->opcode
, 12, 2)) {
18550 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18553 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18556 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18559 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18563 case NM_POOL32AXF_1_1
:
18565 switch (extract32(ctx
->opcode
, 12, 2)) {
18567 tcg_gen_movi_tl(t0
, v2
);
18568 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18571 tcg_gen_movi_tl(t0
, v2
>> 3);
18572 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18575 generate_exception_end(ctx
, EXCP_RI
);
18579 case NM_POOL32AXF_1_3
:
18581 imm
= extract32(ctx
->opcode
, 14, 7);
18582 switch (extract32(ctx
->opcode
, 12, 2)) {
18584 tcg_gen_movi_tl(t0
, imm
);
18585 gen_helper_rddsp(t0
, t0
, cpu_env
);
18586 gen_store_gpr(t0
, ret
);
18589 gen_load_gpr(t0
, ret
);
18590 tcg_gen_movi_tl(t1
, imm
);
18591 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18594 tcg_gen_movi_tl(t0
, v2
>> 3);
18595 tcg_gen_movi_tl(t1
, v1
);
18596 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18597 gen_store_gpr(t0
, ret
);
18600 tcg_gen_movi_tl(t0
, v2
>> 3);
18601 tcg_gen_movi_tl(t1
, v1
);
18602 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18603 gen_store_gpr(t0
, ret
);
18607 case NM_POOL32AXF_1_4
:
18609 tcg_gen_movi_tl(t0
, v2
>> 2);
18610 switch (extract32(ctx
->opcode
, 12, 1)) {
18612 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18613 gen_store_gpr(t0
, ret
);
18616 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18617 gen_store_gpr(t0
, ret
);
18621 case NM_POOL32AXF_1_5
:
18622 opc
= extract32(ctx
->opcode
, 12, 2);
18623 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18625 case NM_POOL32AXF_1_7
:
18627 tcg_gen_movi_tl(t0
, v2
>> 3);
18628 tcg_gen_movi_tl(t1
, v1
);
18629 switch (extract32(ctx
->opcode
, 12, 2)) {
18631 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18632 gen_store_gpr(t0
, ret
);
18635 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18636 gen_store_gpr(t0
, ret
);
18639 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18640 gen_store_gpr(t0
, ret
);
18643 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18644 gen_store_gpr(t0
, ret
);
18649 generate_exception_end(ctx
, EXCP_RI
);
18655 tcg_temp_free(v0_t
);
18658 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18659 TCGv v0
, TCGv v1
, int rd
)
18663 t0
= tcg_temp_new_i32();
18665 tcg_gen_movi_i32(t0
, rd
>> 3);
18668 case NM_POOL32AXF_2_0_7
:
18669 switch (extract32(ctx
->opcode
, 9, 3)) {
18672 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18674 case NM_DPAQ_S_W_PH
:
18676 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18680 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18682 case NM_DPSQ_S_W_PH
:
18684 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18687 generate_exception_end(ctx
, EXCP_RI
);
18691 case NM_POOL32AXF_2_8_15
:
18692 switch (extract32(ctx
->opcode
, 9, 3)) {
18695 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18697 case NM_DPAQ_SA_L_W
:
18699 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18703 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18705 case NM_DPSQ_SA_L_W
:
18707 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18710 generate_exception_end(ctx
, EXCP_RI
);
18714 case NM_POOL32AXF_2_16_23
:
18715 switch (extract32(ctx
->opcode
, 9, 3)) {
18716 case NM_DPAU_H_QBL
:
18718 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18720 case NM_DPAQX_S_W_PH
:
18722 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18724 case NM_DPSU_H_QBL
:
18726 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18728 case NM_DPSQX_S_W_PH
:
18730 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18732 case NM_MULSA_W_PH
:
18734 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18737 generate_exception_end(ctx
, EXCP_RI
);
18741 case NM_POOL32AXF_2_24_31
:
18742 switch (extract32(ctx
->opcode
, 9, 3)) {
18743 case NM_DPAU_H_QBR
:
18745 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18747 case NM_DPAQX_SA_W_PH
:
18749 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18751 case NM_DPSU_H_QBR
:
18753 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18755 case NM_DPSQX_SA_W_PH
:
18757 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18759 case NM_MULSAQ_S_W_PH
:
18761 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18764 generate_exception_end(ctx
, EXCP_RI
);
18769 generate_exception_end(ctx
, EXCP_RI
);
18773 tcg_temp_free_i32(t0
);
18776 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18777 int rt
, int rs
, int rd
)
18780 TCGv t0
= tcg_temp_new();
18781 TCGv t1
= tcg_temp_new();
18782 TCGv v0_t
= tcg_temp_new();
18783 TCGv v1_t
= tcg_temp_new();
18785 gen_load_gpr(v0_t
, rt
);
18786 gen_load_gpr(v1_t
, rs
);
18789 case NM_POOL32AXF_2_0_7
:
18790 switch (extract32(ctx
->opcode
, 9, 3)) {
18792 case NM_DPAQ_S_W_PH
:
18794 case NM_DPSQ_S_W_PH
:
18795 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18800 gen_load_gpr(t0
, rs
);
18802 if (rd
!= 0 && rd
!= 2) {
18803 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18804 tcg_gen_ext32u_tl(t0
, t0
);
18805 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18806 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18808 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18814 int acc
= extract32(ctx
->opcode
, 14, 2);
18815 TCGv_i64 t2
= tcg_temp_new_i64();
18816 TCGv_i64 t3
= tcg_temp_new_i64();
18818 gen_load_gpr(t0
, rt
);
18819 gen_load_gpr(t1
, rs
);
18820 tcg_gen_ext_tl_i64(t2
, t0
);
18821 tcg_gen_ext_tl_i64(t3
, t1
);
18822 tcg_gen_mul_i64(t2
, t2
, t3
);
18823 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18824 tcg_gen_add_i64(t2
, t2
, t3
);
18825 tcg_temp_free_i64(t3
);
18826 gen_move_low32(cpu_LO
[acc
], t2
);
18827 gen_move_high32(cpu_HI
[acc
], t2
);
18828 tcg_temp_free_i64(t2
);
18834 int acc
= extract32(ctx
->opcode
, 14, 2);
18835 TCGv_i32 t2
= tcg_temp_new_i32();
18836 TCGv_i32 t3
= tcg_temp_new_i32();
18838 gen_load_gpr(t0
, rs
);
18839 gen_load_gpr(t1
, rt
);
18840 tcg_gen_trunc_tl_i32(t2
, t0
);
18841 tcg_gen_trunc_tl_i32(t3
, t1
);
18842 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18843 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18844 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18845 tcg_temp_free_i32(t2
);
18846 tcg_temp_free_i32(t3
);
18851 gen_load_gpr(v1_t
, rs
);
18852 tcg_gen_movi_tl(t0
, rd
>> 3);
18853 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18854 gen_store_gpr(t0
, ret
);
18858 case NM_POOL32AXF_2_8_15
:
18859 switch (extract32(ctx
->opcode
, 9, 3)) {
18861 case NM_DPAQ_SA_L_W
:
18863 case NM_DPSQ_SA_L_W
:
18864 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18869 int acc
= extract32(ctx
->opcode
, 14, 2);
18870 TCGv_i64 t2
= tcg_temp_new_i64();
18871 TCGv_i64 t3
= tcg_temp_new_i64();
18873 gen_load_gpr(t0
, rs
);
18874 gen_load_gpr(t1
, rt
);
18875 tcg_gen_ext32u_tl(t0
, t0
);
18876 tcg_gen_ext32u_tl(t1
, t1
);
18877 tcg_gen_extu_tl_i64(t2
, t0
);
18878 tcg_gen_extu_tl_i64(t3
, t1
);
18879 tcg_gen_mul_i64(t2
, t2
, t3
);
18880 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18881 tcg_gen_add_i64(t2
, t2
, t3
);
18882 tcg_temp_free_i64(t3
);
18883 gen_move_low32(cpu_LO
[acc
], t2
);
18884 gen_move_high32(cpu_HI
[acc
], t2
);
18885 tcg_temp_free_i64(t2
);
18891 int acc
= extract32(ctx
->opcode
, 14, 2);
18892 TCGv_i32 t2
= tcg_temp_new_i32();
18893 TCGv_i32 t3
= tcg_temp_new_i32();
18895 gen_load_gpr(t0
, rs
);
18896 gen_load_gpr(t1
, rt
);
18897 tcg_gen_trunc_tl_i32(t2
, t0
);
18898 tcg_gen_trunc_tl_i32(t3
, t1
);
18899 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18900 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18901 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18902 tcg_temp_free_i32(t2
);
18903 tcg_temp_free_i32(t3
);
18908 tcg_gen_movi_tl(t0
, rd
>> 3);
18909 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18910 gen_store_gpr(t0
, ret
);
18913 generate_exception_end(ctx
, EXCP_RI
);
18917 case NM_POOL32AXF_2_16_23
:
18918 switch (extract32(ctx
->opcode
, 9, 3)) {
18919 case NM_DPAU_H_QBL
:
18920 case NM_DPAQX_S_W_PH
:
18921 case NM_DPSU_H_QBL
:
18922 case NM_DPSQX_S_W_PH
:
18923 case NM_MULSA_W_PH
:
18924 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18928 tcg_gen_movi_tl(t0
, rd
>> 3);
18929 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18930 gen_store_gpr(t0
, ret
);
18935 int acc
= extract32(ctx
->opcode
, 14, 2);
18936 TCGv_i64 t2
= tcg_temp_new_i64();
18937 TCGv_i64 t3
= tcg_temp_new_i64();
18939 gen_load_gpr(t0
, rs
);
18940 gen_load_gpr(t1
, rt
);
18941 tcg_gen_ext_tl_i64(t2
, t0
);
18942 tcg_gen_ext_tl_i64(t3
, t1
);
18943 tcg_gen_mul_i64(t2
, t2
, t3
);
18944 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18945 tcg_gen_sub_i64(t2
, t3
, t2
);
18946 tcg_temp_free_i64(t3
);
18947 gen_move_low32(cpu_LO
[acc
], t2
);
18948 gen_move_high32(cpu_HI
[acc
], t2
);
18949 tcg_temp_free_i64(t2
);
18952 case NM_EXTRV_RS_W
:
18954 tcg_gen_movi_tl(t0
, rd
>> 3);
18955 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
18956 gen_store_gpr(t0
, ret
);
18960 case NM_POOL32AXF_2_24_31
:
18961 switch (extract32(ctx
->opcode
, 9, 3)) {
18962 case NM_DPAU_H_QBR
:
18963 case NM_DPAQX_SA_W_PH
:
18964 case NM_DPSU_H_QBR
:
18965 case NM_DPSQX_SA_W_PH
:
18966 case NM_MULSAQ_S_W_PH
:
18967 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18971 tcg_gen_movi_tl(t0
, rd
>> 3);
18972 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
18973 gen_store_gpr(t0
, ret
);
18978 int acc
= extract32(ctx
->opcode
, 14, 2);
18979 TCGv_i64 t2
= tcg_temp_new_i64();
18980 TCGv_i64 t3
= tcg_temp_new_i64();
18982 gen_load_gpr(t0
, rs
);
18983 gen_load_gpr(t1
, rt
);
18984 tcg_gen_ext32u_tl(t0
, t0
);
18985 tcg_gen_ext32u_tl(t1
, t1
);
18986 tcg_gen_extu_tl_i64(t2
, t0
);
18987 tcg_gen_extu_tl_i64(t3
, t1
);
18988 tcg_gen_mul_i64(t2
, t2
, t3
);
18989 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18990 tcg_gen_sub_i64(t2
, t3
, t2
);
18991 tcg_temp_free_i64(t3
);
18992 gen_move_low32(cpu_LO
[acc
], t2
);
18993 gen_move_high32(cpu_HI
[acc
], t2
);
18994 tcg_temp_free_i64(t2
);
18999 tcg_gen_movi_tl(t0
, rd
>> 3);
19000 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19001 gen_store_gpr(t0
, ret
);
19006 generate_exception_end(ctx
, EXCP_RI
);
19013 tcg_temp_free(v0_t
);
19014 tcg_temp_free(v1_t
);
19017 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19021 TCGv t0
= tcg_temp_new();
19022 TCGv v0_t
= tcg_temp_new();
19024 gen_load_gpr(v0_t
, rs
);
19029 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19030 gen_store_gpr(v0_t
, ret
);
19034 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19035 gen_store_gpr(v0_t
, ret
);
19039 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19040 gen_store_gpr(v0_t
, ret
);
19042 case NM_PRECEQ_W_PHL
:
19044 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19045 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19046 gen_store_gpr(v0_t
, ret
);
19048 case NM_PRECEQ_W_PHR
:
19050 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19051 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19052 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19053 gen_store_gpr(v0_t
, ret
);
19055 case NM_PRECEQU_PH_QBL
:
19057 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19058 gen_store_gpr(v0_t
, ret
);
19060 case NM_PRECEQU_PH_QBR
:
19062 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19063 gen_store_gpr(v0_t
, ret
);
19065 case NM_PRECEQU_PH_QBLA
:
19067 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19068 gen_store_gpr(v0_t
, ret
);
19070 case NM_PRECEQU_PH_QBRA
:
19072 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19073 gen_store_gpr(v0_t
, ret
);
19075 case NM_PRECEU_PH_QBL
:
19077 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19078 gen_store_gpr(v0_t
, ret
);
19080 case NM_PRECEU_PH_QBR
:
19082 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19083 gen_store_gpr(v0_t
, ret
);
19085 case NM_PRECEU_PH_QBLA
:
19087 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19088 gen_store_gpr(v0_t
, ret
);
19090 case NM_PRECEU_PH_QBRA
:
19092 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19093 gen_store_gpr(v0_t
, ret
);
19097 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19098 tcg_gen_shli_tl(t0
, v0_t
, 16);
19099 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19100 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19101 gen_store_gpr(v0_t
, ret
);
19105 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19106 tcg_gen_shli_tl(t0
, v0_t
, 8);
19107 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19108 tcg_gen_shli_tl(t0
, v0_t
, 16);
19109 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19110 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19111 gen_store_gpr(v0_t
, ret
);
19115 gen_helper_bitrev(v0_t
, v0_t
);
19116 gen_store_gpr(v0_t
, ret
);
19121 TCGv tv0
= tcg_temp_new();
19123 gen_load_gpr(tv0
, rt
);
19124 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19125 gen_store_gpr(v0_t
, ret
);
19126 tcg_temp_free(tv0
);
19129 case NM_RADDU_W_QB
:
19131 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19132 gen_store_gpr(v0_t
, ret
);
19135 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19139 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19143 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19146 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19149 generate_exception_end(ctx
, EXCP_RI
);
19153 tcg_temp_free(v0_t
);
19157 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19158 int rt
, int rs
, int rd
)
19160 TCGv t0
= tcg_temp_new();
19161 TCGv rs_t
= tcg_temp_new();
19163 gen_load_gpr(rs_t
, rs
);
19168 tcg_gen_movi_tl(t0
, rd
>> 2);
19169 switch (extract32(ctx
->opcode
, 12, 1)) {
19172 gen_helper_shra_qb(t0
, t0
, rs_t
);
19173 gen_store_gpr(t0
, rt
);
19177 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19178 gen_store_gpr(t0
, rt
);
19184 tcg_gen_movi_tl(t0
, rd
>> 1);
19185 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19186 gen_store_gpr(t0
, rt
);
19192 target_long result
;
19193 imm
= extract32(ctx
->opcode
, 13, 8);
19194 result
= (uint32_t)imm
<< 24 |
19195 (uint32_t)imm
<< 16 |
19196 (uint32_t)imm
<< 8 |
19198 result
= (int32_t)result
;
19199 tcg_gen_movi_tl(t0
, result
);
19200 gen_store_gpr(t0
, rt
);
19204 generate_exception_end(ctx
, EXCP_RI
);
19208 tcg_temp_free(rs_t
);
19212 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19214 int rt
= extract32(ctx
->opcode
, 21, 5);
19215 int rs
= extract32(ctx
->opcode
, 16, 5);
19216 int rd
= extract32(ctx
->opcode
, 11, 5);
19218 switch (extract32(ctx
->opcode
, 6, 3)) {
19219 case NM_POOL32AXF_1
:
19221 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19222 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19225 case NM_POOL32AXF_2
:
19227 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19228 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19231 case NM_POOL32AXF_4
:
19233 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19234 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19237 case NM_POOL32AXF_5
:
19238 switch (extract32(ctx
->opcode
, 9, 7)) {
19239 #ifndef CONFIG_USER_ONLY
19241 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19244 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19247 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19250 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19253 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19256 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19259 check_cp0_enabled(ctx
);
19261 TCGv t0
= tcg_temp_new();
19263 save_cpu_state(ctx
, 1);
19264 gen_helper_di(t0
, cpu_env
);
19265 gen_store_gpr(t0
, rt
);
19266 /* Stop translation as we may have switched the execution mode */
19267 ctx
->base
.is_jmp
= DISAS_STOP
;
19272 check_cp0_enabled(ctx
);
19274 TCGv t0
= tcg_temp_new();
19276 save_cpu_state(ctx
, 1);
19277 gen_helper_ei(t0
, cpu_env
);
19278 gen_store_gpr(t0
, rt
);
19279 /* Stop translation as we may have switched the execution mode */
19280 ctx
->base
.is_jmp
= DISAS_STOP
;
19285 gen_load_srsgpr(rs
, rt
);
19288 gen_store_srsgpr(rs
, rt
);
19291 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19294 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19297 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19301 generate_exception_end(ctx
, EXCP_RI
);
19305 case NM_POOL32AXF_7
:
19307 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19308 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19312 generate_exception_end(ctx
, EXCP_RI
);
19317 /* Immediate Value Compact Branches */
19318 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19319 int rt
, int32_t imm
, int32_t offset
)
19322 int bcond_compute
= 0;
19323 TCGv t0
= tcg_temp_new();
19324 TCGv t1
= tcg_temp_new();
19326 gen_load_gpr(t0
, rt
);
19327 tcg_gen_movi_tl(t1
, imm
);
19328 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19330 /* Load needed operands and calculate btarget */
19333 if (rt
== 0 && imm
== 0) {
19334 /* Unconditional branch */
19335 } else if (rt
== 0 && imm
!= 0) {
19340 cond
= TCG_COND_EQ
;
19346 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19347 generate_exception_end(ctx
, EXCP_RI
);
19349 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19350 /* Unconditional branch */
19351 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19355 tcg_gen_shri_tl(t0
, t0
, imm
);
19356 tcg_gen_andi_tl(t0
, t0
, 1);
19357 tcg_gen_movi_tl(t1
, 0);
19359 if (opc
== NM_BBEQZC
) {
19360 cond
= TCG_COND_EQ
;
19362 cond
= TCG_COND_NE
;
19367 if (rt
== 0 && imm
== 0) {
19370 } else if (rt
== 0 && imm
!= 0) {
19371 /* Unconditional branch */
19374 cond
= TCG_COND_NE
;
19378 if (rt
== 0 && imm
== 0) {
19379 /* Unconditional branch */
19382 cond
= TCG_COND_GE
;
19387 cond
= TCG_COND_LT
;
19390 if (rt
== 0 && imm
== 0) {
19391 /* Unconditional branch */
19394 cond
= TCG_COND_GEU
;
19399 cond
= TCG_COND_LTU
;
19402 MIPS_INVAL("Immediate Value Compact branch");
19403 generate_exception_end(ctx
, EXCP_RI
);
19407 if (bcond_compute
== 0) {
19408 /* Uncoditional compact branch */
19409 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19411 /* Conditional compact branch */
19412 TCGLabel
*fs
= gen_new_label();
19414 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19416 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19419 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19427 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19428 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19431 TCGv t0
= tcg_temp_new();
19432 TCGv t1
= tcg_temp_new();
19435 gen_load_gpr(t0
, rs
);
19439 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19442 /* calculate btarget */
19443 tcg_gen_shli_tl(t0
, t0
, 1);
19444 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19445 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19447 /* unconditional branch to register */
19448 tcg_gen_mov_tl(cpu_PC
, btarget
);
19449 tcg_gen_lookup_and_goto_ptr();
19455 /* nanoMIPS Branches */
19456 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19457 int rs
, int rt
, int32_t offset
)
19459 int bcond_compute
= 0;
19460 TCGv t0
= tcg_temp_new();
19461 TCGv t1
= tcg_temp_new();
19463 /* Load needed operands and calculate btarget */
19465 /* compact branch */
19468 gen_load_gpr(t0
, rs
);
19469 gen_load_gpr(t1
, rt
);
19471 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19475 if (rs
== 0 || rs
== rt
) {
19476 /* OPC_BLEZALC, OPC_BGEZALC */
19477 /* OPC_BGTZALC, OPC_BLTZALC */
19478 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19480 gen_load_gpr(t0
, rs
);
19481 gen_load_gpr(t1
, rt
);
19483 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19486 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19490 /* OPC_BEQZC, OPC_BNEZC */
19491 gen_load_gpr(t0
, rs
);
19493 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19495 /* OPC_JIC, OPC_JIALC */
19496 TCGv tbase
= tcg_temp_new();
19497 TCGv toffset
= tcg_temp_new();
19499 gen_load_gpr(tbase
, rt
);
19500 tcg_gen_movi_tl(toffset
, offset
);
19501 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19502 tcg_temp_free(tbase
);
19503 tcg_temp_free(toffset
);
19507 MIPS_INVAL("Compact branch/jump");
19508 generate_exception_end(ctx
, EXCP_RI
);
19512 if (bcond_compute
== 0) {
19513 /* Uncoditional compact branch */
19516 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19519 MIPS_INVAL("Compact branch/jump");
19520 generate_exception_end(ctx
, EXCP_RI
);
19524 /* Conditional compact branch */
19525 TCGLabel
*fs
= gen_new_label();
19529 if (rs
== 0 && rt
!= 0) {
19531 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19532 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19534 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19537 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19541 if (rs
== 0 && rt
!= 0) {
19543 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19544 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19546 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19549 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19553 if (rs
== 0 && rt
!= 0) {
19555 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19556 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19558 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19561 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19565 if (rs
== 0 && rt
!= 0) {
19567 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19568 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19570 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19573 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19577 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19580 MIPS_INVAL("Compact conditional branch/jump");
19581 generate_exception_end(ctx
, EXCP_RI
);
19585 /* Generating branch here as compact branches don't have delay slot */
19586 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19589 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19598 /* nanoMIPS CP1 Branches */
19599 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19600 int32_t ft
, int32_t offset
)
19602 target_ulong btarget
;
19603 TCGv_i64 t0
= tcg_temp_new_i64();
19605 gen_load_fpr64(ctx
, t0
, ft
);
19606 tcg_gen_andi_i64(t0
, t0
, 1);
19608 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19612 tcg_gen_xori_i64(t0
, t0
, 1);
19613 ctx
->hflags
|= MIPS_HFLAG_BC
;
19616 /* t0 already set */
19617 ctx
->hflags
|= MIPS_HFLAG_BC
;
19620 MIPS_INVAL("cp1 cond branch");
19621 generate_exception_end(ctx
, EXCP_RI
);
19625 tcg_gen_trunc_i64_tl(bcond
, t0
);
19627 ctx
->btarget
= btarget
;
19630 tcg_temp_free_i64(t0
);
19634 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19637 t0
= tcg_temp_new();
19638 t1
= tcg_temp_new();
19640 gen_load_gpr(t0
, rs
);
19641 gen_load_gpr(t1
, rt
);
19643 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19644 /* PP.LSXS instructions require shifting */
19645 switch (extract32(ctx
->opcode
, 7, 4)) {
19650 tcg_gen_shli_tl(t0
, t0
, 1);
19657 tcg_gen_shli_tl(t0
, t0
, 2);
19661 tcg_gen_shli_tl(t0
, t0
, 3);
19665 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19667 switch (extract32(ctx
->opcode
, 7, 4)) {
19669 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19671 gen_store_gpr(t0
, rd
);
19675 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19677 gen_store_gpr(t0
, rd
);
19681 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19683 gen_store_gpr(t0
, rd
);
19686 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19688 gen_store_gpr(t0
, rd
);
19692 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19694 gen_store_gpr(t0
, rd
);
19698 gen_load_gpr(t1
, rd
);
19699 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19705 gen_load_gpr(t1
, rd
);
19706 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19712 gen_load_gpr(t1
, rd
);
19713 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19717 /*case NM_LWC1XS:*/
19719 /*case NM_LDC1XS:*/
19721 /*case NM_SWC1XS:*/
19723 /*case NM_SDC1XS:*/
19724 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19725 check_cp1_enabled(ctx
);
19726 switch (extract32(ctx
->opcode
, 7, 4)) {
19728 /*case NM_LWC1XS:*/
19729 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19732 /*case NM_LDC1XS:*/
19733 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19736 /*case NM_SWC1XS:*/
19737 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19740 /*case NM_SDC1XS:*/
19741 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19745 generate_exception_err(ctx
, EXCP_CpU
, 1);
19749 generate_exception_end(ctx
, EXCP_RI
);
19757 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19761 rt
= extract32(ctx
->opcode
, 21, 5);
19762 rs
= extract32(ctx
->opcode
, 16, 5);
19763 rd
= extract32(ctx
->opcode
, 11, 5);
19765 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19766 generate_exception_end(ctx
, EXCP_RI
);
19769 check_cp1_enabled(ctx
);
19770 switch (extract32(ctx
->opcode
, 0, 3)) {
19772 switch (extract32(ctx
->opcode
, 3, 7)) {
19774 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19777 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19780 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19783 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19786 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19789 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19792 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19795 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19798 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19801 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19804 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19807 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19810 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19813 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19816 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19819 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19822 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19825 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19828 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19831 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19834 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19837 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19840 generate_exception_end(ctx
, EXCP_RI
);
19845 switch (extract32(ctx
->opcode
, 3, 3)) {
19847 switch (extract32(ctx
->opcode
, 9, 1)) {
19849 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19852 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19857 switch (extract32(ctx
->opcode
, 9, 1)) {
19859 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19862 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19867 switch (extract32(ctx
->opcode
, 9, 1)) {
19869 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19872 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19877 switch (extract32(ctx
->opcode
, 9, 1)) {
19879 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19882 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19887 switch (extract32(ctx
->opcode
, 6, 8)) {
19889 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19892 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19895 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19898 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19901 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19904 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19907 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19910 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19913 switch (extract32(ctx
->opcode
, 6, 9)) {
19915 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19918 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19921 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19924 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19927 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19930 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19933 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19936 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19939 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19942 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19945 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19948 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19951 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
19954 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
19957 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
19960 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
19963 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
19966 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
19969 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
19972 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
19975 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
19978 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
19981 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
19984 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
19987 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
19990 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
19993 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
19996 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
19999 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20002 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20005 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20008 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20011 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20014 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20017 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20020 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20023 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20026 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20029 generate_exception_end(ctx
, EXCP_RI
);
20038 switch (extract32(ctx
->opcode
, 3, 3)) {
20039 case NM_CMP_CONDN_S
:
20040 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20042 case NM_CMP_CONDN_D
:
20043 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20046 generate_exception_end(ctx
, EXCP_RI
);
20051 generate_exception_end(ctx
, EXCP_RI
);
20056 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20057 int rd
, int rs
, int rt
)
20060 TCGv t0
= tcg_temp_new();
20061 TCGv v1_t
= tcg_temp_new();
20062 TCGv v2_t
= tcg_temp_new();
20064 gen_load_gpr(v1_t
, rs
);
20065 gen_load_gpr(v2_t
, rt
);
20070 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20074 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20078 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20080 case NM_CMPU_EQ_QB
:
20082 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20084 case NM_CMPU_LT_QB
:
20086 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20088 case NM_CMPU_LE_QB
:
20090 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20092 case NM_CMPGU_EQ_QB
:
20094 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20095 gen_store_gpr(v1_t
, ret
);
20097 case NM_CMPGU_LT_QB
:
20099 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20100 gen_store_gpr(v1_t
, ret
);
20102 case NM_CMPGU_LE_QB
:
20104 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20105 gen_store_gpr(v1_t
, ret
);
20107 case NM_CMPGDU_EQ_QB
:
20109 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20110 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20111 gen_store_gpr(v1_t
, ret
);
20113 case NM_CMPGDU_LT_QB
:
20115 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20116 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20117 gen_store_gpr(v1_t
, ret
);
20119 case NM_CMPGDU_LE_QB
:
20121 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20122 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20123 gen_store_gpr(v1_t
, ret
);
20127 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20128 gen_store_gpr(v1_t
, ret
);
20132 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20133 gen_store_gpr(v1_t
, ret
);
20137 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20138 gen_store_gpr(v1_t
, ret
);
20142 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20143 gen_store_gpr(v1_t
, ret
);
20147 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20148 gen_store_gpr(v1_t
, ret
);
20152 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20153 gen_store_gpr(v1_t
, ret
);
20157 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20158 gen_store_gpr(v1_t
, ret
);
20162 switch (extract32(ctx
->opcode
, 10, 1)) {
20165 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20166 gen_store_gpr(v1_t
, ret
);
20170 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20171 gen_store_gpr(v1_t
, ret
);
20175 case NM_ADDQH_R_PH
:
20177 switch (extract32(ctx
->opcode
, 10, 1)) {
20180 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20181 gen_store_gpr(v1_t
, ret
);
20185 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20186 gen_store_gpr(v1_t
, ret
);
20192 switch (extract32(ctx
->opcode
, 10, 1)) {
20195 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20196 gen_store_gpr(v1_t
, ret
);
20200 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20201 gen_store_gpr(v1_t
, ret
);
20207 switch (extract32(ctx
->opcode
, 10, 1)) {
20210 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20211 gen_store_gpr(v1_t
, ret
);
20215 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20216 gen_store_gpr(v1_t
, ret
);
20222 switch (extract32(ctx
->opcode
, 10, 1)) {
20225 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20226 gen_store_gpr(v1_t
, ret
);
20230 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20231 gen_store_gpr(v1_t
, ret
);
20235 case NM_ADDUH_R_QB
:
20237 switch (extract32(ctx
->opcode
, 10, 1)) {
20240 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20241 gen_store_gpr(v1_t
, ret
);
20245 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20246 gen_store_gpr(v1_t
, ret
);
20250 case NM_SHRAV_R_PH
:
20252 switch (extract32(ctx
->opcode
, 10, 1)) {
20255 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20256 gen_store_gpr(v1_t
, ret
);
20260 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20261 gen_store_gpr(v1_t
, ret
);
20265 case NM_SHRAV_R_QB
:
20267 switch (extract32(ctx
->opcode
, 10, 1)) {
20270 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20271 gen_store_gpr(v1_t
, ret
);
20275 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20276 gen_store_gpr(v1_t
, ret
);
20282 switch (extract32(ctx
->opcode
, 10, 1)) {
20285 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20286 gen_store_gpr(v1_t
, ret
);
20290 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20291 gen_store_gpr(v1_t
, ret
);
20295 case NM_SUBQH_R_PH
:
20297 switch (extract32(ctx
->opcode
, 10, 1)) {
20300 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20301 gen_store_gpr(v1_t
, ret
);
20305 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20306 gen_store_gpr(v1_t
, ret
);
20312 switch (extract32(ctx
->opcode
, 10, 1)) {
20315 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20316 gen_store_gpr(v1_t
, ret
);
20320 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20321 gen_store_gpr(v1_t
, ret
);
20327 switch (extract32(ctx
->opcode
, 10, 1)) {
20330 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20331 gen_store_gpr(v1_t
, ret
);
20335 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20336 gen_store_gpr(v1_t
, ret
);
20342 switch (extract32(ctx
->opcode
, 10, 1)) {
20345 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20346 gen_store_gpr(v1_t
, ret
);
20350 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20351 gen_store_gpr(v1_t
, ret
);
20355 case NM_SUBUH_R_QB
:
20357 switch (extract32(ctx
->opcode
, 10, 1)) {
20360 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20361 gen_store_gpr(v1_t
, ret
);
20365 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20366 gen_store_gpr(v1_t
, ret
);
20370 case NM_SHLLV_S_PH
:
20372 switch (extract32(ctx
->opcode
, 10, 1)) {
20375 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20376 gen_store_gpr(v1_t
, ret
);
20380 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20381 gen_store_gpr(v1_t
, ret
);
20385 case NM_PRECR_SRA_R_PH_W
:
20387 switch (extract32(ctx
->opcode
, 10, 1)) {
20389 /* PRECR_SRA_PH_W */
20391 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20392 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20394 gen_store_gpr(v1_t
, rt
);
20395 tcg_temp_free_i32(sa_t
);
20399 /* PRECR_SRA_R_PH_W */
20401 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20402 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20404 gen_store_gpr(v1_t
, rt
);
20405 tcg_temp_free_i32(sa_t
);
20410 case NM_MULEU_S_PH_QBL
:
20412 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20413 gen_store_gpr(v1_t
, ret
);
20415 case NM_MULEU_S_PH_QBR
:
20417 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20418 gen_store_gpr(v1_t
, ret
);
20420 case NM_MULQ_RS_PH
:
20422 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20423 gen_store_gpr(v1_t
, ret
);
20427 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20428 gen_store_gpr(v1_t
, ret
);
20432 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20433 gen_store_gpr(v1_t
, ret
);
20437 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20438 gen_store_gpr(v1_t
, ret
);
20442 gen_load_gpr(t0
, rs
);
20444 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20446 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20450 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20451 gen_store_gpr(v1_t
, ret
);
20455 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20456 gen_store_gpr(v1_t
, ret
);
20460 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20461 gen_store_gpr(v1_t
, ret
);
20465 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20466 gen_store_gpr(v1_t
, ret
);
20470 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20471 gen_store_gpr(v1_t
, ret
);
20475 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20476 gen_store_gpr(v1_t
, ret
);
20481 TCGv tv0
= tcg_temp_new();
20482 TCGv tv1
= tcg_temp_new();
20483 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20485 tcg_gen_movi_tl(tv0
, rd
>> 3);
20486 tcg_gen_movi_tl(tv1
, imm
);
20487 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20490 case NM_MULEQ_S_W_PHL
:
20492 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20493 gen_store_gpr(v1_t
, ret
);
20495 case NM_MULEQ_S_W_PHR
:
20497 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20498 gen_store_gpr(v1_t
, ret
);
20502 switch (extract32(ctx
->opcode
, 10, 1)) {
20505 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20506 gen_store_gpr(v1_t
, ret
);
20510 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20511 gen_store_gpr(v1_t
, ret
);
20515 case NM_PRECR_QB_PH
:
20517 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20518 gen_store_gpr(v1_t
, ret
);
20520 case NM_PRECRQ_QB_PH
:
20522 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20523 gen_store_gpr(v1_t
, ret
);
20525 case NM_PRECRQ_PH_W
:
20527 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20528 gen_store_gpr(v1_t
, ret
);
20530 case NM_PRECRQ_RS_PH_W
:
20532 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20533 gen_store_gpr(v1_t
, ret
);
20535 case NM_PRECRQU_S_QB_PH
:
20537 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20538 gen_store_gpr(v1_t
, ret
);
20542 tcg_gen_movi_tl(t0
, rd
);
20543 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20544 gen_store_gpr(v1_t
, rt
);
20548 tcg_gen_movi_tl(t0
, rd
>> 1);
20549 switch (extract32(ctx
->opcode
, 10, 1)) {
20552 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20553 gen_store_gpr(v1_t
, rt
);
20557 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20558 gen_store_gpr(v1_t
, rt
);
20564 tcg_gen_movi_tl(t0
, rd
>> 1);
20565 switch (extract32(ctx
->opcode
, 10, 2)) {
20568 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20569 gen_store_gpr(v1_t
, rt
);
20573 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20574 gen_store_gpr(v1_t
, rt
);
20577 generate_exception_end(ctx
, EXCP_RI
);
20583 tcg_gen_movi_tl(t0
, rd
);
20584 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20585 gen_store_gpr(v1_t
, rt
);
20591 imm
= sextract32(ctx
->opcode
, 11, 11);
20592 imm
= (int16_t)(imm
<< 6) >> 6;
20594 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20599 generate_exception_end(ctx
, EXCP_RI
);
20604 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20612 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20613 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20615 rt
= extract32(ctx
->opcode
, 21, 5);
20616 rs
= extract32(ctx
->opcode
, 16, 5);
20617 rd
= extract32(ctx
->opcode
, 11, 5);
20619 op
= extract32(ctx
->opcode
, 26, 6);
20624 switch (extract32(ctx
->opcode
, 19, 2)) {
20627 generate_exception_end(ctx
, EXCP_RI
);
20630 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20631 generate_exception_end(ctx
, EXCP_SYSCALL
);
20633 generate_exception_end(ctx
, EXCP_RI
);
20637 generate_exception_end(ctx
, EXCP_BREAK
);
20640 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20641 gen_helper_do_semihosting(cpu_env
);
20643 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20644 generate_exception_end(ctx
, EXCP_RI
);
20646 generate_exception_end(ctx
, EXCP_DBp
);
20653 imm
= extract32(ctx
->opcode
, 0, 16);
20655 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20657 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20659 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20664 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20665 extract32(ctx
->opcode
, 1, 20) << 1;
20666 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20667 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20671 switch (ctx
->opcode
& 0x07) {
20673 gen_pool32a0_nanomips_insn(env
, ctx
);
20677 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20678 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20682 switch (extract32(ctx
->opcode
, 3, 3)) {
20684 gen_p_lsx(ctx
, rd
, rs
, rt
);
20687 /* In nanoMIPS, the shift field directly encodes the shift
20688 * amount, meaning that the supported shift values are in
20689 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20690 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20691 extract32(ctx
->opcode
, 9, 2) - 1);
20694 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20697 gen_pool32axf_nanomips_insn(env
, ctx
);
20700 generate_exception_end(ctx
, EXCP_RI
);
20705 generate_exception_end(ctx
, EXCP_RI
);
20710 switch (ctx
->opcode
& 0x03) {
20713 offset
= extract32(ctx
->opcode
, 0, 21);
20714 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20718 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20721 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20724 generate_exception_end(ctx
, EXCP_RI
);
20730 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20731 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20732 switch (extract32(ctx
->opcode
, 16, 5)) {
20736 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20742 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20743 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20749 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20755 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20758 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20765 t0
= tcg_temp_new();
20767 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20770 tcg_gen_movi_tl(t0
, addr
);
20771 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20779 t0
= tcg_temp_new();
20780 t1
= tcg_temp_new();
20782 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20785 tcg_gen_movi_tl(t0
, addr
);
20786 gen_load_gpr(t1
, rt
);
20788 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20795 generate_exception_end(ctx
, EXCP_RI
);
20801 switch (extract32(ctx
->opcode
, 12, 4)) {
20803 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20806 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20809 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20812 switch (extract32(ctx
->opcode
, 20, 1)) {
20814 switch (ctx
->opcode
& 3) {
20816 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20817 extract32(ctx
->opcode
, 2, 1),
20818 extract32(ctx
->opcode
, 3, 9) << 3);
20821 case NM_RESTORE_JRC
:
20822 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20823 extract32(ctx
->opcode
, 2, 1),
20824 extract32(ctx
->opcode
, 3, 9) << 3);
20825 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20826 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20830 generate_exception_end(ctx
, EXCP_RI
);
20835 generate_exception_end(ctx
, EXCP_RI
);
20840 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20843 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20847 TCGv t0
= tcg_temp_new();
20849 imm
= extract32(ctx
->opcode
, 0, 12);
20850 gen_load_gpr(t0
, rs
);
20851 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20852 gen_store_gpr(t0
, rt
);
20858 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20859 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20863 int shift
= extract32(ctx
->opcode
, 0, 5);
20864 switch (extract32(ctx
->opcode
, 5, 4)) {
20866 if (rt
== 0 && shift
== 0) {
20868 } else if (rt
== 0 && shift
== 3) {
20869 /* EHB - treat as NOP */
20870 } else if (rt
== 0 && shift
== 5) {
20871 /* PAUSE - treat as NOP */
20872 } else if (rt
== 0 && shift
== 6) {
20874 gen_sync(extract32(ctx
->opcode
, 16, 5));
20877 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20878 extract32(ctx
->opcode
, 0, 5));
20882 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20883 extract32(ctx
->opcode
, 0, 5));
20886 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20887 extract32(ctx
->opcode
, 0, 5));
20890 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20891 extract32(ctx
->opcode
, 0, 5));
20899 TCGv t0
= tcg_temp_new();
20900 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20901 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20903 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20905 gen_load_gpr(t0
, rs
);
20906 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20909 tcg_temp_free_i32(shift
);
20910 tcg_temp_free_i32(shiftx
);
20911 tcg_temp_free_i32(stripe
);
20915 switch (((ctx
->opcode
>> 10) & 2) |
20916 (extract32(ctx
->opcode
, 5, 1))) {
20919 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20920 extract32(ctx
->opcode
, 6, 5));
20923 generate_exception_end(ctx
, EXCP_RI
);
20928 switch (((ctx
->opcode
>> 10) & 2) |
20929 (extract32(ctx
->opcode
, 5, 1))) {
20932 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20933 extract32(ctx
->opcode
, 6, 5));
20936 generate_exception_end(ctx
, EXCP_RI
);
20941 generate_exception_end(ctx
, EXCP_RI
);
20946 gen_pool32f_nanomips_insn(ctx
);
20951 switch (extract32(ctx
->opcode
, 1, 1)) {
20954 tcg_gen_movi_tl(cpu_gpr
[rt
],
20955 sextract32(ctx
->opcode
, 0, 1) << 31 |
20956 extract32(ctx
->opcode
, 2, 10) << 21 |
20957 extract32(ctx
->opcode
, 12, 9) << 12);
20962 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
20963 extract32(ctx
->opcode
, 2, 10) << 21 |
20964 extract32(ctx
->opcode
, 12, 9) << 12;
20966 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20967 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20974 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
20976 switch (extract32(ctx
->opcode
, 18, 3)) {
20978 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
20981 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
20984 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
20988 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
20993 switch (ctx
->opcode
& 1) {
20995 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
20998 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21004 switch (ctx
->opcode
& 1) {
21006 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21009 generate_exception_end(ctx
, EXCP_RI
);
21015 switch (ctx
->opcode
& 0x3) {
21017 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21020 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21023 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21026 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21031 generate_exception_end(ctx
, EXCP_RI
);
21038 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21040 switch (extract32(ctx
->opcode
, 12, 4)) {
21044 /* Break the TB to be able to sync copied instructions
21046 ctx
->base
.is_jmp
= DISAS_STOP
;
21049 /* Treat as NOP. */
21053 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21056 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21059 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21062 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21065 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21068 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21071 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21074 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21077 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21080 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21083 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21086 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21089 generate_exception_end(ctx
, EXCP_RI
);
21096 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21097 extract32(ctx
->opcode
, 0, 8);
21099 switch (extract32(ctx
->opcode
, 8, 3)) {
21101 switch (extract32(ctx
->opcode
, 11, 4)) {
21103 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21106 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21109 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21112 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21115 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21118 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21121 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21124 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21127 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21130 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21133 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21136 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21141 /* Break the TB to be able to sync copied instructions
21143 ctx
->base
.is_jmp
= DISAS_STOP
;
21146 /* Treat as NOP. */
21150 generate_exception_end(ctx
, EXCP_RI
);
21155 switch (extract32(ctx
->opcode
, 11, 4)) {
21160 TCGv t0
= tcg_temp_new();
21161 TCGv t1
= tcg_temp_new();
21163 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21165 switch (extract32(ctx
->opcode
, 11, 4)) {
21167 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21169 gen_store_gpr(t0
, rt
);
21172 gen_load_gpr(t1
, rt
);
21173 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21182 switch (ctx
->opcode
& 0x03) {
21184 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21188 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21193 switch (ctx
->opcode
& 0x03) {
21195 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21199 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21204 check_cp0_enabled(ctx
);
21205 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21206 gen_cache_operation(ctx
, rt
, rs
, s
);
21215 int count
= extract32(ctx
->opcode
, 12, 3);
21218 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21219 extract32(ctx
->opcode
, 0, 8);
21220 TCGv va
= tcg_temp_new();
21221 TCGv t1
= tcg_temp_new();
21222 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21223 NM_P_LS_UAWM
? MO_UNALN
: 0;
21225 count
= (count
== 0) ? 8 : count
;
21226 while (counter
!= count
) {
21227 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21228 int this_offset
= offset
+ (counter
<< 2);
21230 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21232 switch (extract32(ctx
->opcode
, 11, 1)) {
21234 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21236 gen_store_gpr(t1
, this_rt
);
21237 if ((this_rt
== rs
) &&
21238 (counter
!= (count
- 1))) {
21239 /* UNPREDICTABLE */
21243 this_rt
= (rt
== 0) ? 0 : this_rt
;
21244 gen_load_gpr(t1
, this_rt
);
21245 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21256 generate_exception_end(ctx
, EXCP_RI
);
21264 TCGv t0
= tcg_temp_new();
21265 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21266 extract32(ctx
->opcode
, 1, 20) << 1;
21267 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21268 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21269 extract32(ctx
->opcode
, 21, 3));
21270 gen_load_gpr(t0
, rt
);
21271 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21272 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21278 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21279 extract32(ctx
->opcode
, 1, 24) << 1;
21281 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21283 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21286 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21291 switch (extract32(ctx
->opcode
, 12, 4)) {
21294 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21297 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21300 generate_exception_end(ctx
, EXCP_RI
);
21306 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21307 extract32(ctx
->opcode
, 1, 13) << 1;
21308 switch (extract32(ctx
->opcode
, 14, 2)) {
21311 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21314 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21315 extract32(ctx
->opcode
, 1, 13) << 1;
21316 check_cp1_enabled(ctx
);
21317 switch (extract32(ctx
->opcode
, 16, 5)) {
21319 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21322 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21327 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21328 extract32(ctx
->opcode
, 0, 1) << 13;
21330 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21335 generate_exception_end(ctx
, EXCP_RI
);
21341 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21343 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21347 if (rs
== rt
|| rt
== 0) {
21348 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21349 } else if (rs
== 0) {
21350 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21352 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21360 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21361 extract32(ctx
->opcode
, 1, 13) << 1;
21362 switch (extract32(ctx
->opcode
, 14, 2)) {
21365 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21368 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21370 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21372 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21376 if (rs
== 0 || rs
== rt
) {
21378 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21380 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21384 generate_exception_end(ctx
, EXCP_RI
);
21391 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21392 extract32(ctx
->opcode
, 1, 10) << 1;
21393 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21395 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21400 generate_exception_end(ctx
, EXCP_RI
);
21406 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21409 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21410 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21411 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21415 /* make sure instructions are on a halfword boundary */
21416 if (ctx
->base
.pc_next
& 0x1) {
21417 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21418 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21419 tcg_temp_free(tmp
);
21420 generate_exception_end(ctx
, EXCP_AdEL
);
21424 op
= extract32(ctx
->opcode
, 10, 6);
21427 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21430 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21431 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21434 switch (extract32(ctx
->opcode
, 3, 2)) {
21435 case NM_P16_SYSCALL
:
21436 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21437 generate_exception_end(ctx
, EXCP_SYSCALL
);
21439 generate_exception_end(ctx
, EXCP_RI
);
21443 generate_exception_end(ctx
, EXCP_BREAK
);
21446 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21447 gen_helper_do_semihosting(cpu_env
);
21449 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21450 generate_exception_end(ctx
, EXCP_RI
);
21452 generate_exception_end(ctx
, EXCP_DBp
);
21457 generate_exception_end(ctx
, EXCP_RI
);
21464 int shift
= extract32(ctx
->opcode
, 0, 3);
21466 shift
= (shift
== 0) ? 8 : shift
;
21468 switch (extract32(ctx
->opcode
, 3, 1)) {
21476 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21480 switch (ctx
->opcode
& 1) {
21482 gen_pool16c_nanomips_insn(ctx
);
21485 gen_ldxs(ctx
, rt
, rs
, rd
);
21490 switch (extract32(ctx
->opcode
, 6, 1)) {
21492 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21493 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21496 generate_exception_end(ctx
, EXCP_RI
);
21501 switch (extract32(ctx
->opcode
, 3, 1)) {
21503 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21504 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21506 case NM_P_ADDIURS5
:
21507 rt
= extract32(ctx
->opcode
, 5, 5);
21509 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21510 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21511 (extract32(ctx
->opcode
, 0, 3));
21512 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21518 switch (ctx
->opcode
& 0x1) {
21520 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21523 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21528 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21529 extract32(ctx
->opcode
, 5, 3);
21530 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21531 extract32(ctx
->opcode
, 0, 3);
21532 rt
= decode_gpr_gpr4(rt
);
21533 rs
= decode_gpr_gpr4(rs
);
21534 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21535 (extract32(ctx
->opcode
, 3, 1))) {
21538 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21542 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21545 generate_exception_end(ctx
, EXCP_RI
);
21551 int imm
= extract32(ctx
->opcode
, 0, 7);
21552 imm
= (imm
== 0x7f ? -1 : imm
);
21554 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21560 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21561 u
= (u
== 12) ? 0xff :
21562 (u
== 13) ? 0xffff : u
;
21563 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21567 offset
= extract32(ctx
->opcode
, 0, 2);
21568 switch (extract32(ctx
->opcode
, 2, 2)) {
21570 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21573 rt
= decode_gpr_gpr3_src_store(
21574 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21575 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21578 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21581 generate_exception_end(ctx
, EXCP_RI
);
21586 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21587 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21589 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21592 rt
= decode_gpr_gpr3_src_store(
21593 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21594 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21597 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21600 generate_exception_end(ctx
, EXCP_RI
);
21605 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21606 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21609 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21610 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21611 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21615 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21616 extract32(ctx
->opcode
, 5, 3);
21617 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21618 extract32(ctx
->opcode
, 0, 3);
21619 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21620 (extract32(ctx
->opcode
, 8, 1) << 2);
21621 rt
= decode_gpr_gpr4(rt
);
21622 rs
= decode_gpr_gpr4(rs
);
21623 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21627 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21628 extract32(ctx
->opcode
, 5, 3);
21629 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21630 extract32(ctx
->opcode
, 0, 3);
21631 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21632 (extract32(ctx
->opcode
, 8, 1) << 2);
21633 rt
= decode_gpr_gpr4_zero(rt
);
21634 rs
= decode_gpr_gpr4(rs
);
21635 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21638 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21639 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21642 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21643 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21644 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21647 rt
= decode_gpr_gpr3_src_store(
21648 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21649 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21650 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21651 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21654 rt
= decode_gpr_gpr3_src_store(
21655 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21656 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21657 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21660 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21661 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21662 (extract32(ctx
->opcode
, 1, 9) << 1));
21665 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21666 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21667 (extract32(ctx
->opcode
, 1, 9) << 1));
21670 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21671 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21672 (extract32(ctx
->opcode
, 1, 6) << 1));
21675 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21676 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21677 (extract32(ctx
->opcode
, 1, 6) << 1));
21680 switch (ctx
->opcode
& 0xf) {
21683 switch (extract32(ctx
->opcode
, 4, 1)) {
21685 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21686 extract32(ctx
->opcode
, 5, 5), 0, 0);
21689 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21690 extract32(ctx
->opcode
, 5, 5), 31, 0);
21697 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21698 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21699 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21700 extract32(ctx
->opcode
, 0, 4) << 1);
21707 int count
= extract32(ctx
->opcode
, 0, 4);
21708 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21710 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21711 switch (extract32(ctx
->opcode
, 8, 1)) {
21713 gen_save(ctx
, rt
, count
, 0, u
);
21715 case NM_RESTORE_JRC16
:
21716 gen_restore(ctx
, rt
, count
, 0, u
);
21717 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21726 static const int gpr2reg1
[] = {4, 5, 6, 7};
21727 static const int gpr2reg2
[] = {5, 6, 7, 8};
21729 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21730 extract32(ctx
->opcode
, 8, 1);
21731 int r1
= gpr2reg1
[rd2
];
21732 int r2
= gpr2reg2
[rd2
];
21733 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21734 extract32(ctx
->opcode
, 0, 3);
21735 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21736 extract32(ctx
->opcode
, 5, 3);
21737 TCGv t0
= tcg_temp_new();
21738 TCGv t1
= tcg_temp_new();
21739 if (op
== NM_MOVEP
) {
21742 rs
= decode_gpr_gpr4_zero(r3
);
21743 rt
= decode_gpr_gpr4_zero(r4
);
21745 rd
= decode_gpr_gpr4(r3
);
21746 re
= decode_gpr_gpr4(r4
);
21750 gen_load_gpr(t0
, rs
);
21751 gen_load_gpr(t1
, rt
);
21752 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21753 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21759 return decode_nanomips_32_48_opc(env
, ctx
);
21766 /* SmartMIPS extension to MIPS32 */
21768 #if defined(TARGET_MIPS64)
21770 /* MDMX extension to MIPS64 */
21774 /* MIPSDSP functions. */
21775 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21776 int rd
, int base
, int offset
)
21781 t0
= tcg_temp_new();
21784 gen_load_gpr(t0
, offset
);
21785 } else if (offset
== 0) {
21786 gen_load_gpr(t0
, base
);
21788 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21793 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21794 gen_store_gpr(t0
, rd
);
21797 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21798 gen_store_gpr(t0
, rd
);
21801 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21802 gen_store_gpr(t0
, rd
);
21804 #if defined(TARGET_MIPS64)
21806 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21807 gen_store_gpr(t0
, rd
);
21814 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21815 int ret
, int v1
, int v2
)
21821 /* Treat as NOP. */
21825 v1_t
= tcg_temp_new();
21826 v2_t
= tcg_temp_new();
21828 gen_load_gpr(v1_t
, v1
);
21829 gen_load_gpr(v2_t
, v2
);
21832 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21833 case OPC_MULT_G_2E
:
21837 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21839 case OPC_ADDUH_R_QB
:
21840 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21843 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21845 case OPC_ADDQH_R_PH
:
21846 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21849 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21851 case OPC_ADDQH_R_W
:
21852 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21855 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21857 case OPC_SUBUH_R_QB
:
21858 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21861 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21863 case OPC_SUBQH_R_PH
:
21864 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21867 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21869 case OPC_SUBQH_R_W
:
21870 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21874 case OPC_ABSQ_S_PH_DSP
:
21876 case OPC_ABSQ_S_QB
:
21878 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
21880 case OPC_ABSQ_S_PH
:
21882 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
21886 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
21888 case OPC_PRECEQ_W_PHL
:
21890 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
21891 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21893 case OPC_PRECEQ_W_PHR
:
21895 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
21896 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
21897 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21899 case OPC_PRECEQU_PH_QBL
:
21901 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
21903 case OPC_PRECEQU_PH_QBR
:
21905 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
21907 case OPC_PRECEQU_PH_QBLA
:
21909 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
21911 case OPC_PRECEQU_PH_QBRA
:
21913 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
21915 case OPC_PRECEU_PH_QBL
:
21917 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
21919 case OPC_PRECEU_PH_QBR
:
21921 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
21923 case OPC_PRECEU_PH_QBLA
:
21925 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
21927 case OPC_PRECEU_PH_QBRA
:
21929 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
21933 case OPC_ADDU_QB_DSP
:
21937 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21939 case OPC_ADDQ_S_PH
:
21941 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21945 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21949 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21951 case OPC_ADDU_S_QB
:
21953 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21957 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21959 case OPC_ADDU_S_PH
:
21961 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21965 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21967 case OPC_SUBQ_S_PH
:
21969 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21973 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21977 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21979 case OPC_SUBU_S_QB
:
21981 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21985 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21987 case OPC_SUBU_S_PH
:
21989 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21993 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21997 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22001 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22003 case OPC_RADDU_W_QB
:
22005 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22009 case OPC_CMPU_EQ_QB_DSP
:
22011 case OPC_PRECR_QB_PH
:
22013 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22015 case OPC_PRECRQ_QB_PH
:
22017 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22019 case OPC_PRECR_SRA_PH_W
:
22022 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22023 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22025 tcg_temp_free_i32(sa_t
);
22028 case OPC_PRECR_SRA_R_PH_W
:
22031 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22032 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22034 tcg_temp_free_i32(sa_t
);
22037 case OPC_PRECRQ_PH_W
:
22039 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22041 case OPC_PRECRQ_RS_PH_W
:
22043 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22045 case OPC_PRECRQU_S_QB_PH
:
22047 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22051 #ifdef TARGET_MIPS64
22052 case OPC_ABSQ_S_QH_DSP
:
22054 case OPC_PRECEQ_L_PWL
:
22056 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22058 case OPC_PRECEQ_L_PWR
:
22060 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22062 case OPC_PRECEQ_PW_QHL
:
22064 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22066 case OPC_PRECEQ_PW_QHR
:
22068 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22070 case OPC_PRECEQ_PW_QHLA
:
22072 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22074 case OPC_PRECEQ_PW_QHRA
:
22076 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22078 case OPC_PRECEQU_QH_OBL
:
22080 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22082 case OPC_PRECEQU_QH_OBR
:
22084 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22086 case OPC_PRECEQU_QH_OBLA
:
22088 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22090 case OPC_PRECEQU_QH_OBRA
:
22092 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22094 case OPC_PRECEU_QH_OBL
:
22096 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22098 case OPC_PRECEU_QH_OBR
:
22100 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22102 case OPC_PRECEU_QH_OBLA
:
22104 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22106 case OPC_PRECEU_QH_OBRA
:
22108 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22110 case OPC_ABSQ_S_OB
:
22112 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22114 case OPC_ABSQ_S_PW
:
22116 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22118 case OPC_ABSQ_S_QH
:
22120 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22124 case OPC_ADDU_OB_DSP
:
22126 case OPC_RADDU_L_OB
:
22128 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22132 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22134 case OPC_SUBQ_S_PW
:
22136 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22140 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22142 case OPC_SUBQ_S_QH
:
22144 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22148 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22150 case OPC_SUBU_S_OB
:
22152 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22156 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22158 case OPC_SUBU_S_QH
:
22160 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22164 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22166 case OPC_SUBUH_R_OB
:
22168 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22172 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22174 case OPC_ADDQ_S_PW
:
22176 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22180 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22182 case OPC_ADDQ_S_QH
:
22184 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22188 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22190 case OPC_ADDU_S_OB
:
22192 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22196 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22198 case OPC_ADDU_S_QH
:
22200 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22204 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22206 case OPC_ADDUH_R_OB
:
22208 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22212 case OPC_CMPU_EQ_OB_DSP
:
22214 case OPC_PRECR_OB_QH
:
22216 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22218 case OPC_PRECR_SRA_QH_PW
:
22221 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22222 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22223 tcg_temp_free_i32(ret_t
);
22226 case OPC_PRECR_SRA_R_QH_PW
:
22229 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22230 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22231 tcg_temp_free_i32(sa_v
);
22234 case OPC_PRECRQ_OB_QH
:
22236 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22238 case OPC_PRECRQ_PW_L
:
22240 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22242 case OPC_PRECRQ_QH_PW
:
22244 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22246 case OPC_PRECRQ_RS_QH_PW
:
22248 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22250 case OPC_PRECRQU_S_OB_QH
:
22252 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22259 tcg_temp_free(v1_t
);
22260 tcg_temp_free(v2_t
);
22263 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22264 int ret
, int v1
, int v2
)
22272 /* Treat as NOP. */
22276 t0
= tcg_temp_new();
22277 v1_t
= tcg_temp_new();
22278 v2_t
= tcg_temp_new();
22280 tcg_gen_movi_tl(t0
, v1
);
22281 gen_load_gpr(v1_t
, v1
);
22282 gen_load_gpr(v2_t
, v2
);
22285 case OPC_SHLL_QB_DSP
:
22287 op2
= MASK_SHLL_QB(ctx
->opcode
);
22291 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22295 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22299 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22303 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22305 case OPC_SHLL_S_PH
:
22307 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22309 case OPC_SHLLV_S_PH
:
22311 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22315 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22317 case OPC_SHLLV_S_W
:
22319 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22323 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22327 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22331 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22335 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22339 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22341 case OPC_SHRA_R_QB
:
22343 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22347 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22349 case OPC_SHRAV_R_QB
:
22351 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22355 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22357 case OPC_SHRA_R_PH
:
22359 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22363 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22365 case OPC_SHRAV_R_PH
:
22367 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22371 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22373 case OPC_SHRAV_R_W
:
22375 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22377 default: /* Invalid */
22378 MIPS_INVAL("MASK SHLL.QB");
22379 generate_exception_end(ctx
, EXCP_RI
);
22384 #ifdef TARGET_MIPS64
22385 case OPC_SHLL_OB_DSP
:
22386 op2
= MASK_SHLL_OB(ctx
->opcode
);
22390 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22394 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22396 case OPC_SHLL_S_PW
:
22398 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22400 case OPC_SHLLV_S_PW
:
22402 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22406 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22410 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22414 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22418 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22420 case OPC_SHLL_S_QH
:
22422 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22424 case OPC_SHLLV_S_QH
:
22426 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22430 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22434 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22436 case OPC_SHRA_R_OB
:
22438 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22440 case OPC_SHRAV_R_OB
:
22442 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22446 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22450 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22452 case OPC_SHRA_R_PW
:
22454 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22456 case OPC_SHRAV_R_PW
:
22458 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22462 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22466 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22468 case OPC_SHRA_R_QH
:
22470 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22472 case OPC_SHRAV_R_QH
:
22474 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22478 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22482 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22486 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22490 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22492 default: /* Invalid */
22493 MIPS_INVAL("MASK SHLL.OB");
22494 generate_exception_end(ctx
, EXCP_RI
);
22502 tcg_temp_free(v1_t
);
22503 tcg_temp_free(v2_t
);
22506 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22507 int ret
, int v1
, int v2
, int check_ret
)
22513 if ((ret
== 0) && (check_ret
== 1)) {
22514 /* Treat as NOP. */
22518 t0
= tcg_temp_new_i32();
22519 v1_t
= tcg_temp_new();
22520 v2_t
= tcg_temp_new();
22522 tcg_gen_movi_i32(t0
, ret
);
22523 gen_load_gpr(v1_t
, v1
);
22524 gen_load_gpr(v2_t
, v2
);
22527 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22528 * the same mask and op1. */
22529 case OPC_MULT_G_2E
:
22533 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22536 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22539 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22541 case OPC_MULQ_RS_W
:
22542 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22546 case OPC_DPA_W_PH_DSP
:
22548 case OPC_DPAU_H_QBL
:
22550 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22552 case OPC_DPAU_H_QBR
:
22554 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22556 case OPC_DPSU_H_QBL
:
22558 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22560 case OPC_DPSU_H_QBR
:
22562 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22566 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22568 case OPC_DPAX_W_PH
:
22570 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22572 case OPC_DPAQ_S_W_PH
:
22574 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22576 case OPC_DPAQX_S_W_PH
:
22578 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22580 case OPC_DPAQX_SA_W_PH
:
22582 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22586 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22588 case OPC_DPSX_W_PH
:
22590 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22592 case OPC_DPSQ_S_W_PH
:
22594 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22596 case OPC_DPSQX_S_W_PH
:
22598 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22600 case OPC_DPSQX_SA_W_PH
:
22602 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22604 case OPC_MULSAQ_S_W_PH
:
22606 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22608 case OPC_DPAQ_SA_L_W
:
22610 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22612 case OPC_DPSQ_SA_L_W
:
22614 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22616 case OPC_MAQ_S_W_PHL
:
22618 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22620 case OPC_MAQ_S_W_PHR
:
22622 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22624 case OPC_MAQ_SA_W_PHL
:
22626 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22628 case OPC_MAQ_SA_W_PHR
:
22630 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22632 case OPC_MULSA_W_PH
:
22634 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22638 #ifdef TARGET_MIPS64
22639 case OPC_DPAQ_W_QH_DSP
:
22641 int ac
= ret
& 0x03;
22642 tcg_gen_movi_i32(t0
, ac
);
22647 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22651 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22655 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22659 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22663 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22665 case OPC_DPAQ_S_W_QH
:
22667 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22669 case OPC_DPAQ_SA_L_PW
:
22671 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22673 case OPC_DPAU_H_OBL
:
22675 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22677 case OPC_DPAU_H_OBR
:
22679 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22683 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22685 case OPC_DPSQ_S_W_QH
:
22687 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22689 case OPC_DPSQ_SA_L_PW
:
22691 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22693 case OPC_DPSU_H_OBL
:
22695 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22697 case OPC_DPSU_H_OBR
:
22699 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22701 case OPC_MAQ_S_L_PWL
:
22703 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22705 case OPC_MAQ_S_L_PWR
:
22707 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22709 case OPC_MAQ_S_W_QHLL
:
22711 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22713 case OPC_MAQ_SA_W_QHLL
:
22715 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22717 case OPC_MAQ_S_W_QHLR
:
22719 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22721 case OPC_MAQ_SA_W_QHLR
:
22723 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22725 case OPC_MAQ_S_W_QHRL
:
22727 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22729 case OPC_MAQ_SA_W_QHRL
:
22731 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22733 case OPC_MAQ_S_W_QHRR
:
22735 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22737 case OPC_MAQ_SA_W_QHRR
:
22739 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22741 case OPC_MULSAQ_S_L_PW
:
22743 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22745 case OPC_MULSAQ_S_W_QH
:
22747 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22753 case OPC_ADDU_QB_DSP
:
22755 case OPC_MULEU_S_PH_QBL
:
22757 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22759 case OPC_MULEU_S_PH_QBR
:
22761 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22763 case OPC_MULQ_RS_PH
:
22765 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22767 case OPC_MULEQ_S_W_PHL
:
22769 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22771 case OPC_MULEQ_S_W_PHR
:
22773 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22775 case OPC_MULQ_S_PH
:
22777 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22781 #ifdef TARGET_MIPS64
22782 case OPC_ADDU_OB_DSP
:
22784 case OPC_MULEQ_S_PW_QHL
:
22786 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22788 case OPC_MULEQ_S_PW_QHR
:
22790 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22792 case OPC_MULEU_S_QH_OBL
:
22794 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22796 case OPC_MULEU_S_QH_OBR
:
22798 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22800 case OPC_MULQ_RS_QH
:
22802 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22809 tcg_temp_free_i32(t0
);
22810 tcg_temp_free(v1_t
);
22811 tcg_temp_free(v2_t
);
22814 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22822 /* Treat as NOP. */
22826 t0
= tcg_temp_new();
22827 val_t
= tcg_temp_new();
22828 gen_load_gpr(val_t
, val
);
22831 case OPC_ABSQ_S_PH_DSP
:
22835 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22840 target_long result
;
22841 imm
= (ctx
->opcode
>> 16) & 0xFF;
22842 result
= (uint32_t)imm
<< 24 |
22843 (uint32_t)imm
<< 16 |
22844 (uint32_t)imm
<< 8 |
22846 result
= (int32_t)result
;
22847 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
22852 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22853 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22854 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22855 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22856 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22857 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22862 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22863 imm
= (int16_t)(imm
<< 6) >> 6;
22864 tcg_gen_movi_tl(cpu_gpr
[ret
], \
22865 (target_long
)((int32_t)imm
<< 16 | \
22871 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22872 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22873 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22874 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22878 #ifdef TARGET_MIPS64
22879 case OPC_ABSQ_S_QH_DSP
:
22886 imm
= (ctx
->opcode
>> 16) & 0xFF;
22887 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
22888 temp
= (temp
<< 16) | temp
;
22889 temp
= (temp
<< 32) | temp
;
22890 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22898 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22899 imm
= (int16_t)(imm
<< 6) >> 6;
22900 temp
= ((target_long
)imm
<< 32) \
22901 | ((target_long
)imm
& 0xFFFFFFFF);
22902 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22910 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22911 imm
= (int16_t)(imm
<< 6) >> 6;
22913 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
22914 ((uint64_t)(uint16_t)imm
<< 32) |
22915 ((uint64_t)(uint16_t)imm
<< 16) |
22916 (uint64_t)(uint16_t)imm
;
22917 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22922 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22923 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22924 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22925 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22926 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22927 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22928 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22932 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
22933 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22934 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22938 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22939 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22940 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22941 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22942 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22949 tcg_temp_free(val_t
);
22952 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
22953 uint32_t op1
, uint32_t op2
,
22954 int ret
, int v1
, int v2
, int check_ret
)
22960 if ((ret
== 0) && (check_ret
== 1)) {
22961 /* Treat as NOP. */
22965 t1
= tcg_temp_new();
22966 v1_t
= tcg_temp_new();
22967 v2_t
= tcg_temp_new();
22969 gen_load_gpr(v1_t
, v1
);
22970 gen_load_gpr(v2_t
, v2
);
22973 case OPC_CMPU_EQ_QB_DSP
:
22975 case OPC_CMPU_EQ_QB
:
22977 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
22979 case OPC_CMPU_LT_QB
:
22981 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
22983 case OPC_CMPU_LE_QB
:
22985 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
22987 case OPC_CMPGU_EQ_QB
:
22989 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22991 case OPC_CMPGU_LT_QB
:
22993 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22995 case OPC_CMPGU_LE_QB
:
22997 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22999 case OPC_CMPGDU_EQ_QB
:
23001 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23002 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23003 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23004 tcg_gen_shli_tl(t1
, t1
, 24);
23005 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23007 case OPC_CMPGDU_LT_QB
:
23009 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23010 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23011 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23012 tcg_gen_shli_tl(t1
, t1
, 24);
23013 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23015 case OPC_CMPGDU_LE_QB
:
23017 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23018 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23019 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23020 tcg_gen_shli_tl(t1
, t1
, 24);
23021 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23023 case OPC_CMP_EQ_PH
:
23025 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23027 case OPC_CMP_LT_PH
:
23029 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23031 case OPC_CMP_LE_PH
:
23033 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23037 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23041 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23043 case OPC_PACKRL_PH
:
23045 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23049 #ifdef TARGET_MIPS64
23050 case OPC_CMPU_EQ_OB_DSP
:
23052 case OPC_CMP_EQ_PW
:
23054 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23056 case OPC_CMP_LT_PW
:
23058 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23060 case OPC_CMP_LE_PW
:
23062 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23064 case OPC_CMP_EQ_QH
:
23066 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23068 case OPC_CMP_LT_QH
:
23070 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23072 case OPC_CMP_LE_QH
:
23074 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23076 case OPC_CMPGDU_EQ_OB
:
23078 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23080 case OPC_CMPGDU_LT_OB
:
23082 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23084 case OPC_CMPGDU_LE_OB
:
23086 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23088 case OPC_CMPGU_EQ_OB
:
23090 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23092 case OPC_CMPGU_LT_OB
:
23094 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23096 case OPC_CMPGU_LE_OB
:
23098 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23100 case OPC_CMPU_EQ_OB
:
23102 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23104 case OPC_CMPU_LT_OB
:
23106 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23108 case OPC_CMPU_LE_OB
:
23110 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23112 case OPC_PACKRL_PW
:
23114 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23118 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23122 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23126 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23134 tcg_temp_free(v1_t
);
23135 tcg_temp_free(v2_t
);
23138 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23139 uint32_t op1
, int rt
, int rs
, int sa
)
23146 /* Treat as NOP. */
23150 t0
= tcg_temp_new();
23151 gen_load_gpr(t0
, rs
);
23154 case OPC_APPEND_DSP
:
23155 switch (MASK_APPEND(ctx
->opcode
)) {
23158 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23160 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23164 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23165 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23166 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23167 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23169 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23173 if (sa
!= 0 && sa
!= 2) {
23174 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23175 tcg_gen_ext32u_tl(t0
, t0
);
23176 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23177 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23179 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23181 default: /* Invalid */
23182 MIPS_INVAL("MASK APPEND");
23183 generate_exception_end(ctx
, EXCP_RI
);
23187 #ifdef TARGET_MIPS64
23188 case OPC_DAPPEND_DSP
:
23189 switch (MASK_DAPPEND(ctx
->opcode
)) {
23192 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23196 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23197 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23198 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23202 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23203 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23204 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23209 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23210 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23211 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23212 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23215 default: /* Invalid */
23216 MIPS_INVAL("MASK DAPPEND");
23217 generate_exception_end(ctx
, EXCP_RI
);
23226 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23227 int ret
, int v1
, int v2
, int check_ret
)
23236 if ((ret
== 0) && (check_ret
== 1)) {
23237 /* Treat as NOP. */
23241 t0
= tcg_temp_new();
23242 t1
= tcg_temp_new();
23243 v1_t
= tcg_temp_new();
23244 v2_t
= tcg_temp_new();
23246 gen_load_gpr(v1_t
, v1
);
23247 gen_load_gpr(v2_t
, v2
);
23250 case OPC_EXTR_W_DSP
:
23254 tcg_gen_movi_tl(t0
, v2
);
23255 tcg_gen_movi_tl(t1
, v1
);
23256 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23259 tcg_gen_movi_tl(t0
, v2
);
23260 tcg_gen_movi_tl(t1
, v1
);
23261 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23263 case OPC_EXTR_RS_W
:
23264 tcg_gen_movi_tl(t0
, v2
);
23265 tcg_gen_movi_tl(t1
, v1
);
23266 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23269 tcg_gen_movi_tl(t0
, v2
);
23270 tcg_gen_movi_tl(t1
, v1
);
23271 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23273 case OPC_EXTRV_S_H
:
23274 tcg_gen_movi_tl(t0
, v2
);
23275 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23278 tcg_gen_movi_tl(t0
, v2
);
23279 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23281 case OPC_EXTRV_R_W
:
23282 tcg_gen_movi_tl(t0
, v2
);
23283 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23285 case OPC_EXTRV_RS_W
:
23286 tcg_gen_movi_tl(t0
, v2
);
23287 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23290 tcg_gen_movi_tl(t0
, v2
);
23291 tcg_gen_movi_tl(t1
, v1
);
23292 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23295 tcg_gen_movi_tl(t0
, v2
);
23296 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23299 tcg_gen_movi_tl(t0
, v2
);
23300 tcg_gen_movi_tl(t1
, v1
);
23301 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23304 tcg_gen_movi_tl(t0
, v2
);
23305 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23308 imm
= (ctx
->opcode
>> 20) & 0x3F;
23309 tcg_gen_movi_tl(t0
, ret
);
23310 tcg_gen_movi_tl(t1
, imm
);
23311 gen_helper_shilo(t0
, t1
, cpu_env
);
23314 tcg_gen_movi_tl(t0
, ret
);
23315 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23318 tcg_gen_movi_tl(t0
, ret
);
23319 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23322 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23323 tcg_gen_movi_tl(t0
, imm
);
23324 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23327 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23328 tcg_gen_movi_tl(t0
, imm
);
23329 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23333 #ifdef TARGET_MIPS64
23334 case OPC_DEXTR_W_DSP
:
23338 tcg_gen_movi_tl(t0
, ret
);
23339 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23343 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23344 int ac
= (ctx
->opcode
>> 11) & 0x03;
23345 tcg_gen_movi_tl(t0
, shift
);
23346 tcg_gen_movi_tl(t1
, ac
);
23347 gen_helper_dshilo(t0
, t1
, cpu_env
);
23352 int ac
= (ctx
->opcode
>> 11) & 0x03;
23353 tcg_gen_movi_tl(t0
, ac
);
23354 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23358 tcg_gen_movi_tl(t0
, v2
);
23359 tcg_gen_movi_tl(t1
, v1
);
23361 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23364 tcg_gen_movi_tl(t0
, v2
);
23365 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23368 tcg_gen_movi_tl(t0
, v2
);
23369 tcg_gen_movi_tl(t1
, v1
);
23370 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23373 tcg_gen_movi_tl(t0
, v2
);
23374 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23377 tcg_gen_movi_tl(t0
, v2
);
23378 tcg_gen_movi_tl(t1
, v1
);
23379 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23381 case OPC_DEXTR_R_L
:
23382 tcg_gen_movi_tl(t0
, v2
);
23383 tcg_gen_movi_tl(t1
, v1
);
23384 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23386 case OPC_DEXTR_RS_L
:
23387 tcg_gen_movi_tl(t0
, v2
);
23388 tcg_gen_movi_tl(t1
, v1
);
23389 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23392 tcg_gen_movi_tl(t0
, v2
);
23393 tcg_gen_movi_tl(t1
, v1
);
23394 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23396 case OPC_DEXTR_R_W
:
23397 tcg_gen_movi_tl(t0
, v2
);
23398 tcg_gen_movi_tl(t1
, v1
);
23399 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23401 case OPC_DEXTR_RS_W
:
23402 tcg_gen_movi_tl(t0
, v2
);
23403 tcg_gen_movi_tl(t1
, v1
);
23404 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23406 case OPC_DEXTR_S_H
:
23407 tcg_gen_movi_tl(t0
, v2
);
23408 tcg_gen_movi_tl(t1
, v1
);
23409 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23411 case OPC_DEXTRV_S_H
:
23412 tcg_gen_movi_tl(t0
, v2
);
23413 tcg_gen_movi_tl(t1
, v1
);
23414 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23417 tcg_gen_movi_tl(t0
, v2
);
23418 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23420 case OPC_DEXTRV_R_L
:
23421 tcg_gen_movi_tl(t0
, v2
);
23422 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23424 case OPC_DEXTRV_RS_L
:
23425 tcg_gen_movi_tl(t0
, v2
);
23426 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23429 tcg_gen_movi_tl(t0
, v2
);
23430 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23432 case OPC_DEXTRV_R_W
:
23433 tcg_gen_movi_tl(t0
, v2
);
23434 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23436 case OPC_DEXTRV_RS_W
:
23437 tcg_gen_movi_tl(t0
, v2
);
23438 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23447 tcg_temp_free(v1_t
);
23448 tcg_temp_free(v2_t
);
23451 /* End MIPSDSP functions. */
23453 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23455 int rs
, rt
, rd
, sa
;
23458 rs
= (ctx
->opcode
>> 21) & 0x1f;
23459 rt
= (ctx
->opcode
>> 16) & 0x1f;
23460 rd
= (ctx
->opcode
>> 11) & 0x1f;
23461 sa
= (ctx
->opcode
>> 6) & 0x1f;
23463 op1
= MASK_SPECIAL(ctx
->opcode
);
23466 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23472 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23482 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23485 MIPS_INVAL("special_r6 muldiv");
23486 generate_exception_end(ctx
, EXCP_RI
);
23492 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23496 if (rt
== 0 && sa
== 1) {
23497 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23498 We need additionally to check other fields */
23499 gen_cl(ctx
, op1
, rd
, rs
);
23501 generate_exception_end(ctx
, EXCP_RI
);
23505 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23506 gen_helper_do_semihosting(cpu_env
);
23508 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23509 generate_exception_end(ctx
, EXCP_RI
);
23511 generate_exception_end(ctx
, EXCP_DBp
);
23515 #if defined(TARGET_MIPS64)
23517 check_mips_64(ctx
);
23518 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23522 if (rt
== 0 && sa
== 1) {
23523 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23524 We need additionally to check other fields */
23525 check_mips_64(ctx
);
23526 gen_cl(ctx
, op1
, rd
, rs
);
23528 generate_exception_end(ctx
, EXCP_RI
);
23536 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23546 check_mips_64(ctx
);
23547 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23550 MIPS_INVAL("special_r6 muldiv");
23551 generate_exception_end(ctx
, EXCP_RI
);
23556 default: /* Invalid */
23557 MIPS_INVAL("special_r6");
23558 generate_exception_end(ctx
, EXCP_RI
);
23563 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23565 int rs
, rt
, rd
, sa
;
23568 rs
= (ctx
->opcode
>> 21) & 0x1f;
23569 rt
= (ctx
->opcode
>> 16) & 0x1f;
23570 rd
= (ctx
->opcode
>> 11) & 0x1f;
23571 sa
= (ctx
->opcode
>> 6) & 0x1f;
23573 op1
= MASK_SPECIAL(ctx
->opcode
);
23575 case OPC_MOVN
: /* Conditional move */
23577 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23578 INSN_LOONGSON2E
| INSN_LOONGSON2F
| INSN_R5900
);
23579 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23581 case OPC_MFHI
: /* Move from HI/LO */
23583 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23586 case OPC_MTLO
: /* Move to HI/LO */
23587 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23590 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23591 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23592 check_cp1_enabled(ctx
);
23593 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23594 (ctx
->opcode
>> 16) & 1);
23596 generate_exception_err(ctx
, EXCP_CpU
, 1);
23602 check_insn(ctx
, INSN_VR54XX
);
23603 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23604 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23605 } else if (ctx
->insn_flags
& INSN_R5900
) {
23606 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23608 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23613 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23615 #if defined(TARGET_MIPS64)
23620 check_insn(ctx
, ISA_MIPS3
);
23621 check_insn_opc_user_only(ctx
, INSN_R5900
);
23622 check_mips_64(ctx
);
23623 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23627 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23630 #ifdef MIPS_STRICT_STANDARD
23631 MIPS_INVAL("SPIM");
23632 generate_exception_end(ctx
, EXCP_RI
);
23634 /* Implemented as RI exception for now. */
23635 MIPS_INVAL("spim (unofficial)");
23636 generate_exception_end(ctx
, EXCP_RI
);
23639 default: /* Invalid */
23640 MIPS_INVAL("special_legacy");
23641 generate_exception_end(ctx
, EXCP_RI
);
23646 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23648 int rs
, rt
, rd
, sa
;
23651 rs
= (ctx
->opcode
>> 21) & 0x1f;
23652 rt
= (ctx
->opcode
>> 16) & 0x1f;
23653 rd
= (ctx
->opcode
>> 11) & 0x1f;
23654 sa
= (ctx
->opcode
>> 6) & 0x1f;
23656 op1
= MASK_SPECIAL(ctx
->opcode
);
23658 case OPC_SLL
: /* Shift with immediate */
23659 if (sa
== 5 && rd
== 0 &&
23660 rs
== 0 && rt
== 0) { /* PAUSE */
23661 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23662 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23663 generate_exception_end(ctx
, EXCP_RI
);
23669 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23672 switch ((ctx
->opcode
>> 21) & 0x1f) {
23674 /* rotr is decoded as srl on non-R2 CPUs */
23675 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23680 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23683 generate_exception_end(ctx
, EXCP_RI
);
23691 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23693 case OPC_SLLV
: /* Shifts */
23695 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23698 switch ((ctx
->opcode
>> 6) & 0x1f) {
23700 /* rotrv is decoded as srlv on non-R2 CPUs */
23701 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23706 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23709 generate_exception_end(ctx
, EXCP_RI
);
23713 case OPC_SLT
: /* Set on less than */
23715 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23717 case OPC_AND
: /* Logic*/
23721 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23724 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23726 case OPC_TGE
: /* Traps */
23732 check_insn(ctx
, ISA_MIPS2
);
23733 gen_trap(ctx
, op1
, rs
, rt
, -1);
23735 case OPC_LSA
: /* OPC_PMON */
23736 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23737 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23738 decode_opc_special_r6(env
, ctx
);
23740 /* Pmon entry point, also R4010 selsl */
23741 #ifdef MIPS_STRICT_STANDARD
23742 MIPS_INVAL("PMON / selsl");
23743 generate_exception_end(ctx
, EXCP_RI
);
23745 gen_helper_0e0i(pmon
, sa
);
23750 generate_exception_end(ctx
, EXCP_SYSCALL
);
23753 generate_exception_end(ctx
, EXCP_BREAK
);
23756 check_insn(ctx
, ISA_MIPS2
);
23757 gen_sync(extract32(ctx
->opcode
, 6, 5));
23760 #if defined(TARGET_MIPS64)
23761 /* MIPS64 specific opcodes */
23766 check_insn(ctx
, ISA_MIPS3
);
23767 check_mips_64(ctx
);
23768 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23771 switch ((ctx
->opcode
>> 21) & 0x1f) {
23773 /* drotr is decoded as dsrl on non-R2 CPUs */
23774 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23779 check_insn(ctx
, ISA_MIPS3
);
23780 check_mips_64(ctx
);
23781 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23784 generate_exception_end(ctx
, EXCP_RI
);
23789 switch ((ctx
->opcode
>> 21) & 0x1f) {
23791 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23792 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23797 check_insn(ctx
, ISA_MIPS3
);
23798 check_mips_64(ctx
);
23799 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23802 generate_exception_end(ctx
, EXCP_RI
);
23810 check_insn(ctx
, ISA_MIPS3
);
23811 check_mips_64(ctx
);
23812 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23816 check_insn(ctx
, ISA_MIPS3
);
23817 check_mips_64(ctx
);
23818 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23821 switch ((ctx
->opcode
>> 6) & 0x1f) {
23823 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23824 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23829 check_insn(ctx
, ISA_MIPS3
);
23830 check_mips_64(ctx
);
23831 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23834 generate_exception_end(ctx
, EXCP_RI
);
23839 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23840 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23841 decode_opc_special_r6(env
, ctx
);
23846 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23847 decode_opc_special_r6(env
, ctx
);
23849 decode_opc_special_legacy(env
, ctx
);
23854 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23861 rs
= (ctx
->opcode
>> 21) & 0x1f;
23862 rt
= (ctx
->opcode
>> 16) & 0x1f;
23863 rd
= (ctx
->opcode
>> 11) & 0x1f;
23865 op1
= MASK_SPECIAL2(ctx
->opcode
);
23867 case OPC_MADD
: /* Multiply and add/sub */
23871 check_insn(ctx
, ISA_MIPS32
);
23872 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23875 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23878 case OPC_DIVU_G_2F
:
23879 case OPC_MULT_G_2F
:
23880 case OPC_MULTU_G_2F
:
23882 case OPC_MODU_G_2F
:
23883 check_insn(ctx
, INSN_LOONGSON2F
);
23884 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23888 check_insn(ctx
, ISA_MIPS32
);
23889 gen_cl(ctx
, op1
, rd
, rs
);
23892 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23893 gen_helper_do_semihosting(cpu_env
);
23895 /* XXX: not clear which exception should be raised
23896 * when in debug mode...
23898 check_insn(ctx
, ISA_MIPS32
);
23899 generate_exception_end(ctx
, EXCP_DBp
);
23902 #if defined(TARGET_MIPS64)
23905 check_insn(ctx
, ISA_MIPS64
);
23906 check_mips_64(ctx
);
23907 gen_cl(ctx
, op1
, rd
, rs
);
23909 case OPC_DMULT_G_2F
:
23910 case OPC_DMULTU_G_2F
:
23911 case OPC_DDIV_G_2F
:
23912 case OPC_DDIVU_G_2F
:
23913 case OPC_DMOD_G_2F
:
23914 case OPC_DMODU_G_2F
:
23915 check_insn(ctx
, INSN_LOONGSON2F
);
23916 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23919 default: /* Invalid */
23920 MIPS_INVAL("special2_legacy");
23921 generate_exception_end(ctx
, EXCP_RI
);
23926 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23928 int rs
, rt
, rd
, sa
;
23932 rs
= (ctx
->opcode
>> 21) & 0x1f;
23933 rt
= (ctx
->opcode
>> 16) & 0x1f;
23934 rd
= (ctx
->opcode
>> 11) & 0x1f;
23935 sa
= (ctx
->opcode
>> 6) & 0x1f;
23936 imm
= (int16_t)ctx
->opcode
>> 7;
23938 op1
= MASK_SPECIAL3(ctx
->opcode
);
23942 /* hint codes 24-31 are reserved and signal RI */
23943 generate_exception_end(ctx
, EXCP_RI
);
23945 /* Treat as NOP. */
23948 check_cp0_enabled(ctx
);
23949 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23950 gen_cache_operation(ctx
, rt
, rs
, imm
);
23954 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23957 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23962 /* Treat as NOP. */
23965 op2
= MASK_BSHFL(ctx
->opcode
);
23971 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
23974 gen_bitswap(ctx
, op2
, rd
, rt
);
23979 #if defined(TARGET_MIPS64)
23981 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23984 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23987 check_mips_64(ctx
);
23990 /* Treat as NOP. */
23993 op2
= MASK_DBSHFL(ctx
->opcode
);
24003 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
24006 gen_bitswap(ctx
, op2
, rd
, rt
);
24013 default: /* Invalid */
24014 MIPS_INVAL("special3_r6");
24015 generate_exception_end(ctx
, EXCP_RI
);
24020 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24025 rs
= (ctx
->opcode
>> 21) & 0x1f;
24026 rt
= (ctx
->opcode
>> 16) & 0x1f;
24027 rd
= (ctx
->opcode
>> 11) & 0x1f;
24029 op1
= MASK_SPECIAL3(ctx
->opcode
);
24032 case OPC_DIVU_G_2E
:
24034 case OPC_MODU_G_2E
:
24035 case OPC_MULT_G_2E
:
24036 case OPC_MULTU_G_2E
:
24037 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
24038 * the same mask and op1. */
24039 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
24040 op2
= MASK_ADDUH_QB(ctx
->opcode
);
24043 case OPC_ADDUH_R_QB
:
24045 case OPC_ADDQH_R_PH
:
24047 case OPC_ADDQH_R_W
:
24049 case OPC_SUBUH_R_QB
:
24051 case OPC_SUBQH_R_PH
:
24053 case OPC_SUBQH_R_W
:
24054 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24059 case OPC_MULQ_RS_W
:
24060 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24063 MIPS_INVAL("MASK ADDUH.QB");
24064 generate_exception_end(ctx
, EXCP_RI
);
24067 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
24068 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24070 generate_exception_end(ctx
, EXCP_RI
);
24074 op2
= MASK_LX(ctx
->opcode
);
24076 #if defined(TARGET_MIPS64)
24082 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
24084 default: /* Invalid */
24085 MIPS_INVAL("MASK LX");
24086 generate_exception_end(ctx
, EXCP_RI
);
24090 case OPC_ABSQ_S_PH_DSP
:
24091 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
24093 case OPC_ABSQ_S_QB
:
24094 case OPC_ABSQ_S_PH
:
24096 case OPC_PRECEQ_W_PHL
:
24097 case OPC_PRECEQ_W_PHR
:
24098 case OPC_PRECEQU_PH_QBL
:
24099 case OPC_PRECEQU_PH_QBR
:
24100 case OPC_PRECEQU_PH_QBLA
:
24101 case OPC_PRECEQU_PH_QBRA
:
24102 case OPC_PRECEU_PH_QBL
:
24103 case OPC_PRECEU_PH_QBR
:
24104 case OPC_PRECEU_PH_QBLA
:
24105 case OPC_PRECEU_PH_QBRA
:
24106 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24113 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24116 MIPS_INVAL("MASK ABSQ_S.PH");
24117 generate_exception_end(ctx
, EXCP_RI
);
24121 case OPC_ADDU_QB_DSP
:
24122 op2
= MASK_ADDU_QB(ctx
->opcode
);
24125 case OPC_ADDQ_S_PH
:
24128 case OPC_ADDU_S_QB
:
24130 case OPC_ADDU_S_PH
:
24132 case OPC_SUBQ_S_PH
:
24135 case OPC_SUBU_S_QB
:
24137 case OPC_SUBU_S_PH
:
24141 case OPC_RADDU_W_QB
:
24142 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24144 case OPC_MULEU_S_PH_QBL
:
24145 case OPC_MULEU_S_PH_QBR
:
24146 case OPC_MULQ_RS_PH
:
24147 case OPC_MULEQ_S_W_PHL
:
24148 case OPC_MULEQ_S_W_PHR
:
24149 case OPC_MULQ_S_PH
:
24150 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24152 default: /* Invalid */
24153 MIPS_INVAL("MASK ADDU.QB");
24154 generate_exception_end(ctx
, EXCP_RI
);
24159 case OPC_CMPU_EQ_QB_DSP
:
24160 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
24162 case OPC_PRECR_SRA_PH_W
:
24163 case OPC_PRECR_SRA_R_PH_W
:
24164 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24166 case OPC_PRECR_QB_PH
:
24167 case OPC_PRECRQ_QB_PH
:
24168 case OPC_PRECRQ_PH_W
:
24169 case OPC_PRECRQ_RS_PH_W
:
24170 case OPC_PRECRQU_S_QB_PH
:
24171 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24173 case OPC_CMPU_EQ_QB
:
24174 case OPC_CMPU_LT_QB
:
24175 case OPC_CMPU_LE_QB
:
24176 case OPC_CMP_EQ_PH
:
24177 case OPC_CMP_LT_PH
:
24178 case OPC_CMP_LE_PH
:
24179 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24181 case OPC_CMPGU_EQ_QB
:
24182 case OPC_CMPGU_LT_QB
:
24183 case OPC_CMPGU_LE_QB
:
24184 case OPC_CMPGDU_EQ_QB
:
24185 case OPC_CMPGDU_LT_QB
:
24186 case OPC_CMPGDU_LE_QB
:
24189 case OPC_PACKRL_PH
:
24190 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24192 default: /* Invalid */
24193 MIPS_INVAL("MASK CMPU.EQ.QB");
24194 generate_exception_end(ctx
, EXCP_RI
);
24198 case OPC_SHLL_QB_DSP
:
24199 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24201 case OPC_DPA_W_PH_DSP
:
24202 op2
= MASK_DPA_W_PH(ctx
->opcode
);
24204 case OPC_DPAU_H_QBL
:
24205 case OPC_DPAU_H_QBR
:
24206 case OPC_DPSU_H_QBL
:
24207 case OPC_DPSU_H_QBR
:
24209 case OPC_DPAX_W_PH
:
24210 case OPC_DPAQ_S_W_PH
:
24211 case OPC_DPAQX_S_W_PH
:
24212 case OPC_DPAQX_SA_W_PH
:
24214 case OPC_DPSX_W_PH
:
24215 case OPC_DPSQ_S_W_PH
:
24216 case OPC_DPSQX_S_W_PH
:
24217 case OPC_DPSQX_SA_W_PH
:
24218 case OPC_MULSAQ_S_W_PH
:
24219 case OPC_DPAQ_SA_L_W
:
24220 case OPC_DPSQ_SA_L_W
:
24221 case OPC_MAQ_S_W_PHL
:
24222 case OPC_MAQ_S_W_PHR
:
24223 case OPC_MAQ_SA_W_PHL
:
24224 case OPC_MAQ_SA_W_PHR
:
24225 case OPC_MULSA_W_PH
:
24226 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24228 default: /* Invalid */
24229 MIPS_INVAL("MASK DPAW.PH");
24230 generate_exception_end(ctx
, EXCP_RI
);
24235 op2
= MASK_INSV(ctx
->opcode
);
24246 t0
= tcg_temp_new();
24247 t1
= tcg_temp_new();
24249 gen_load_gpr(t0
, rt
);
24250 gen_load_gpr(t1
, rs
);
24252 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24258 default: /* Invalid */
24259 MIPS_INVAL("MASK INSV");
24260 generate_exception_end(ctx
, EXCP_RI
);
24264 case OPC_APPEND_DSP
:
24265 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24267 case OPC_EXTR_W_DSP
:
24268 op2
= MASK_EXTR_W(ctx
->opcode
);
24272 case OPC_EXTR_RS_W
:
24274 case OPC_EXTRV_S_H
:
24276 case OPC_EXTRV_R_W
:
24277 case OPC_EXTRV_RS_W
:
24282 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24285 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24291 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24293 default: /* Invalid */
24294 MIPS_INVAL("MASK EXTR.W");
24295 generate_exception_end(ctx
, EXCP_RI
);
24299 #if defined(TARGET_MIPS64)
24300 case OPC_DDIV_G_2E
:
24301 case OPC_DDIVU_G_2E
:
24302 case OPC_DMULT_G_2E
:
24303 case OPC_DMULTU_G_2E
:
24304 case OPC_DMOD_G_2E
:
24305 case OPC_DMODU_G_2E
:
24306 check_insn(ctx
, INSN_LOONGSON2E
);
24307 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
24309 case OPC_ABSQ_S_QH_DSP
:
24310 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
24312 case OPC_PRECEQ_L_PWL
:
24313 case OPC_PRECEQ_L_PWR
:
24314 case OPC_PRECEQ_PW_QHL
:
24315 case OPC_PRECEQ_PW_QHR
:
24316 case OPC_PRECEQ_PW_QHLA
:
24317 case OPC_PRECEQ_PW_QHRA
:
24318 case OPC_PRECEQU_QH_OBL
:
24319 case OPC_PRECEQU_QH_OBR
:
24320 case OPC_PRECEQU_QH_OBLA
:
24321 case OPC_PRECEQU_QH_OBRA
:
24322 case OPC_PRECEU_QH_OBL
:
24323 case OPC_PRECEU_QH_OBR
:
24324 case OPC_PRECEU_QH_OBLA
:
24325 case OPC_PRECEU_QH_OBRA
:
24326 case OPC_ABSQ_S_OB
:
24327 case OPC_ABSQ_S_PW
:
24328 case OPC_ABSQ_S_QH
:
24329 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24337 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
24339 default: /* Invalid */
24340 MIPS_INVAL("MASK ABSQ_S.QH");
24341 generate_exception_end(ctx
, EXCP_RI
);
24345 case OPC_ADDU_OB_DSP
:
24346 op2
= MASK_ADDU_OB(ctx
->opcode
);
24348 case OPC_RADDU_L_OB
:
24350 case OPC_SUBQ_S_PW
:
24352 case OPC_SUBQ_S_QH
:
24354 case OPC_SUBU_S_OB
:
24356 case OPC_SUBU_S_QH
:
24358 case OPC_SUBUH_R_OB
:
24360 case OPC_ADDQ_S_PW
:
24362 case OPC_ADDQ_S_QH
:
24364 case OPC_ADDU_S_OB
:
24366 case OPC_ADDU_S_QH
:
24368 case OPC_ADDUH_R_OB
:
24369 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24371 case OPC_MULEQ_S_PW_QHL
:
24372 case OPC_MULEQ_S_PW_QHR
:
24373 case OPC_MULEU_S_QH_OBL
:
24374 case OPC_MULEU_S_QH_OBR
:
24375 case OPC_MULQ_RS_QH
:
24376 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24378 default: /* Invalid */
24379 MIPS_INVAL("MASK ADDU.OB");
24380 generate_exception_end(ctx
, EXCP_RI
);
24384 case OPC_CMPU_EQ_OB_DSP
:
24385 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
24387 case OPC_PRECR_SRA_QH_PW
:
24388 case OPC_PRECR_SRA_R_QH_PW
:
24389 /* Return value is rt. */
24390 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24392 case OPC_PRECR_OB_QH
:
24393 case OPC_PRECRQ_OB_QH
:
24394 case OPC_PRECRQ_PW_L
:
24395 case OPC_PRECRQ_QH_PW
:
24396 case OPC_PRECRQ_RS_QH_PW
:
24397 case OPC_PRECRQU_S_OB_QH
:
24398 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24400 case OPC_CMPU_EQ_OB
:
24401 case OPC_CMPU_LT_OB
:
24402 case OPC_CMPU_LE_OB
:
24403 case OPC_CMP_EQ_QH
:
24404 case OPC_CMP_LT_QH
:
24405 case OPC_CMP_LE_QH
:
24406 case OPC_CMP_EQ_PW
:
24407 case OPC_CMP_LT_PW
:
24408 case OPC_CMP_LE_PW
:
24409 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24411 case OPC_CMPGDU_EQ_OB
:
24412 case OPC_CMPGDU_LT_OB
:
24413 case OPC_CMPGDU_LE_OB
:
24414 case OPC_CMPGU_EQ_OB
:
24415 case OPC_CMPGU_LT_OB
:
24416 case OPC_CMPGU_LE_OB
:
24417 case OPC_PACKRL_PW
:
24421 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24423 default: /* Invalid */
24424 MIPS_INVAL("MASK CMPU_EQ.OB");
24425 generate_exception_end(ctx
, EXCP_RI
);
24429 case OPC_DAPPEND_DSP
:
24430 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24432 case OPC_DEXTR_W_DSP
:
24433 op2
= MASK_DEXTR_W(ctx
->opcode
);
24440 case OPC_DEXTR_R_L
:
24441 case OPC_DEXTR_RS_L
:
24443 case OPC_DEXTR_R_W
:
24444 case OPC_DEXTR_RS_W
:
24445 case OPC_DEXTR_S_H
:
24447 case OPC_DEXTRV_R_L
:
24448 case OPC_DEXTRV_RS_L
:
24449 case OPC_DEXTRV_S_H
:
24451 case OPC_DEXTRV_R_W
:
24452 case OPC_DEXTRV_RS_W
:
24453 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24458 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24460 default: /* Invalid */
24461 MIPS_INVAL("MASK EXTR.W");
24462 generate_exception_end(ctx
, EXCP_RI
);
24466 case OPC_DPAQ_W_QH_DSP
:
24467 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
24469 case OPC_DPAU_H_OBL
:
24470 case OPC_DPAU_H_OBR
:
24471 case OPC_DPSU_H_OBL
:
24472 case OPC_DPSU_H_OBR
:
24474 case OPC_DPAQ_S_W_QH
:
24476 case OPC_DPSQ_S_W_QH
:
24477 case OPC_MULSAQ_S_W_QH
:
24478 case OPC_DPAQ_SA_L_PW
:
24479 case OPC_DPSQ_SA_L_PW
:
24480 case OPC_MULSAQ_S_L_PW
:
24481 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24483 case OPC_MAQ_S_W_QHLL
:
24484 case OPC_MAQ_S_W_QHLR
:
24485 case OPC_MAQ_S_W_QHRL
:
24486 case OPC_MAQ_S_W_QHRR
:
24487 case OPC_MAQ_SA_W_QHLL
:
24488 case OPC_MAQ_SA_W_QHLR
:
24489 case OPC_MAQ_SA_W_QHRL
:
24490 case OPC_MAQ_SA_W_QHRR
:
24491 case OPC_MAQ_S_L_PWL
:
24492 case OPC_MAQ_S_L_PWR
:
24497 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24499 default: /* Invalid */
24500 MIPS_INVAL("MASK DPAQ.W.QH");
24501 generate_exception_end(ctx
, EXCP_RI
);
24505 case OPC_DINSV_DSP
:
24506 op2
= MASK_INSV(ctx
->opcode
);
24517 t0
= tcg_temp_new();
24518 t1
= tcg_temp_new();
24520 gen_load_gpr(t0
, rt
);
24521 gen_load_gpr(t1
, rs
);
24523 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24529 default: /* Invalid */
24530 MIPS_INVAL("MASK DINSV");
24531 generate_exception_end(ctx
, EXCP_RI
);
24535 case OPC_SHLL_OB_DSP
:
24536 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24539 default: /* Invalid */
24540 MIPS_INVAL("special3_legacy");
24541 generate_exception_end(ctx
, EXCP_RI
);
24546 static void decode_tx79_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
24548 uint32_t opc
= MASK_TX79_MMI0(ctx
->opcode
);
24551 case TX79_MMI0_PADDW
: /* TODO: TX79_MMI0_PADDW */
24552 case TX79_MMI0_PSUBW
: /* TODO: TX79_MMI0_PSUBW */
24553 case TX79_MMI0_PCGTW
: /* TODO: TX79_MMI0_PCGTW */
24554 case TX79_MMI0_PMAXW
: /* TODO: TX79_MMI0_PMAXW */
24555 case TX79_MMI0_PADDH
: /* TODO: TX79_MMI0_PADDH */
24556 case TX79_MMI0_PSUBH
: /* TODO: TX79_MMI0_PSUBH */
24557 case TX79_MMI0_PCGTH
: /* TODO: TX79_MMI0_PCGTH */
24558 case TX79_MMI0_PMAXH
: /* TODO: TX79_MMI0_PMAXH */
24559 case TX79_MMI0_PADDB
: /* TODO: TX79_MMI0_PADDB */
24560 case TX79_MMI0_PSUBB
: /* TODO: TX79_MMI0_PSUBB */
24561 case TX79_MMI0_PCGTB
: /* TODO: TX79_MMI0_PCGTB */
24562 case TX79_MMI0_PADDSW
: /* TODO: TX79_MMI0_PADDSW */
24563 case TX79_MMI0_PSUBSW
: /* TODO: TX79_MMI0_PSUBSW */
24564 case TX79_MMI0_PEXTLW
: /* TODO: TX79_MMI0_PEXTLW */
24565 case TX79_MMI0_PPACW
: /* TODO: TX79_MMI0_PPACW */
24566 case TX79_MMI0_PADDSH
: /* TODO: TX79_MMI0_PADDSH */
24567 case TX79_MMI0_PSUBSH
: /* TODO: TX79_MMI0_PSUBSH */
24568 case TX79_MMI0_PEXTLH
: /* TODO: TX79_MMI0_PEXTLH */
24569 case TX79_MMI0_PPACH
: /* TODO: TX79_MMI0_PPACH */
24570 case TX79_MMI0_PADDSB
: /* TODO: TX79_MMI0_PADDSB */
24571 case TX79_MMI0_PSUBSB
: /* TODO: TX79_MMI0_PSUBSB */
24572 case TX79_MMI0_PEXTLB
: /* TODO: TX79_MMI0_PEXTLB */
24573 case TX79_MMI0_PPACB
: /* TODO: TX79_MMI0_PPACB */
24574 case TX79_MMI0_PEXT5
: /* TODO: TX79_MMI0_PEXT5 */
24575 case TX79_MMI0_PPAC5
: /* TODO: TX79_MMI0_PPAC5 */
24576 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI0 */
24579 MIPS_INVAL("TX79 MMI class MMI0");
24580 generate_exception_end(ctx
, EXCP_RI
);
24585 static void decode_tx79_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
24587 uint32_t opc
= MASK_TX79_MMI1(ctx
->opcode
);
24590 case TX79_MMI1_PABSW
: /* TODO: TX79_MMI1_PABSW */
24591 case TX79_MMI1_PCEQW
: /* TODO: TX79_MMI1_PCEQW */
24592 case TX79_MMI1_PMINW
: /* TODO: TX79_MMI1_PMINW */
24593 case TX79_MMI1_PADSBH
: /* TODO: TX79_MMI1_PADSBH */
24594 case TX79_MMI1_PABSH
: /* TODO: TX79_MMI1_PABSH */
24595 case TX79_MMI1_PCEQH
: /* TODO: TX79_MMI1_PCEQH */
24596 case TX79_MMI1_PMINH
: /* TODO: TX79_MMI1_PMINH */
24597 case TX79_MMI1_PCEQB
: /* TODO: TX79_MMI1_PCEQB */
24598 case TX79_MMI1_PADDUW
: /* TODO: TX79_MMI1_PADDUW */
24599 case TX79_MMI1_PSUBUW
: /* TODO: TX79_MMI1_PSUBUW */
24600 case TX79_MMI1_PEXTUW
: /* TODO: TX79_MMI1_PEXTUW */
24601 case TX79_MMI1_PADDUH
: /* TODO: TX79_MMI1_PADDUH */
24602 case TX79_MMI1_PSUBUH
: /* TODO: TX79_MMI1_PSUBUH */
24603 case TX79_MMI1_PEXTUH
: /* TODO: TX79_MMI1_PEXTUH */
24604 case TX79_MMI1_PADDUB
: /* TODO: TX79_MMI1_PADDUB */
24605 case TX79_MMI1_PSUBUB
: /* TODO: TX79_MMI1_PSUBUB */
24606 case TX79_MMI1_PEXTUB
: /* TODO: TX79_MMI1_PEXTUB */
24607 case TX79_MMI1_QFSRV
: /* TODO: TX79_MMI1_QFSRV */
24608 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI1 */
24611 MIPS_INVAL("TX79 MMI class MMI1");
24612 generate_exception_end(ctx
, EXCP_RI
);
24617 static void decode_tx79_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
24619 uint32_t opc
= MASK_TX79_MMI2(ctx
->opcode
);
24622 case TX79_MMI2_PMADDW
: /* TODO: TX79_MMI2_PMADDW */
24623 case TX79_MMI2_PSLLVW
: /* TODO: TX79_MMI2_PSLLVW */
24624 case TX79_MMI2_PSRLVW
: /* TODO: TX79_MMI2_PSRLVW */
24625 case TX79_MMI2_PMSUBW
: /* TODO: TX79_MMI2_PMSUBW */
24626 case TX79_MMI2_PMFHI
: /* TODO: TX79_MMI2_PMFHI */
24627 case TX79_MMI2_PMFLO
: /* TODO: TX79_MMI2_PMFLO */
24628 case TX79_MMI2_PINTH
: /* TODO: TX79_MMI2_PINTH */
24629 case TX79_MMI2_PMULTW
: /* TODO: TX79_MMI2_PMULTW */
24630 case TX79_MMI2_PDIVW
: /* TODO: TX79_MMI2_PDIVW */
24631 case TX79_MMI2_PCPYLD
: /* TODO: TX79_MMI2_PCPYLD */
24632 case TX79_MMI2_PMADDH
: /* TODO: TX79_MMI2_PMADDH */
24633 case TX79_MMI2_PHMADH
: /* TODO: TX79_MMI2_PHMADH */
24634 case TX79_MMI2_PAND
: /* TODO: TX79_MMI2_PAND */
24635 case TX79_MMI2_PXOR
: /* TODO: TX79_MMI2_PXOR */
24636 case TX79_MMI2_PMSUBH
: /* TODO: TX79_MMI2_PMSUBH */
24637 case TX79_MMI2_PHMSBH
: /* TODO: TX79_MMI2_PHMSBH */
24638 case TX79_MMI2_PEXEH
: /* TODO: TX79_MMI2_PEXEH */
24639 case TX79_MMI2_PREVH
: /* TODO: TX79_MMI2_PREVH */
24640 case TX79_MMI2_PMULTH
: /* TODO: TX79_MMI2_PMULTH */
24641 case TX79_MMI2_PDIVBW
: /* TODO: TX79_MMI2_PDIVBW */
24642 case TX79_MMI2_PEXEW
: /* TODO: TX79_MMI2_PEXEW */
24643 case TX79_MMI2_PROT3W
: /* TODO: TX79_MMI2_PROT3W */
24644 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI2 */
24647 MIPS_INVAL("TX79 MMI class MMI2");
24648 generate_exception_end(ctx
, EXCP_RI
);
24653 static void decode_tx79_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
24655 uint32_t opc
= MASK_TX79_MMI3(ctx
->opcode
);
24658 case TX79_MMI3_PMADDUW
: /* TODO: TX79_MMI3_PMADDUW */
24659 case TX79_MMI3_PSRAVW
: /* TODO: TX79_MMI3_PSRAVW */
24660 case TX79_MMI3_PMTHI
: /* TODO: TX79_MMI3_PMTHI */
24661 case TX79_MMI3_PMTLO
: /* TODO: TX79_MMI3_PMTLO */
24662 case TX79_MMI3_PINTEH
: /* TODO: TX79_MMI3_PINTEH */
24663 case TX79_MMI3_PMULTUW
: /* TODO: TX79_MMI3_PMULTUW */
24664 case TX79_MMI3_PDIVUW
: /* TODO: TX79_MMI3_PDIVUW */
24665 case TX79_MMI3_PCPYUD
: /* TODO: TX79_MMI3_PCPYUD */
24666 case TX79_MMI3_POR
: /* TODO: TX79_MMI3_POR */
24667 case TX79_MMI3_PNOR
: /* TODO: TX79_MMI3_PNOR */
24668 case TX79_MMI3_PEXCH
: /* TODO: TX79_MMI3_PEXCH */
24669 case TX79_MMI3_PCPYH
: /* TODO: TX79_MMI3_PCPYH */
24670 case TX79_MMI3_PEXCW
: /* TODO: TX79_MMI3_PEXCW */
24671 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_MMI_CLASS_MMI3 */
24674 MIPS_INVAL("TX79 MMI class MMI3");
24675 generate_exception_end(ctx
, EXCP_RI
);
24680 static void decode_tx79_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
24682 uint32_t opc
= MASK_TX79_MMI(ctx
->opcode
);
24683 int rs
= extract32(ctx
->opcode
, 21, 5);
24684 int rt
= extract32(ctx
->opcode
, 16, 5);
24685 int rd
= extract32(ctx
->opcode
, 11, 5);
24688 case TX79_MMI_CLASS_MMI0
:
24689 decode_tx79_mmi0(env
, ctx
);
24691 case TX79_MMI_CLASS_MMI1
:
24692 decode_tx79_mmi1(env
, ctx
);
24694 case TX79_MMI_CLASS_MMI2
:
24695 decode_tx79_mmi2(env
, ctx
);
24697 case TX79_MMI_CLASS_MMI3
:
24698 decode_tx79_mmi3(env
, ctx
);
24700 case TX79_MMI_MULT1
:
24701 case TX79_MMI_MULTU1
:
24702 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
24704 case TX79_MMI_DIV1
:
24705 case TX79_MMI_DIVU1
:
24706 gen_muldiv(ctx
, opc
, 1, rs
, rt
);
24708 case TX79_MMI_MTLO1
:
24709 case TX79_MMI_MTHI1
:
24710 gen_HILO(ctx
, opc
, 1, rs
);
24712 case TX79_MMI_MFLO1
:
24713 case TX79_MMI_MFHI1
:
24714 gen_HILO(ctx
, opc
, 1, rd
);
24716 case TX79_MMI_MADD
: /* TODO: TX79_MMI_MADD */
24717 case TX79_MMI_MADDU
: /* TODO: TX79_MMI_MADDU */
24718 case TX79_MMI_PLZCW
: /* TODO: TX79_MMI_PLZCW */
24719 case TX79_MMI_MADD1
: /* TODO: TX79_MMI_MADD1 */
24720 case TX79_MMI_MADDU1
: /* TODO: TX79_MMI_MADDU1 */
24721 case TX79_MMI_PMFHL
: /* TODO: TX79_MMI_PMFHL */
24722 case TX79_MMI_PMTHL
: /* TODO: TX79_MMI_PMTHL */
24723 case TX79_MMI_PSLLH
: /* TODO: TX79_MMI_PSLLH */
24724 case TX79_MMI_PSRLH
: /* TODO: TX79_MMI_PSRLH */
24725 case TX79_MMI_PSRAH
: /* TODO: TX79_MMI_PSRAH */
24726 case TX79_MMI_PSLLW
: /* TODO: TX79_MMI_PSLLW */
24727 case TX79_MMI_PSRLW
: /* TODO: TX79_MMI_PSRLW */
24728 case TX79_MMI_PSRAW
: /* TODO: TX79_MMI_PSRAW */
24729 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_CLASS_MMI */
24732 MIPS_INVAL("TX79 MMI class");
24733 generate_exception_end(ctx
, EXCP_RI
);
24738 static void decode_tx79_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
24740 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_LQ */
24743 static void gen_tx79_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
24745 generate_exception_end(ctx
, EXCP_RI
); /* TODO: TX79_SQ */
/*
 * The TX79-specific instruction Store Quadword
 *
 * +--------+-------+-------+------------------------+
 * | 011111 | base  | rt    | offset                 | SQ
 * +--------+-------+-------+------------------------+
 *      6       5       5              16
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 * +--------+-------+-------+-------+-------+--------+
 * | 011111 | 00000 | rt    | rd    | 00000 | 111011 | RDHWR
 * +--------+-------+-------+-------+-------+--------+
 *      6       5       5       5       5        6
 *
 * which is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79, since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
24769 static void decode_tx79_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
24771 int base
= extract32(ctx
->opcode
, 21, 5);
24772 int rt
= extract32(ctx
->opcode
, 16, 5);
24773 int offset
= extract32(ctx
->opcode
, 0, 16);
24775 #ifdef CONFIG_USER_ONLY
24776 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
24777 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
24779 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
24780 int rd
= extract32(ctx
->opcode
, 11, 5);
24782 gen_rdhwr(ctx
, rt
, rd
, 0);
24787 gen_tx79_sq(ctx
, base
, rt
, offset
);
24790 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
24792 int rs
, rt
, rd
, sa
;
24796 rs
= (ctx
->opcode
>> 21) & 0x1f;
24797 rt
= (ctx
->opcode
>> 16) & 0x1f;
24798 rd
= (ctx
->opcode
>> 11) & 0x1f;
24799 sa
= (ctx
->opcode
>> 6) & 0x1f;
24800 imm
= sextract32(ctx
->opcode
, 7, 9);
24802 op1
= MASK_SPECIAL3(ctx
->opcode
);
24805 * EVA loads and stores overlap Loongson 2E instructions decoded by
24806 * decode_opc_special3_legacy(), so be careful to allow their decoding when
24813 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24821 check_cp0_enabled(ctx
);
24822 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24826 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24831 check_cp0_enabled(ctx
);
24832 gen_st(ctx
, op1
, rt
, rs
, imm
);
24835 check_cp0_enabled(ctx
);
24836 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24839 check_cp0_enabled(ctx
);
24840 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24841 gen_cache_operation(ctx
, rt
, rs
, imm
);
24843 /* Treat as NOP. */
24846 check_cp0_enabled(ctx
);
24847 /* Treat as NOP. */
24855 check_insn(ctx
, ISA_MIPS32R2
);
24856 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24859 op2
= MASK_BSHFL(ctx
->opcode
);
24866 check_insn(ctx
, ISA_MIPS32R6
);
24867 decode_opc_special3_r6(env
, ctx
);
24870 check_insn(ctx
, ISA_MIPS32R2
);
24871 gen_bshfl(ctx
, op2
, rt
, rd
);
24875 #if defined(TARGET_MIPS64)
24882 check_insn(ctx
, ISA_MIPS64R2
);
24883 check_mips_64(ctx
);
24884 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24887 op2
= MASK_DBSHFL(ctx
->opcode
);
24898 check_insn(ctx
, ISA_MIPS32R6
);
24899 decode_opc_special3_r6(env
, ctx
);
24902 check_insn(ctx
, ISA_MIPS64R2
);
24903 check_mips_64(ctx
);
24904 op2
= MASK_DBSHFL(ctx
->opcode
);
24905 gen_bshfl(ctx
, op2
, rt
, rd
);
24911 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
24916 TCGv t0
= tcg_temp_new();
24917 TCGv t1
= tcg_temp_new();
24919 gen_load_gpr(t0
, rt
);
24920 gen_load_gpr(t1
, rs
);
24921 gen_helper_fork(t0
, t1
);
24929 TCGv t0
= tcg_temp_new();
24931 gen_load_gpr(t0
, rs
);
24932 gen_helper_yield(t0
, cpu_env
, t0
);
24933 gen_store_gpr(t0
, rd
);
24938 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24939 decode_opc_special3_r6(env
, ctx
);
24941 decode_opc_special3_legacy(env
, ctx
);
24946 /* MIPS SIMD Architecture (MSA) */
24947 static inline int check_msa_access(DisasContext
*ctx
)
24949 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
24950 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
24951 generate_exception_end(ctx
, EXCP_RI
);
24955 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
24956 if (ctx
->insn_flags
& ASE_MSA
) {
24957 generate_exception_end(ctx
, EXCP_MSADIS
);
24960 generate_exception_end(ctx
, EXCP_RI
);
24967 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
24969 /* generates tcg ops to check if any element is 0 */
24970 /* Note this function only works with MSA_WRLEN = 128 */
24971 uint64_t eval_zero_or_big
= 0;
24972 uint64_t eval_big
= 0;
24973 TCGv_i64 t0
= tcg_temp_new_i64();
24974 TCGv_i64 t1
= tcg_temp_new_i64();
24977 eval_zero_or_big
= 0x0101010101010101ULL
;
24978 eval_big
= 0x8080808080808080ULL
;
24981 eval_zero_or_big
= 0x0001000100010001ULL
;
24982 eval_big
= 0x8000800080008000ULL
;
24985 eval_zero_or_big
= 0x0000000100000001ULL
;
24986 eval_big
= 0x8000000080000000ULL
;
24989 eval_zero_or_big
= 0x0000000000000001ULL
;
24990 eval_big
= 0x8000000000000000ULL
;
24993 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
24994 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
24995 tcg_gen_andi_i64(t0
, t0
, eval_big
);
24996 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
24997 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
24998 tcg_gen_andi_i64(t1
, t1
, eval_big
);
24999 tcg_gen_or_i64(t0
, t0
, t1
);
25000 /* if all bits are zero then all elements are not zero */
25001 /* if some bit is non-zero then some element is zero */
25002 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
25003 tcg_gen_trunc_i64_tl(tresult
, t0
);
25004 tcg_temp_free_i64(t0
);
25005 tcg_temp_free_i64(t1
);
25008 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
25010 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25011 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25012 int64_t s16
= (int16_t)ctx
->opcode
;
25014 check_msa_access(ctx
);
25016 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
25017 generate_exception_end(ctx
, EXCP_RI
);
25024 TCGv_i64 t0
= tcg_temp_new_i64();
25025 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
25026 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
25027 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
25028 tcg_gen_trunc_i64_tl(bcond
, t0
);
25029 tcg_temp_free_i64(t0
);
25036 gen_check_zero_element(bcond
, df
, wt
);
25042 gen_check_zero_element(bcond
, df
, wt
);
25043 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
25047 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
25049 ctx
->hflags
|= MIPS_HFLAG_BC
;
25050 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
25053 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
25055 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
25056 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
25057 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25058 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25060 TCGv_i32 twd
= tcg_const_i32(wd
);
25061 TCGv_i32 tws
= tcg_const_i32(ws
);
25062 TCGv_i32 ti8
= tcg_const_i32(i8
);
25064 switch (MASK_MSA_I8(ctx
->opcode
)) {
25066 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
25069 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
25072 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
25075 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
25078 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
25081 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
25084 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
25090 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
25091 if (df
== DF_DOUBLE
) {
25092 generate_exception_end(ctx
, EXCP_RI
);
25094 TCGv_i32 tdf
= tcg_const_i32(df
);
25095 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
25096 tcg_temp_free_i32(tdf
);
25101 MIPS_INVAL("MSA instruction");
25102 generate_exception_end(ctx
, EXCP_RI
);
25106 tcg_temp_free_i32(twd
);
25107 tcg_temp_free_i32(tws
);
25108 tcg_temp_free_i32(ti8
);
25111 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
25113 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25114 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25115 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
25116 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
25117 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25118 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25120 TCGv_i32 tdf
= tcg_const_i32(df
);
25121 TCGv_i32 twd
= tcg_const_i32(wd
);
25122 TCGv_i32 tws
= tcg_const_i32(ws
);
25123 TCGv_i32 timm
= tcg_temp_new_i32();
25124 tcg_gen_movi_i32(timm
, u5
);
25126 switch (MASK_MSA_I5(ctx
->opcode
)) {
25128 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25131 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25133 case OPC_MAXI_S_df
:
25134 tcg_gen_movi_i32(timm
, s5
);
25135 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25137 case OPC_MAXI_U_df
:
25138 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25140 case OPC_MINI_S_df
:
25141 tcg_gen_movi_i32(timm
, s5
);
25142 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25144 case OPC_MINI_U_df
:
25145 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25148 tcg_gen_movi_i32(timm
, s5
);
25149 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
25151 case OPC_CLTI_S_df
:
25152 tcg_gen_movi_i32(timm
, s5
);
25153 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25155 case OPC_CLTI_U_df
:
25156 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25158 case OPC_CLEI_S_df
:
25159 tcg_gen_movi_i32(timm
, s5
);
25160 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
25162 case OPC_CLEI_U_df
:
25163 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
25167 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
25168 tcg_gen_movi_i32(timm
, s10
);
25169 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
25173 MIPS_INVAL("MSA instruction");
25174 generate_exception_end(ctx
, EXCP_RI
);
25178 tcg_temp_free_i32(tdf
);
25179 tcg_temp_free_i32(twd
);
25180 tcg_temp_free_i32(tws
);
25181 tcg_temp_free_i32(timm
);
25184 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
25186 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25187 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
25188 uint32_t df
= 0, m
= 0;
25189 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25190 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25197 if ((dfm
& 0x40) == 0x00) {
25200 } else if ((dfm
& 0x60) == 0x40) {
25203 } else if ((dfm
& 0x70) == 0x60) {
25206 } else if ((dfm
& 0x78) == 0x70) {
25210 generate_exception_end(ctx
, EXCP_RI
);
25214 tdf
= tcg_const_i32(df
);
25215 tm
= tcg_const_i32(m
);
25216 twd
= tcg_const_i32(wd
);
25217 tws
= tcg_const_i32(ws
);
25219 switch (MASK_MSA_BIT(ctx
->opcode
)) {
25221 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25224 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
25227 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25230 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25233 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
25236 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
25238 case OPC_BINSLI_df
:
25239 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
25241 case OPC_BINSRI_df
:
25242 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25245 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
25248 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
25251 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
25254 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
25257 MIPS_INVAL("MSA instruction");
25258 generate_exception_end(ctx
, EXCP_RI
);
25262 tcg_temp_free_i32(tdf
);
25263 tcg_temp_free_i32(tm
);
25264 tcg_temp_free_i32(twd
);
25265 tcg_temp_free_i32(tws
);
25268 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
25270 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
25271 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
25272 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25273 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25274 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25276 TCGv_i32 tdf
= tcg_const_i32(df
);
25277 TCGv_i32 twd
= tcg_const_i32(wd
);
25278 TCGv_i32 tws
= tcg_const_i32(ws
);
25279 TCGv_i32 twt
= tcg_const_i32(wt
);
25281 switch (MASK_MSA_3R(ctx
->opcode
)) {
25283 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
25286 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25289 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25292 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25294 case OPC_SUBS_S_df
:
25295 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25298 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25301 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
25304 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25307 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
25310 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25312 case OPC_ADDS_A_df
:
25313 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25315 case OPC_SUBS_U_df
:
25316 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25319 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25322 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
25325 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
25328 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25331 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25334 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25336 case OPC_ADDS_S_df
:
25337 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25339 case OPC_SUBSUS_U_df
:
25340 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25343 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25346 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25349 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25352 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25355 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25358 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25360 case OPC_ADDS_U_df
:
25361 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25363 case OPC_SUBSUU_S_df
:
25364 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25367 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25370 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
25373 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25376 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25379 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25381 case OPC_ASUB_S_df
:
25382 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25385 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25388 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25391 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
25394 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25397 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25400 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25402 case OPC_ASUB_U_df
:
25403 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25406 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25409 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25412 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
25415 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25417 case OPC_AVER_S_df
:
25418 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25421 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25424 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
25427 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
25430 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25432 case OPC_AVER_U_df
:
25433 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25436 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25439 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
25442 case OPC_DOTP_S_df
:
25443 case OPC_DOTP_U_df
:
25444 case OPC_DPADD_S_df
:
25445 case OPC_DPADD_U_df
:
25446 case OPC_DPSUB_S_df
:
25447 case OPC_HADD_S_df
:
25448 case OPC_DPSUB_U_df
:
25449 case OPC_HADD_U_df
:
25450 case OPC_HSUB_S_df
:
25451 case OPC_HSUB_U_df
:
25452 if (df
== DF_BYTE
) {
25453 generate_exception_end(ctx
, EXCP_RI
);
25456 switch (MASK_MSA_3R(ctx
->opcode
)) {
25457 case OPC_DOTP_S_df
:
25458 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25460 case OPC_DOTP_U_df
:
25461 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25463 case OPC_DPADD_S_df
:
25464 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25466 case OPC_DPADD_U_df
:
25467 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25469 case OPC_DPSUB_S_df
:
25470 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25472 case OPC_HADD_S_df
:
25473 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25475 case OPC_DPSUB_U_df
:
25476 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25478 case OPC_HADD_U_df
:
25479 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25481 case OPC_HSUB_S_df
:
25482 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
25484 case OPC_HSUB_U_df
:
25485 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
25490 MIPS_INVAL("MSA instruction");
25491 generate_exception_end(ctx
, EXCP_RI
);
25494 tcg_temp_free_i32(twd
);
25495 tcg_temp_free_i32(tws
);
25496 tcg_temp_free_i32(twt
);
25497 tcg_temp_free_i32(tdf
);
25500 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
25502 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
25503 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
25504 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
25505 TCGv telm
= tcg_temp_new();
25506 TCGv_i32 tsr
= tcg_const_i32(source
);
25507 TCGv_i32 tdt
= tcg_const_i32(dest
);
25509 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
25511 gen_load_gpr(telm
, source
);
25512 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
25515 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
25516 gen_store_gpr(telm
, dest
);
25519 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
25522 MIPS_INVAL("MSA instruction");
25523 generate_exception_end(ctx
, EXCP_RI
);
25527 tcg_temp_free(telm
);
25528 tcg_temp_free_i32(tdt
);
25529 tcg_temp_free_i32(tsr
);
25532 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
25535 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25536 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25537 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25539 TCGv_i32 tws
= tcg_const_i32(ws
);
25540 TCGv_i32 twd
= tcg_const_i32(wd
);
25541 TCGv_i32 tn
= tcg_const_i32(n
);
25542 TCGv_i32 tdf
= tcg_const_i32(df
);
25544 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25546 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
25548 case OPC_SPLATI_df
:
25549 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
25552 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
25554 case OPC_COPY_S_df
:
25555 case OPC_COPY_U_df
:
25556 case OPC_INSERT_df
:
25557 #if !defined(TARGET_MIPS64)
25558 /* Double format valid only for MIPS64 */
25559 if (df
== DF_DOUBLE
) {
25560 generate_exception_end(ctx
, EXCP_RI
);
25564 switch (MASK_MSA_ELM(ctx
->opcode
)) {
25565 case OPC_COPY_S_df
:
25566 if (likely(wd
!= 0)) {
25567 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
25570 case OPC_COPY_U_df
:
25571 if (likely(wd
!= 0)) {
25572 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
25575 case OPC_INSERT_df
:
25576 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
25581 MIPS_INVAL("MSA instruction");
25582 generate_exception_end(ctx
, EXCP_RI
);
25584 tcg_temp_free_i32(twd
);
25585 tcg_temp_free_i32(tws
);
25586 tcg_temp_free_i32(tn
);
25587 tcg_temp_free_i32(tdf
);
25590 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
25592 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
25593 uint32_t df
= 0, n
= 0;
25595 if ((dfn
& 0x30) == 0x00) {
25598 } else if ((dfn
& 0x38) == 0x20) {
25601 } else if ((dfn
& 0x3c) == 0x30) {
25604 } else if ((dfn
& 0x3e) == 0x38) {
25607 } else if (dfn
== 0x3E) {
25608 /* CTCMSA, CFCMSA, MOVE.V */
25609 gen_msa_elm_3e(env
, ctx
);
25612 generate_exception_end(ctx
, EXCP_RI
);
25616 gen_msa_elm_df(env
, ctx
, df
, n
);
25619 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25621 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25622 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
25623 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25624 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25625 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25627 TCGv_i32 twd
= tcg_const_i32(wd
);
25628 TCGv_i32 tws
= tcg_const_i32(ws
);
25629 TCGv_i32 twt
= tcg_const_i32(wt
);
25630 TCGv_i32 tdf
= tcg_temp_new_i32();
25632 /* adjust df value for floating-point instruction */
25633 tcg_gen_movi_i32(tdf
, df
+ 2);
25635 switch (MASK_MSA_3RF(ctx
->opcode
)) {
25637 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25640 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25643 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25646 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25649 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25652 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25655 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
25658 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25661 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25664 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25667 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25670 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25673 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25676 tcg_gen_movi_i32(tdf
, df
+ 1);
25677 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25680 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25683 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25685 case OPC_MADD_Q_df
:
25686 tcg_gen_movi_i32(tdf
, df
+ 1);
25687 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25690 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25692 case OPC_MSUB_Q_df
:
25693 tcg_gen_movi_i32(tdf
, df
+ 1);
25694 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25697 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25700 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
25703 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25706 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
25709 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25712 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25715 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25718 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25721 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25724 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25727 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25730 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25733 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
25735 case OPC_MULR_Q_df
:
25736 tcg_gen_movi_i32(tdf
, df
+ 1);
25737 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25740 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25742 case OPC_FMIN_A_df
:
25743 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25745 case OPC_MADDR_Q_df
:
25746 tcg_gen_movi_i32(tdf
, df
+ 1);
25747 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25750 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25753 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
25755 case OPC_MSUBR_Q_df
:
25756 tcg_gen_movi_i32(tdf
, df
+ 1);
25757 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25760 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25762 case OPC_FMAX_A_df
:
25763 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25766 MIPS_INVAL("MSA instruction");
25767 generate_exception_end(ctx
, EXCP_RI
);
25771 tcg_temp_free_i32(twd
);
25772 tcg_temp_free_i32(tws
);
25773 tcg_temp_free_i32(twt
);
25774 tcg_temp_free_i32(tdf
);
25777 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
25779 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25780 (op & (0x7 << 18)))
25781 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25782 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25783 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25784 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
25785 TCGv_i32 twd
= tcg_const_i32(wd
);
25786 TCGv_i32 tws
= tcg_const_i32(ws
);
25787 TCGv_i32 twt
= tcg_const_i32(wt
);
25788 TCGv_i32 tdf
= tcg_const_i32(df
);
25790 switch (MASK_MSA_2R(ctx
->opcode
)) {
25792 #if !defined(TARGET_MIPS64)
25793 /* Double format valid only for MIPS64 */
25794 if (df
== DF_DOUBLE
) {
25795 generate_exception_end(ctx
, EXCP_RI
);
25799 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
25802 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
25805 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
25808 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
25811 MIPS_INVAL("MSA instruction");
25812 generate_exception_end(ctx
, EXCP_RI
);
25816 tcg_temp_free_i32(twd
);
25817 tcg_temp_free_i32(tws
);
25818 tcg_temp_free_i32(twt
);
25819 tcg_temp_free_i32(tdf
);
25822 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25824 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25825 (op & (0xf << 17)))
25826 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25827 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25828 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25829 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
25830 TCGv_i32 twd
= tcg_const_i32(wd
);
25831 TCGv_i32 tws
= tcg_const_i32(ws
);
25832 TCGv_i32 twt
= tcg_const_i32(wt
);
25833 /* adjust df value for floating-point instruction */
25834 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
25836 switch (MASK_MSA_2RF(ctx
->opcode
)) {
25837 case OPC_FCLASS_df
:
25838 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
25840 case OPC_FTRUNC_S_df
:
25841 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
25843 case OPC_FTRUNC_U_df
:
25844 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
25847 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
25849 case OPC_FRSQRT_df
:
25850 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
25853 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
25856 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
25859 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
25861 case OPC_FEXUPL_df
:
25862 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
25864 case OPC_FEXUPR_df
:
25865 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
25868 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
25871 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
25873 case OPC_FTINT_S_df
:
25874 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
25876 case OPC_FTINT_U_df
:
25877 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
25879 case OPC_FFINT_S_df
:
25880 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
25882 case OPC_FFINT_U_df
:
25883 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
25887 tcg_temp_free_i32(twd
);
25888 tcg_temp_free_i32(tws
);
25889 tcg_temp_free_i32(twt
);
25890 tcg_temp_free_i32(tdf
);
25893 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
25895 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
25896 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25897 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25898 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25899 TCGv_i32 twd
= tcg_const_i32(wd
);
25900 TCGv_i32 tws
= tcg_const_i32(ws
);
25901 TCGv_i32 twt
= tcg_const_i32(wt
);
25903 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25905 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
25908 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
25911 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
25914 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
25917 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
25920 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
25923 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
25926 MIPS_INVAL("MSA instruction");
25927 generate_exception_end(ctx
, EXCP_RI
);
25931 tcg_temp_free_i32(twd
);
25932 tcg_temp_free_i32(tws
);
25933 tcg_temp_free_i32(twt
);
25936 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
25938 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25946 gen_msa_vec_v(env
, ctx
);
25949 gen_msa_2r(env
, ctx
);
25952 gen_msa_2rf(env
, ctx
);
25955 MIPS_INVAL("MSA instruction");
25956 generate_exception_end(ctx
, EXCP_RI
);
25961 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
25963 uint32_t opcode
= ctx
->opcode
;
25964 check_insn(ctx
, ASE_MSA
);
25965 check_msa_access(ctx
);
25967 switch (MASK_MSA_MINOR(opcode
)) {
25968 case OPC_MSA_I8_00
:
25969 case OPC_MSA_I8_01
:
25970 case OPC_MSA_I8_02
:
25971 gen_msa_i8(env
, ctx
);
25973 case OPC_MSA_I5_06
:
25974 case OPC_MSA_I5_07
:
25975 gen_msa_i5(env
, ctx
);
25977 case OPC_MSA_BIT_09
:
25978 case OPC_MSA_BIT_0A
:
25979 gen_msa_bit(env
, ctx
);
25981 case OPC_MSA_3R_0D
:
25982 case OPC_MSA_3R_0E
:
25983 case OPC_MSA_3R_0F
:
25984 case OPC_MSA_3R_10
:
25985 case OPC_MSA_3R_11
:
25986 case OPC_MSA_3R_12
:
25987 case OPC_MSA_3R_13
:
25988 case OPC_MSA_3R_14
:
25989 case OPC_MSA_3R_15
:
25990 gen_msa_3r(env
, ctx
);
25993 gen_msa_elm(env
, ctx
);
25995 case OPC_MSA_3RF_1A
:
25996 case OPC_MSA_3RF_1B
:
25997 case OPC_MSA_3RF_1C
:
25998 gen_msa_3rf(env
, ctx
);
26001 gen_msa_vec(env
, ctx
);
26012 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
26013 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
26014 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
26015 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
26017 TCGv_i32 twd
= tcg_const_i32(wd
);
26018 TCGv taddr
= tcg_temp_new();
26019 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
26021 switch (MASK_MSA_MINOR(opcode
)) {
26023 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
26026 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
26029 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
26032 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
26035 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
26038 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
26041 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
26044 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
26048 tcg_temp_free_i32(twd
);
26049 tcg_temp_free(taddr
);
26053 MIPS_INVAL("MSA instruction");
26054 generate_exception_end(ctx
, EXCP_RI
);
26060 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
26063 int rs
, rt
, rd
, sa
;
26067 /* make sure instructions are on a word boundary */
26068 if (ctx
->base
.pc_next
& 0x3) {
26069 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
26070 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
26074 /* Handle blikely not taken case */
26075 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
26076 TCGLabel
*l1
= gen_new_label();
26078 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
26079 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
26080 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
26084 op
= MASK_OP_MAJOR(ctx
->opcode
);
26085 rs
= (ctx
->opcode
>> 21) & 0x1f;
26086 rt
= (ctx
->opcode
>> 16) & 0x1f;
26087 rd
= (ctx
->opcode
>> 11) & 0x1f;
26088 sa
= (ctx
->opcode
>> 6) & 0x1f;
26089 imm
= (int16_t)ctx
->opcode
;
26092 decode_opc_special(env
, ctx
);
26095 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
26096 decode_tx79_mmi(env
, ctx
);
26098 decode_opc_special2_legacy(env
, ctx
);
26102 if (ctx
->insn_flags
& INSN_R5900
) {
26103 decode_tx79_sq(env
, ctx
); /* TX79_SQ */
26105 decode_opc_special3(env
, ctx
);
26109 op1
= MASK_REGIMM(ctx
->opcode
);
26111 case OPC_BLTZL
: /* REGIMM branches */
26115 check_insn(ctx
, ISA_MIPS2
);
26116 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26120 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26124 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26126 /* OPC_NAL, OPC_BAL */
26127 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
26129 generate_exception_end(ctx
, EXCP_RI
);
26132 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
26135 case OPC_TGEI
: /* REGIMM traps */
26142 check_insn(ctx
, ISA_MIPS2
);
26143 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26144 gen_trap(ctx
, op1
, rs
, -1, imm
);
26147 check_insn(ctx
, ISA_MIPS32R6
);
26148 generate_exception_end(ctx
, EXCP_RI
);
26151 check_insn(ctx
, ISA_MIPS32R2
);
26152 /* Break the TB to be able to sync copied instructions
26154 ctx
->base
.is_jmp
= DISAS_STOP
;
26156 case OPC_BPOSGE32
: /* MIPS DSP branch */
26157 #if defined(TARGET_MIPS64)
26161 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
26163 #if defined(TARGET_MIPS64)
26165 check_insn(ctx
, ISA_MIPS32R6
);
26166 check_mips_64(ctx
);
26168 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
26172 check_insn(ctx
, ISA_MIPS32R6
);
26173 check_mips_64(ctx
);
26175 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
26179 default: /* Invalid */
26180 MIPS_INVAL("regimm");
26181 generate_exception_end(ctx
, EXCP_RI
);
26186 check_cp0_enabled(ctx
);
26187 op1
= MASK_CP0(ctx
->opcode
);
26195 #if defined(TARGET_MIPS64)
26199 #ifndef CONFIG_USER_ONLY
26200 gen_cp0(env
, ctx
, op1
, rt
, rd
);
26201 #endif /* !CONFIG_USER_ONLY */
26219 #ifndef CONFIG_USER_ONLY
26220 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
26221 #endif /* !CONFIG_USER_ONLY */
26224 #ifndef CONFIG_USER_ONLY
26227 TCGv t0
= tcg_temp_new();
26229 op2
= MASK_MFMC0(ctx
->opcode
);
26233 gen_helper_dmt(t0
);
26234 gen_store_gpr(t0
, rt
);
26238 gen_helper_emt(t0
);
26239 gen_store_gpr(t0
, rt
);
26243 gen_helper_dvpe(t0
, cpu_env
);
26244 gen_store_gpr(t0
, rt
);
26248 gen_helper_evpe(t0
, cpu_env
);
26249 gen_store_gpr(t0
, rt
);
26252 check_insn(ctx
, ISA_MIPS32R6
);
26254 gen_helper_dvp(t0
, cpu_env
);
26255 gen_store_gpr(t0
, rt
);
26259 check_insn(ctx
, ISA_MIPS32R6
);
26261 gen_helper_evp(t0
, cpu_env
);
26262 gen_store_gpr(t0
, rt
);
26266 check_insn(ctx
, ISA_MIPS32R2
);
26267 save_cpu_state(ctx
, 1);
26268 gen_helper_di(t0
, cpu_env
);
26269 gen_store_gpr(t0
, rt
);
26270 /* Stop translation as we may have switched
26271 the execution mode. */
26272 ctx
->base
.is_jmp
= DISAS_STOP
;
26275 check_insn(ctx
, ISA_MIPS32R2
);
26276 save_cpu_state(ctx
, 1);
26277 gen_helper_ei(t0
, cpu_env
);
26278 gen_store_gpr(t0
, rt
);
26279 /* DISAS_STOP isn't sufficient, we need to ensure we break
26280 out of translated code to check for pending interrupts */
26281 gen_save_pc(ctx
->base
.pc_next
+ 4);
26282 ctx
->base
.is_jmp
= DISAS_EXIT
;
26284 default: /* Invalid */
26285 MIPS_INVAL("mfmc0");
26286 generate_exception_end(ctx
, EXCP_RI
);
26291 #endif /* !CONFIG_USER_ONLY */
26294 check_insn(ctx
, ISA_MIPS32R2
);
26295 gen_load_srsgpr(rt
, rd
);
26298 check_insn(ctx
, ISA_MIPS32R2
);
26299 gen_store_srsgpr(rt
, rd
);
26303 generate_exception_end(ctx
, EXCP_RI
);
26307 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
26308 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26309 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
26310 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26313 /* Arithmetic with immediate opcode */
26314 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26318 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26320 case OPC_SLTI
: /* Set on less than with immediate opcode */
26322 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
26324 case OPC_ANDI
: /* Arithmetic with immediate opcode */
26325 case OPC_LUI
: /* OPC_AUI */
26328 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
26330 case OPC_J
: /* Jump */
26332 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26333 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26336 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
26337 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26339 generate_exception_end(ctx
, EXCP_RI
);
26342 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
26343 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26346 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26349 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
26350 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26352 generate_exception_end(ctx
, EXCP_RI
);
26355 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
26356 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26359 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26362 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
26365 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26367 check_insn(ctx
, ISA_MIPS32R6
);
26368 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
26369 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26372 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
26375 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26377 check_insn(ctx
, ISA_MIPS32R6
);
26378 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
26379 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26384 check_insn(ctx
, ISA_MIPS2
);
26385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26389 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
26391 case OPC_LL
: /* Load and stores */
26392 check_insn(ctx
, ISA_MIPS2
);
26393 check_insn_opc_user_only(ctx
, INSN_R5900
);
26397 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26405 gen_ld(ctx
, op
, rt
, rs
, imm
);
26409 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26414 gen_st(ctx
, op
, rt
, rs
, imm
);
26417 check_insn(ctx
, ISA_MIPS2
);
26418 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26419 check_insn_opc_user_only(ctx
, INSN_R5900
);
26420 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26423 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26424 check_cp0_enabled(ctx
);
26425 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
26426 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26427 gen_cache_operation(ctx
, rt
, rs
, imm
);
26429 /* Treat as NOP. */
26432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26433 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
26435 /* Treat as NOP. */
26438 /* Floating point (COP1). */
26443 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
26447 op1
= MASK_CP1(ctx
->opcode
);
26452 check_cp1_enabled(ctx
);
26453 check_insn(ctx
, ISA_MIPS32R2
);
26459 check_cp1_enabled(ctx
);
26460 gen_cp1(ctx
, op1
, rt
, rd
);
26462 #if defined(TARGET_MIPS64)
26465 check_cp1_enabled(ctx
);
26466 check_insn(ctx
, ISA_MIPS3
);
26467 check_mips_64(ctx
);
26468 gen_cp1(ctx
, op1
, rt
, rd
);
26471 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
26472 check_cp1_enabled(ctx
);
26473 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26475 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26480 check_insn(ctx
, ASE_MIPS3D
);
26481 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26482 (rt
>> 2) & 0x7, imm
<< 2);
26486 check_cp1_enabled(ctx
);
26487 check_insn(ctx
, ISA_MIPS32R6
);
26488 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
26492 check_cp1_enabled(ctx
);
26493 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26495 check_insn(ctx
, ASE_MIPS3D
);
26498 check_cp1_enabled(ctx
);
26499 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26500 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
26501 (rt
>> 2) & 0x7, imm
<< 2);
26508 check_cp1_enabled(ctx
);
26509 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26515 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
26516 check_cp1_enabled(ctx
);
26517 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26519 case R6_OPC_CMP_AF_S
:
26520 case R6_OPC_CMP_UN_S
:
26521 case R6_OPC_CMP_EQ_S
:
26522 case R6_OPC_CMP_UEQ_S
:
26523 case R6_OPC_CMP_LT_S
:
26524 case R6_OPC_CMP_ULT_S
:
26525 case R6_OPC_CMP_LE_S
:
26526 case R6_OPC_CMP_ULE_S
:
26527 case R6_OPC_CMP_SAF_S
:
26528 case R6_OPC_CMP_SUN_S
:
26529 case R6_OPC_CMP_SEQ_S
:
26530 case R6_OPC_CMP_SEUQ_S
:
26531 case R6_OPC_CMP_SLT_S
:
26532 case R6_OPC_CMP_SULT_S
:
26533 case R6_OPC_CMP_SLE_S
:
26534 case R6_OPC_CMP_SULE_S
:
26535 case R6_OPC_CMP_OR_S
:
26536 case R6_OPC_CMP_UNE_S
:
26537 case R6_OPC_CMP_NE_S
:
26538 case R6_OPC_CMP_SOR_S
:
26539 case R6_OPC_CMP_SUNE_S
:
26540 case R6_OPC_CMP_SNE_S
:
26541 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26543 case R6_OPC_CMP_AF_D
:
26544 case R6_OPC_CMP_UN_D
:
26545 case R6_OPC_CMP_EQ_D
:
26546 case R6_OPC_CMP_UEQ_D
:
26547 case R6_OPC_CMP_LT_D
:
26548 case R6_OPC_CMP_ULT_D
:
26549 case R6_OPC_CMP_LE_D
:
26550 case R6_OPC_CMP_ULE_D
:
26551 case R6_OPC_CMP_SAF_D
:
26552 case R6_OPC_CMP_SUN_D
:
26553 case R6_OPC_CMP_SEQ_D
:
26554 case R6_OPC_CMP_SEUQ_D
:
26555 case R6_OPC_CMP_SLT_D
:
26556 case R6_OPC_CMP_SULT_D
:
26557 case R6_OPC_CMP_SLE_D
:
26558 case R6_OPC_CMP_SULE_D
:
26559 case R6_OPC_CMP_OR_D
:
26560 case R6_OPC_CMP_UNE_D
:
26561 case R6_OPC_CMP_NE_D
:
26562 case R6_OPC_CMP_SOR_D
:
26563 case R6_OPC_CMP_SUNE_D
:
26564 case R6_OPC_CMP_SNE_D
:
26565 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
26568 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
26569 rt
, rd
, sa
, (imm
>> 8) & 0x7);
26574 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
26589 check_insn(ctx
, ASE_MSA
);
26590 gen_msa_branch(env
, ctx
, op1
);
26594 generate_exception_end(ctx
, EXCP_RI
);
26599 /* Compact branches [R6] and COP2 [non-R6] */
26600 case OPC_BC
: /* OPC_LWC2 */
26601 case OPC_BALC
: /* OPC_SWC2 */
26602 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26603 /* OPC_BC, OPC_BALC */
26604 gen_compute_compact_branch(ctx
, op
, 0, 0,
26605 sextract32(ctx
->opcode
<< 2, 0, 28));
26607 /* OPC_LWC2, OPC_SWC2 */
26608 /* COP2: Not implemented. */
26609 generate_exception_err(ctx
, EXCP_CpU
, 2);
26612 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
26613 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
26614 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26616 /* OPC_BEQZC, OPC_BNEZC */
26617 gen_compute_compact_branch(ctx
, op
, rs
, 0,
26618 sextract32(ctx
->opcode
<< 2, 0, 23));
26620 /* OPC_JIC, OPC_JIALC */
26621 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
26624 /* OPC_LWC2, OPC_SWC2 */
26625 /* COP2: Not implemented. */
26626 generate_exception_err(ctx
, EXCP_CpU
, 2);
26630 check_insn(ctx
, INSN_LOONGSON2F
);
26631 /* Note that these instructions use different fields. */
26632 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
26636 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26637 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
26638 check_cp1_enabled(ctx
);
26639 op1
= MASK_CP3(ctx
->opcode
);
26643 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26649 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26650 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
26653 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26654 /* Treat as NOP. */
26657 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26671 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26672 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
26676 generate_exception_end(ctx
, EXCP_RI
);
26680 generate_exception_err(ctx
, EXCP_CpU
, 1);
26684 #if defined(TARGET_MIPS64)
26685 /* MIPS64 opcodes */
26687 check_insn_opc_user_only(ctx
, INSN_R5900
);
26691 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26695 check_insn(ctx
, ISA_MIPS3
);
26696 check_mips_64(ctx
);
26697 gen_ld(ctx
, op
, rt
, rs
, imm
);
26701 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26704 check_insn(ctx
, ISA_MIPS3
);
26705 check_mips_64(ctx
);
26706 gen_st(ctx
, op
, rt
, rs
, imm
);
26709 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26710 check_insn(ctx
, ISA_MIPS3
);
26711 check_insn_opc_user_only(ctx
, INSN_R5900
);
26712 check_mips_64(ctx
);
26713 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26715 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
26716 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26717 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
26718 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26721 check_insn(ctx
, ISA_MIPS3
);
26722 check_mips_64(ctx
);
26723 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26727 check_insn(ctx
, ISA_MIPS3
);
26728 check_mips_64(ctx
);
26729 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26732 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
26733 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26734 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26736 MIPS_INVAL("major opcode");
26737 generate_exception_end(ctx
, EXCP_RI
);
26741 case OPC_DAUI
: /* OPC_JALX */
26742 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26743 #if defined(TARGET_MIPS64)
26745 check_mips_64(ctx
);
26747 generate_exception(ctx
, EXCP_RI
);
26748 } else if (rt
!= 0) {
26749 TCGv t0
= tcg_temp_new();
26750 gen_load_gpr(t0
, rs
);
26751 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
26755 generate_exception_end(ctx
, EXCP_RI
);
26756 MIPS_INVAL("major opcode");
26760 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
26761 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26762 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26765 case OPC_MSA
: /* OPC_MDMX */
26766 if (ctx
->insn_flags
& INSN_R5900
) {
26767 decode_tx79_lq(env
, ctx
); /* TX79_LQ */
26769 /* MDMX: Not implemented. */
26774 check_insn(ctx
, ISA_MIPS32R6
);
26775 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
26777 default: /* Invalid */
26778 MIPS_INVAL("major opcode");
26779 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Translator hook: initialise the per-TB DisasContext from the CPU state.
 * Caches CP0 Config capability bits, FPU mode bits and the memory index in
 * the DisasContext so the instruction decoder can test them without
 * dereferencing env for every instruction.
 * NOTE(review): this chunk is a lossy line-split extraction -- braces and
 * some original lines (e.g. 26785, 26788) are missing from this view;
 * restore from upstream QEMU before compiling.
 */
26784 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
26786 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26787 CPUMIPSState
*env
= cs
->env_ptr
;
/* page_start bounds this TB: translation stops at the page boundary below. */
26789 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
26790 ctx
->saved_pc
= -1;
26791 ctx
->insn_flags
= env
->insn_flags
;
26792 ctx
->CP0_Config1
= env
->CP0_Config1
;
26793 ctx
->CP0_Config2
= env
->CP0_Config2
;
26794 ctx
->CP0_Config3
= env
->CP0_Config3
;
26795 ctx
->CP0_Config5
= env
->CP0_Config5
;
/* Capability bits decoded once here from the CP0 Config3/4/5 registers. */
26797 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
26798 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
26799 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
26800 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
26801 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
26802 ctx
->PAMask
= env
->PAMask
;
26803 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
26804 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
26805 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
26806 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
26807 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
26808 /* Restore delay slot state from the tb context. */
26809 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
26810 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
/* Paired-single support: FCR0.PS, or the Loongson 2E/2F implementations. */
26811 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
26812 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
26813 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
26814 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
26815 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
26816 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
26817 restore_cpu_state(env
, ctx
);
/* User-mode emulation always translates with the UM memory index. */
26818 #ifdef CONFIG_USER_ONLY
26819 ctx
->mem_idx
= MIPS_HFLAG_UM
;
26821 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
/* R6 translations default to unaligned memops; pre-R6 require alignment. */
26823 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
26824 MO_UNALN
: MO_ALIGN
;
26826 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
/*
 * Translator hook called at the start of each TB.
 * NOTE(review): the body is not visible in this lossy extraction; in
 * upstream QEMU this hook is empty for MIPS -- confirm against upstream.
 */
26830 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
/*
 * Translator hook called before each instruction: records pc_next and the
 * branch/delay-slot hflags in the TCG insn-start op so the state can be
 * restored on exception (see restore_state_to_opc).
 * NOTE(review): trailing argument line(s) are missing from this extraction.
 */
26834 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26836 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26838 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
/*
 * Translator hook: emit a debug exception when a breakpoint is hit at the
 * current pc. Ends the TB (DISAS_NORETURN) and advances pc_next so the TB
 * size covers the breakpointed address (rationale in the comment below).
 */
26842 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
26843 const CPUBreakpoint
*bp
)
26845 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
/* Flush live CPU state before raising the debug exception. */
26847 save_cpu_state(ctx
, 1);
26848 ctx
->base
.is_jmp
= DISAS_NORETURN
;
26849 gen_helper_raise_exception_debug(cpu_env
);
26850 /* The address covered by the breakpoint must be included in
26851 [tb->pc, tb->pc + tb->size) in order to for it to be
26852 properly cleared -- thus we increment the PC here so that
26853 the logic setting tb->size below does the right thing. */
26854 ctx
->base
.pc_next
+= 4;
/*
 * Translator hook: fetch and decode one instruction, dispatching on the
 * active instruction-set mode (nanoMIPS / standard 32-bit / microMIPS /
 * MIPS16), then handle delay-slot bookkeeping and decide whether the TB
 * must end (single-step, page crossing).
 * NOTE(review): lossy extraction -- several original lines (e.g. 26862-26864,
 * 26879, 26889-26891) are missing from this view.
 */
26858 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
26860 CPUMIPSState
*env
= cs
->env_ptr
;
26861 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
/* Remember whether this insn sits in a branch delay/forbidden slot. */
26865 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
/* Fetch width depends on the ISA mode: 16-bit units for nanoMIPS,
 * microMIPS and MIPS16; a full 32-bit word otherwise. */
26866 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
26867 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26868 insn_bytes
= decode_nanomips_opc(env
, ctx
);
26869 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
26870 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
26872 decode_opc(env
, ctx
);
26873 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
26874 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26875 insn_bytes
= decode_micromips_opc(env
, ctx
);
26876 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
26877 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26878 insn_bytes
= decode_mips16_opc(env
, ctx
);
/* No decodable ISA mode: raise Reserved Instruction and stop the TB. */
26880 generate_exception_end(ctx
, EXCP_RI
);
26881 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
/* A branch was decoded: if it carries no delay-slot/forbidden-slot
 * marker, the branch itself must be emitted now. */
26885 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
26886 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
26887 MIPS_HFLAG_FBNSLOT
))) {
26888 /* force to generate branch as there is neither delay nor
26892 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
26893 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
26894 /* Force to generate branch as microMIPS R6 doesn't restrict
26895 branches in the forbidden slot. */
26900 gen_branch(ctx
, insn_bytes
);
26902 ctx
->base
.pc_next
+= insn_bytes
;
26904 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
26907 /* Execute a branch and its delay slot as a single instruction.
26908 This is what GDB expects and is consistent with what the
26909 hardware does (e.g. if a delay slot instruction faults, the
26910 reported PC is the PC of the branch). */
26911 if (ctx
->base
.singlestep_enabled
&&
26912 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
26913 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
/* End the TB when translation reaches the next page boundary. */
26915 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
26916 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
/*
 * Translator hook: close the TB according to is_jmp -- raise a debug
 * exception when single-stepping, otherwise emit the appropriate TB exit
 * (goto_tb chain, lookup-and-goto, or plain exit).
 * NOTE(review): lossy extraction -- some case labels and break statements
 * are missing from this view.
 */
26920 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
26922 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
/* Single-step: flush state and hand control to the debug exception,
 * unless the TB already ended with a no-return exception. */
26924 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
26925 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
26926 gen_helper_raise_exception_debug(cpu_env
);
26928 switch (ctx
->base
.is_jmp
) {
/* PC was (possibly) changed dynamically: look the next TB up at runtime. */
26930 gen_save_pc(ctx
->base
.pc_next
);
26931 tcg_gen_lookup_and_goto_ptr();
26934 case DISAS_TOO_MANY
:
26935 save_cpu_state(ctx
, 0);
26936 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
26939 tcg_gen_exit_tb(NULL
, 0);
26941 case DISAS_NORETURN
:
26944 g_assert_not_reached();
/*
 * Translator hook: log the guest disassembly of the just-translated TB
 * (symbol name of pc_first, then the target disassembly of the TB bytes).
 */
26949 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
26951 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
26952 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
/*
 * TranslatorOps vtable wiring the MIPS front end into the generic
 * translator_loop() machinery (see exec/translator.h).
 */
26955 static const TranslatorOps mips_tr_ops
= {
26956 .init_disas_context
= mips_tr_init_disas_context
,
26957 .tb_start
= mips_tr_tb_start
,
26958 .insn_start
= mips_tr_insn_start
,
26959 .breakpoint_check
= mips_tr_breakpoint_check
,
26960 .translate_insn
= mips_tr_translate_insn
,
26961 .tb_stop
= mips_tr_tb_stop
,
26962 .disas_log
= mips_tr_disas_log
,
/*
 * Public entry point: translate one TB by running the generic translator
 * loop with the MIPS TranslatorOps above.
 * NOTE(review): the local DisasContext declaration (original line 26967)
 * is missing from this lossy extraction.
 */
26965 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
26969 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
/*
 * Dump the FPU state (FCR0/FCR31, accrued exception flags, and every FP
 * register) via the supplied fprintf callback. Registers are printed one
 * per 64-bit register in FR=1 mode, or in even/odd pairs in FR=0 mode.
 * NOTE(review): lossy extraction -- the printfpr() macro below is missing
 * its FR-mode conditional scaffolding (original lines 26979-26980, 26987-
 * 26988, 26994, 26997-27000); restore from upstream before compiling.
 */
26972 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
26976 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
/* Helper macro: print one FP register as word/double/float views. */
26978 #define printfpr(fp) \
26981 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26982 " fd:%13g fs:%13g psu: %13g\n", \
26983 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
26984 (double)(fp)->fd, \
26985 (double)(fp)->fs[FP_ENDIAN_IDX], \
26986 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
26989 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
26990 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
26991 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26992 " fd:%13g fs:%13g psu:%13g\n", \
26993 tmp.w[FP_ENDIAN_IDX], tmp.d, \
26995 (double)tmp.fs[FP_ENDIAN_IDX], \
26996 (double)tmp.fs[!FP_ENDIAN_IDX]); \
27001 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
27002 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
27003 get_float_exception_flags(&env
->active_fpu
.fp_status
));
/* Step by 1 register in FR=1 mode, by 2 (even regs only) in FR=0 mode. */
27004 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
27005 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
27006 printfpr(&env
->active_fpu
.fpr
[i
]);
/*
 * Dump the general CPU state (PC, HI/LO, hflags, GPRs, key CP0 registers)
 * via the supplied fprintf callback; also dumps the FPU state when
 * CPU_DUMP_FPU is requested and the FPU is enabled.
 * NOTE(review): lossy extraction -- some lines (e.g. originals 27025,
 * 27028, 27035) are missing from this view.
 */
27012 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
27015 MIPSCPU
*cpu
= MIPS_CPU(cs
);
27016 CPUMIPSState
*env
= &cpu
->env
;
27019 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
27020 " LO=0x" TARGET_FMT_lx
" ds %04x "
27021 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
27022 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
27023 env
->hflags
, env
->btarget
, env
->bcond
);
/* All 32 GPRs, several per output row. */
27024 for (i
= 0; i
< 32; i
++) {
27026 cpu_fprintf(f
, "GPR%02d:", i
);
27027 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
27029 cpu_fprintf(f
, "\n");
/* Key CP0 state: Status/Cause/EPC and the Config registers. */
27032 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
27033 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
27034 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
27036 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
27037 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
27038 env
->CP0_Config2
, env
->CP0_Config3
);
27039 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
27040 env
->CP0_Config4
, env
->CP0_Config5
);
27041 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
27042 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
/*
 * One-time TCG initialisation: create the global TCGv variables that map
 * onto CPUMIPSState fields (GPRs, MSA/FPU registers, PC, HI/LO, DSP
 * control, branch condition/target, hflags, FP control registers).
 * NOTE(review): lossy extraction -- some register-name argument lines
 * (e.g. originals 27054, 27073, 27076-27077, 27080, 27087, 27090, 27093)
 * are missing from this view.
 */
27046 void mips_tcg_init(void)
/* GPR 0 is hardwired to zero, so only registers 1..31 get a TCG global. */
27051 for (i
= 1; i
< 32; i
++)
27052 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27053 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
/* MSA vector registers; the scalar FPU registers alias their low halves. */
27056 for (i
= 0; i
< 32; i
++) {
27057 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
27059 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
27060 /* The scalar floating-point unit (FPU) registers are mapped on
27061 * the MSA vector registers. */
27062 fpu_f64
[i
] = msa_wr_d
[i
* 2];
27063 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
27064 msa_wr_d
[i
* 2 + 1] =
27065 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
27068 cpu_PC
= tcg_global_mem_new(cpu_env
,
27069 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
/* HI/LO pairs, one per DSP accumulator. */
27070 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
27071 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
27072 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
27074 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
27075 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
27078 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
27079 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
27081 bcond
= tcg_global_mem_new(cpu_env
,
27082 offsetof(CPUMIPSState
, bcond
), "bcond");
27083 btarget
= tcg_global_mem_new(cpu_env
,
27084 offsetof(CPUMIPSState
, btarget
), "btarget");
27085 hflags
= tcg_global_mem_new_i32(cpu_env
,
27086 offsetof(CPUMIPSState
, hflags
), "hflags");
27088 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
27089 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
27091 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
27092 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
27096 #include "translate_init.inc.c"
/*
 * Realize-time environment setup: set the default exception base (the
 * classic MIPS reset vector 0xBFC00000) and initialise the MMU (system
 * emulation only), FPU and MVP blocks for the selected CPU model.
 */
27098 void cpu_mips_realize_env(CPUMIPSState
*env
)
27100 env
->exception_base
= (int32_t)0xBFC00000;
/* MMU only exists in system emulation; user mode skips it. */
27102 #ifndef CONFIG_USER_ONLY
27103 mmu_init(env
, env
->cpu_model
);
27105 fpu_init(env
, env
->cpu_model
);
27106 mvp_init(env
, env
->cpu_model
);
/*
 * Return true if the named CPU model advertises a Coherent Processing
 * System (Config3.CMGCR set), i.e. can be used in a CPS SMP machine.
 */
27109 bool cpu_supports_cps_smp(const char *cpu_type
)
27111 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27112 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
/*
 * Return true if the named CPU model supports any of the ISA/ASE feature
 * bits in 'isa' (tested against the model's insn_flags mask).
 */
27115 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
27117 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
27118 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
/*
 * Override the reset exception base of the VP with the given index
 * (looked up via qemu_get_cpu; assumes the index is valid).
 */
27121 void cpu_set_exception_base(int vp_index
, target_ulong address
)
27123 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
27124 vp
->env
.exception_base
= address
;
27127 void cpu_state_reset(CPUMIPSState
*env
)
27129 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
27130 CPUState
*cs
= CPU(cpu
);
27132 /* Reset registers to their default values */
27133 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
27134 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
27135 #ifdef TARGET_WORDS_BIGENDIAN
27136 env
->CP0_Config0
|= (1 << CP0C0_BE
);
27138 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
27139 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
27140 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
27141 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
27142 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
27143 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
27144 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
27145 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
27146 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
27147 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
27148 << env
->cpu_model
->CP0_LLAddr_shift
;
27149 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
27150 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
27151 env
->CCRes
= env
->cpu_model
->CCRes
;
27152 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
27153 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
27154 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
27155 env
->current_tc
= 0;
27156 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
27157 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
27158 #if defined(TARGET_MIPS64)
27159 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
27160 env
->SEGMask
|= 3ULL << 62;
27163 env
->PABITS
= env
->cpu_model
->PABITS
;
27164 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
27165 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
27166 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
27167 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
27168 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
27169 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
27170 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
27171 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
27172 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
27173 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
27174 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
27175 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
27176 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
27177 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
27178 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
27179 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
27180 env
->msair
= env
->cpu_model
->MSAIR
;
27181 env
->insn_flags
= env
->cpu_model
->insn_flags
;
27183 #if defined(CONFIG_USER_ONLY)
27184 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
27185 # ifdef TARGET_MIPS64
27186 /* Enable 64-bit register mode. */
27187 env
->CP0_Status
|= (1 << CP0St_PX
);
27189 # ifdef TARGET_ABI_MIPSN64
27190 /* Enable 64-bit address mode. */
27191 env
->CP0_Status
|= (1 << CP0St_UX
);
27193 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
27194 hardware registers. */
27195 env
->CP0_HWREna
|= 0x0000000F;
27196 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
27197 env
->CP0_Status
|= (1 << CP0St_CU1
);
27199 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
27200 env
->CP0_Status
|= (1 << CP0St_MX
);
27202 # if defined(TARGET_MIPS64)
27203 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
27204 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
27205 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
27206 env
->CP0_Status
|= (1 << CP0St_FR
);
27210 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
27211 /* If the exception was raised from a delay slot,
27212 come back to the jump. */
27213 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
27214 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
27216 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
27218 env
->active_tc
.PC
= env
->exception_base
;
27219 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
27220 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
27221 env
->CP0_Wired
= 0;
27222 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
27223 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
27224 if (mips_um_ksegs_enabled()) {
27225 env
->CP0_EBase
|= 0x40000000;
27227 env
->CP0_EBase
|= (int32_t)0x80000000;
27229 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
27230 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
27232 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
27234 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
27235 /* vectored interrupts not implemented, timer on int 7,
27236 no performance counters. */
27237 env
->CP0_IntCtl
= 0xe0000000;
27241 for (i
= 0; i
< 7; i
++) {
27242 env
->CP0_WatchLo
[i
] = 0;
27243 env
->CP0_WatchHi
[i
] = 0x80000000;
27245 env
->CP0_WatchLo
[7] = 0;
27246 env
->CP0_WatchHi
[7] = 0;
27248 /* Count register increments in debug mode, EJTAG version 1 */
27249 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
27251 cpu_mips_store_count(env
, 1);
27253 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
27256 /* Only TC0 on VPE 0 starts as active. */
27257 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
27258 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
27259 env
->tcs
[i
].CP0_TCHalt
= 1;
27261 env
->active_tc
.CP0_TCHalt
= 1;
27264 if (cs
->cpu_index
== 0) {
27265 /* VPE0 starts up enabled. */
27266 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
27267 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
27269 /* TC0 starts up unhalted. */
27271 env
->active_tc
.CP0_TCHalt
= 0;
27272 env
->tcs
[0].CP0_TCHalt
= 0;
27273 /* With thread 0 active. */
27274 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
27275 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
27280 * Configure default legacy segmentation control. We use this regardless of
27281 * whether segmentation control is presented to the guest.
27283 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
27284 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
27285 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
27286 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
27287 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
27288 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27290 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
27291 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
27292 (3 << CP0SC_C
)) << 16;
27293 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
27294 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27295 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
27296 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
27297 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
27298 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
27299 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
27300 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
27302 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
27303 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
27304 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
27305 env
->CP0_Status
|= (1 << CP0St_FR
);
27308 if (env
->insn_flags
& ISA_MIPS32R6
) {
27310 env
->CP0_PWSize
= 0x40;
27316 env
->CP0_PWField
= 0x0C30C302;
27323 env
->CP0_PWField
= 0x02;
27326 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
27327 /* microMIPS on reset when Config3.ISA is 3 */
27328 env
->hflags
|= MIPS_HFLAG_M16
;
27332 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
27336 compute_hflags(env
);
27337 restore_fp_status(env
);
27338 restore_pamask(env
);
27339 cs
->exception_index
= EXCP_NONE
;
27341 if (semihosting_get_argc()) {
27342 /* UHI interface can be used to obtain argc and argv */
27343 env
->active_tc
.gpr
[4] = -1;
27347 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
27348 target_ulong
*data
)
27350 env
->active_tc
.PC
= data
[0];
27351 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
27352 env
->hflags
|= data
[1];
27353 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
27354 case MIPS_HFLAG_BR
:
27356 case MIPS_HFLAG_BC
:
27357 case MIPS_HFLAG_BL
:
27359 env
->btarget
= data
[2];